diff options
Diffstat (limited to 'spec/lib')
407 files changed, 8565 insertions, 3540 deletions
diff --git a/spec/lib/api/ci/helpers/runner_helpers_spec.rb b/spec/lib/api/ci/helpers/runner_helpers_spec.rb index d32f7e4f0be..c36c8d23e88 100644 --- a/spec/lib/api/ci/helpers/runner_helpers_spec.rb +++ b/spec/lib/api/ci/helpers/runner_helpers_spec.rb @@ -34,6 +34,7 @@ RSpec.describe API::Ci::Helpers::Runner, feature_category: :runner do context 'when runner info is present' do let(:name) { 'runner' } + let(:system_id) { 's_c2d22f638c25' } let(:version) { '1.2.3' } let(:revision) { '10.0' } let(:platform) { 'test' } @@ -42,6 +43,7 @@ RSpec.describe API::Ci::Helpers::Runner, feature_category: :runner do let(:config) { { 'gpus' => 'all' } } let(:runner_params) do { + system_id: system_id, 'info' => { 'name' => name, @@ -59,7 +61,10 @@ RSpec.describe API::Ci::Helpers::Runner, feature_category: :runner do subject(:details) { runner_helper.get_runner_details_from_request } it 'extracts the runner details', :aggregate_failures do - expect(details.keys).to match_array(%w(name version revision platform architecture executor config ip_address)) + expect(details.keys).to match_array( + %w(system_id name version revision platform architecture executor config ip_address) + ) + expect(details['system_id']).to eq(system_id) expect(details['name']).to eq(name) expect(details['version']).to eq(version) expect(details['revision']).to eq(revision) diff --git a/spec/lib/api/ci/helpers/runner_spec.rb b/spec/lib/api/ci/helpers/runner_spec.rb index 6801d16d13e..8264db8344d 100644 --- a/spec/lib/api/ci/helpers/runner_spec.rb +++ b/spec/lib/api/ci/helpers/runner_spec.rb @@ -9,7 +9,7 @@ RSpec.describe API::Ci::Helpers::Runner do allow(helper).to receive(:env).and_return({}) end - describe '#current_job' do + describe '#current_job', feature_category: :continuous_integration do let(:build) { create(:ci_build, :running) } it 'handles sticking of a build when a build ID is specified' do @@ -38,7 +38,7 @@ RSpec.describe API::Ci::Helpers::Runner do end end - describe '#current_runner' do + 
describe '#current_runner', feature_category: :runner do let(:runner) { create(:ci_runner, token: 'foo') } it 'handles sticking of a runner if a token is specified' do @@ -67,7 +67,79 @@ RSpec.describe API::Ci::Helpers::Runner do end end - describe '#track_runner_authentication', :prometheus do + describe '#current_runner_machine', :freeze_time, feature_category: :runner_fleet do + let(:runner) { create(:ci_runner, token: 'foo') } + let(:runner_machine) { create(:ci_runner_machine, runner: runner, system_xid: 'bar', contacted_at: 1.hour.ago) } + + subject(:current_runner_machine) { helper.current_runner_machine } + + context 'with create_runner_machine FF enabled' do + before do + stub_feature_flags(create_runner_machine: true) + end + + context 'when runner machine already exists' do + before do + allow(helper).to receive(:params).and_return(token: runner.token, system_id: runner_machine.system_xid) + end + + it { is_expected.to eq(runner_machine) } + + it 'does not update the contacted_at field' do + expect(current_runner_machine.contacted_at).to eq 1.hour.ago + end + end + + context 'when runner machine cannot be found' do + it 'creates a new runner machine', :aggregate_failures do + allow(helper).to receive(:params).and_return(token: runner.token, system_id: 'new_system_id') + + expect { current_runner_machine }.to change { Ci::RunnerMachine.count }.by(1) + + expect(current_runner_machine).not_to be_nil + expect(current_runner_machine.system_xid).to eq('new_system_id') + expect(current_runner_machine.contacted_at).to eq(Time.current) + expect(current_runner_machine.runner).to eq(runner) + end + + it 'creates a new <legacy> runner machine if system_id is not specified', :aggregate_failures do + allow(helper).to receive(:params).and_return(token: runner.token) + + expect { current_runner_machine }.to change { Ci::RunnerMachine.count }.by(1) + + expect(current_runner_machine).not_to be_nil + expect(current_runner_machine.system_xid).to 
eq(::API::Ci::Helpers::Runner::LEGACY_SYSTEM_XID) + expect(current_runner_machine.runner).to eq(runner) + end + end + end + + context 'with create_runner_machine FF disabled' do + before do + stub_feature_flags(create_runner_machine: false) + end + + it 'does not return runner machine if no system_id specified' do + allow(helper).to receive(:params).and_return(token: runner.token) + + is_expected.to be_nil + end + + context 'when runner machine can not be found' do + before do + allow(helper).to receive(:params).and_return(token: runner.token, system_id: 'new_system_id') + end + + it 'does not create a new runner machine', :aggregate_failures do + expect { current_runner_machine }.not_to change { Ci::RunnerMachine.count } + + expect(current_runner_machine).to be_nil + end + end + end + end + + describe '#track_runner_authentication', :prometheus, feature_category: :runner do subject { helper.track_runner_authentication } let(:runner) { create(:ci_runner, token: 'foo') } diff --git a/spec/lib/api/entities/draft_note_spec.rb b/spec/lib/api/entities/draft_note_spec.rb new file mode 100644 index 00000000000..59555319bb1 --- /dev/null +++ b/spec/lib/api/entities/draft_note_spec.rb @@ -0,0 +1,18 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe API::Entities::DraftNote, feature_category: :code_review_workflow do + let_it_be(:entity) { create(:draft_note, :on_discussion) } + let_it_be(:json) { entity.as_json } + + it 'exposes correct attributes' do + expect(json["id"]).to eq entity.id + expect(json["author_id"]).to eq entity.author_id + expect(json["merge_request_id"]).to eq entity.merge_request_id + expect(json["resolve_discussion"]).to eq entity.resolve_discussion + expect(json["discussion_id"]).to eq entity.discussion_id + expect(json["note"]).to eq entity.note + expect(json["position"].transform_keys(&:to_sym)).to eq entity.position.to_h + end +end diff --git a/spec/lib/api/entities/merge_request_basic_spec.rb 
b/spec/lib/api/entities/merge_request_basic_spec.rb index bb0e25d2613..33f8a806c50 100644 --- a/spec/lib/api/entities/merge_request_basic_spec.rb +++ b/spec/lib/api/entities/merge_request_basic_spec.rb @@ -4,11 +4,9 @@ require 'spec_helper' RSpec.describe ::API::Entities::MergeRequestBasic do let_it_be(:user) { create(:user) } - let_it_be(:project) { create(:project, :public) } let_it_be(:merge_request) { create(:merge_request) } let_it_be(:labels) { create_list(:label, 3) } let_it_be(:merge_requests) { create_list(:labeled_merge_request, 10, :unique_branches, labels: labels) } - let_it_be(:entity) { described_class.new(merge_request) } # This mimics the behavior of the `Grape::Entity` serializer @@ -16,7 +14,7 @@ RSpec.describe ::API::Entities::MergeRequestBasic do described_class.new(obj).presented end - subject { entity.as_json } + subject(:json) { entity.as_json } it 'includes expected fields' do expected_fields = %i[ @@ -57,7 +55,7 @@ RSpec.describe ::API::Entities::MergeRequestBasic do end end - context 'reviewers' do + describe 'reviewers' do before do merge_request.reviewers = [user] end @@ -68,4 +66,26 @@ RSpec.describe ::API::Entities::MergeRequestBasic do expect(result['reviewers'][0]['username']).to eq user.username end end + + describe 'squash' do + subject { json[:squash] } + + before do + merge_request.target_project.project_setting.update!(squash_option: :never) + merge_request.update!(squash: true) + end + + it { is_expected.to eq(true) } + end + + describe 'squash_on_merge' do + subject { json[:squash_on_merge] } + + before do + merge_request.target_project.project_setting.update!(squash_option: :never) + merge_request.update!(squash: true) + end + + it { is_expected.to eq(false) } + end end diff --git a/spec/lib/api/entities/ml/mlflow/run_info_spec.rb b/spec/lib/api/entities/ml/mlflow/run_info_spec.rb index db8f106c9fe..b64a1555332 100644 --- a/spec/lib/api/entities/ml/mlflow/run_info_spec.rb +++ b/spec/lib/api/entities/ml/mlflow/run_info_spec.rb 
@@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe API::Entities::Ml::Mlflow::RunInfo do +RSpec.describe API::Entities::Ml::Mlflow::RunInfo, feature_category: :mlops do let_it_be(:candidate) { create(:ml_candidates) } subject { described_class.new(candidate, packages_url: 'http://example.com').as_json } diff --git a/spec/lib/api/entities/release_spec.rb b/spec/lib/api/entities/release_spec.rb index d1e5f191614..e750b82011b 100644 --- a/spec/lib/api/entities/release_spec.rb +++ b/spec/lib/api/entities/release_spec.rb @@ -77,4 +77,16 @@ RSpec.describe API::Entities::Release do end end end + + describe 'links' do + subject(:links) { entity.as_json['_links'] } + + before do + project.add_developer(user) + end + + it 'includes links' do + expect(links.keys).to include('closed_issues_url', 'closed_merge_requests_url', 'edit_url', 'merged_merge_requests_url', 'opened_issues_url', 'opened_merge_requests_url', 'self') + end + end end diff --git a/spec/lib/api/entities/user_spec.rb b/spec/lib/api/entities/user_spec.rb index 407f2894f01..3094fc748c9 100644 --- a/spec/lib/api/entities/user_spec.rb +++ b/spec/lib/api/entities/user_spec.rb @@ -18,7 +18,7 @@ RSpec.describe API::Entities::User do # UserBasic :state, :avatar_url, :web_url, # User - :created_at, :bio, :location, :public_email, :skype, :linkedin, :twitter, + :created_at, :bio, :location, :public_email, :skype, :linkedin, :twitter, :discord, :website_url, :organization, :job_title, :pronouns, :bot, :work_information, :followers, :following, :is_followed, :local_time ) diff --git a/spec/lib/api/helpers/caching_spec.rb b/spec/lib/api/helpers/caching_spec.rb index 828af7b5f91..03025823357 100644 --- a/spec/lib/api/helpers/caching_spec.rb +++ b/spec/lib/api/helpers/caching_spec.rb @@ -47,12 +47,14 @@ RSpec.describe API::Helpers::Caching, :use_clean_rails_redis_caching do context 'single object' do let_it_be(:presentable) { create(:todo, project: project) } + let(:expected_cache_key_prefix) { 'API::Entities::Todo' } 
it_behaves_like 'object cache helper' end context 'collection of objects' do let_it_be(:presentable) { Array.new(5).map { create(:todo, project: project) } } + let(:expected_cache_key_prefix) { 'API::Entities::Todo' } it_behaves_like 'collection cache helper' end diff --git a/spec/lib/api/helpers/packages_helpers_spec.rb b/spec/lib/api/helpers/packages_helpers_spec.rb index de9d139a7b6..2a663d5e9b2 100644 --- a/spec/lib/api/helpers/packages_helpers_spec.rb +++ b/spec/lib/api/helpers/packages_helpers_spec.rb @@ -275,7 +275,6 @@ RSpec.describe API::Helpers::PackagesHelpers, feature_category: :package_registr let(:category) { described_class.name } let(:namespace) { project.namespace } let(:user) { project.creator } - let(:feature_flag_name) { nil } let(:label) { 'redis_hll_counters.user_packages.user_packages_total_unique_counts_monthly' } let(:property) { 'i_package_terraform_module_user' } @@ -284,5 +283,60 @@ RSpec.describe API::Helpers::PackagesHelpers, feature_category: :package_registr helper.track_package_event(action, scope, **args) end end + + context 'when using deploy token and action is push package' do + let(:user) { create(:deploy_token, write_registry: true, projects: [project]) } + let(:scope) { :rubygems } + let(:category) { 'API::RubygemPackages' } + let(:namespace) { project.namespace } + let(:label) { 'counts.package_events_i_package_push_package_by_deploy_token' } + let(:property) { 'i_package_push_package_by_deploy_token' } + let(:service_ping_context) do + [Gitlab::Tracking::ServicePingContext.new(data_source: :redis, key_path: 'counts.package_events_i_package_push_package_by_deploy_token').to_h] + end + + it 'logs a snowplow event' do + args = { category: category, namespace: namespace, project: project } + helper.track_package_event('push_package', scope, **args) + + expect_snowplow_event( + category: category, + action: 'push_package_by_deploy_token', + context: service_ping_context, + label: label, + namespace: namespace, + property: 
property, + project: project + ) + end + end + + context 'when guest and action is pull package' do + let(:user) { nil } + let(:scope) { :rubygems } + let(:category) { 'API::RubygemPackages' } + let(:namespace) { project.namespace } + let(:label) { 'counts.package_events_i_package_pull_package_by_guest' } + let(:property) { 'i_package_pull_package_by_guest' } + let(:service_ping_context) do + [Gitlab::Tracking::ServicePingContext.new(data_source: :redis, key_path: 'counts.package_events_i_package_pull_package_by_guest').to_h] + end + + it 'logs a snowplow event' do + allow(helper).to receive(:current_user).and_return(nil) + args = { category: category, namespace: namespace, project: project } + helper.track_package_event('pull_package', scope, **args) + + expect_snowplow_event( + category: category, + action: 'pull_package_by_guest', + context: service_ping_context, + label: label, + namespace: namespace, + property: property, + project: project + ) + end + end end end diff --git a/spec/lib/api/helpers_spec.rb b/spec/lib/api/helpers_spec.rb index a0f5ee1ea95..0fcf36ca9dd 100644 --- a/spec/lib/api/helpers_spec.rb +++ b/spec/lib/api/helpers_spec.rb @@ -821,7 +821,7 @@ RSpec.describe API::Helpers, feature_category: :not_owned do it 'redirects to a CDN-fronted URL' do expect(helper).to receive(:redirect) - expect(helper).to receive(:signed_head_url).and_call_original + expect(ObjectStorage::S3).to receive(:signed_head_url).and_call_original expect(Gitlab::ApplicationContext).to receive(:push).with(artifact: artifact.file.model).and_call_original subject diff --git a/spec/lib/atlassian/jira_connect/client_spec.rb b/spec/lib/atlassian/jira_connect/client_spec.rb index a8ee28d3714..f1f9dd38947 100644 --- a/spec/lib/atlassian/jira_connect/client_spec.rb +++ b/spec/lib/atlassian/jira_connect/client_spec.rb @@ -199,7 +199,7 @@ RSpec.describe Atlassian::JiraConnect::Client, feature_category: :integrations d let(:request) { instance_double(HTTParty::Request, raw_body: '{ 
"foo": 1, "bar": 2 }') } it 'returns the request body' do - expect(subject.send(:request_body_schema, response)).to eq({ "foo" => nil, "bar" => nil }) + expect(subject.send(:request_body_schema, response)).to eq({ "foo" => 1, "bar" => 2 }) end end diff --git a/spec/lib/backup/database_spec.rb b/spec/lib/backup/database_spec.rb index ed5b34b7f8c..c70d47e4940 100644 --- a/spec/lib/backup/database_spec.rb +++ b/spec/lib/backup/database_spec.rb @@ -2,11 +2,23 @@ require 'spec_helper' -RSpec.describe Backup::Database do +RSpec.configure do |rspec| + rspec.expect_with :rspec do |c| + c.max_formatted_output_length = nil + end +end + +RSpec.describe Backup::Database, feature_category: :backup_restore do let(:progress) { StringIO.new } let(:output) { progress.string } + let(:one_db_configured?) { Gitlab::Database.database_base_models.one? } + let(:database_models_for_backup) { Gitlab::Database.database_base_models_with_gitlab_shared } + let(:timeout_service) do + instance_double(Gitlab::Database::TransactionTimeoutSettings, restore_timeouts: nil, disable_timeouts: nil) + end before(:all) do + Rake::Task.define_task(:environment) Rake.application.rake_require 'active_record/railties/databases' Rake.application.rake_require 'tasks/gitlab/backup' Rake.application.rake_require 'tasks/gitlab/shell' @@ -14,14 +26,136 @@ RSpec.describe Backup::Database do Rake.application.rake_require 'tasks/cache' end + describe '#dump', :delete do + let(:backup_id) { 'some_id' } + let(:force) { true } + + subject { described_class.new(progress, force: force) } + + before do + database_models_for_backup.each do |database_name, base_model| + base_model.connection.rollback_transaction unless base_model.connection.open_transactions.zero? 
+ allow(base_model.connection).to receive(:execute).and_call_original + end + end + + it 'creates gzipped database dumps' do + Dir.mktmpdir do |dir| + subject.dump(dir, backup_id) + + database_models_for_backup.each_key do |database_name| + filename = database_name == 'main' ? 'database.sql.gz' : "#{database_name}_database.sql.gz" + expect(File.exist?(File.join(dir, filename))).to eq(true) + end + end + end + + it 'uses snapshots' do + Dir.mktmpdir do |dir| + base_model = Gitlab::Database.database_base_models['main'] + expect(base_model.connection).to receive(:begin_transaction).with( + isolation: :repeatable_read + ).and_call_original + expect(base_model.connection).to receive(:execute).with( + "SELECT pg_export_snapshot() as snapshot_id;" + ).and_call_original + expect(base_model.connection).to receive(:rollback_transaction).and_call_original + + subject.dump(dir, backup_id) + end + end + + it 'disables transaction time out' do + number_of_databases = Gitlab::Database.database_base_models_with_gitlab_shared.count + expect(Gitlab::Database::TransactionTimeoutSettings) + .to receive(:new).exactly(2 * number_of_databases).times.and_return(timeout_service) + expect(timeout_service).to receive(:disable_timeouts).exactly(number_of_databases).times + expect(timeout_service).to receive(:restore_timeouts).exactly(number_of_databases).times + + Dir.mktmpdir do |dir| + subject.dump(dir, backup_id) + end + end + + describe 'pg_dump arguments' do + let(:snapshot_id) { 'fake_id' } + let(:pg_args) do + [ + '--clean', + '--if-exists', + "--snapshot=#{snapshot_id}" + ] + end + + let(:dumper) { double } + let(:destination_dir) { 'tmp' } + + before do + allow(Backup::Dump::Postgres).to receive(:new).and_return(dumper) + allow(dumper).to receive(:dump).with(any_args).and_return(true) + + database_models_for_backup.each do |database_name, base_model| + allow(base_model.connection).to receive(:execute).with( + "SELECT pg_export_snapshot() as snapshot_id;" + ).and_return(['snapshot_id' 
=> snapshot_id]) + end + end + + it 'calls Backup::Dump::Postgres with correct pg_dump arguments' do + expect(dumper).to receive(:dump).with(anything, anything, pg_args) + + subject.dump(destination_dir, backup_id) + end + + context 'when a PostgreSQL schema is used' do + let(:schema) { 'gitlab' } + let(:additional_args) do + pg_args + ['-n', schema] + Gitlab::Database::EXTRA_SCHEMAS.flat_map do |schema| + ['-n', schema.to_s] + end + end + + before do + allow(Gitlab.config.backup).to receive(:pg_schema).and_return(schema) + end + + it 'calls Backup::Dump::Postgres with correct pg_dump arguments' do + expect(dumper).to receive(:dump).with(anything, anything, additional_args) + + subject.dump(destination_dir, backup_id) + end + end + end + + context 'when a StandardError (or descendant) is raised' do + before do + allow(FileUtils).to receive(:mkdir_p).and_raise(StandardError) + end + + it 'restores timeouts' do + Dir.mktmpdir do |dir| + number_of_databases = Gitlab::Database.database_base_models_with_gitlab_shared.count + expect(Gitlab::Database::TransactionTimeoutSettings) + .to receive(:new).exactly(number_of_databases).times.and_return(timeout_service) + expect(timeout_service).to receive(:restore_timeouts).exactly(number_of_databases).times + + expect { subject.dump(dir, backup_id) }.to raise_error StandardError + end + end + end + end + describe '#restore' do let(:cmd) { %W[#{Gem.ruby} -e $stdout.puts(1)] } - let(:data) { Rails.root.join("spec/fixtures/pages_empty.tar.gz").to_s } + let(:backup_dir) { Rails.root.join("spec/fixtures/") } let(:force) { true } + let(:rake_task) { instance_double(Rake::Task, invoke: true) } - subject { described_class.new(Gitlab::Database::MAIN_DATABASE_NAME.to_sym, progress, force: force) } + subject { described_class.new(progress, force: force) } before do + allow(Rake::Task).to receive(:[]).with(any_args).and_return(rake_task) + allow(subject).to receive(:pg_restore_cmd).and_return(cmd) end @@ -30,9 +164,14 @@ RSpec.describe 
Backup::Database do it 'warns the user and waits' do expect(subject).to receive(:sleep) - expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke) - subject.restore(data) + if one_db_configured? + expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke) + else + expect(Rake::Task['gitlab:db:drop_tables:main']).to receive(:invoke) + end + + subject.restore(backup_dir) expect(output).to include('Removing all tables. Press `Ctrl-C` within 5 seconds to abort') end @@ -43,12 +182,14 @@ RSpec.describe Backup::Database do end context 'with an empty .gz file' do - let(:data) { Rails.root.join("spec/fixtures/pages_empty.tar.gz").to_s } - it 'returns successfully' do - expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke) + if one_db_configured? + expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke) + else + expect(Rake::Task['gitlab:db:drop_tables:main']).to receive(:invoke) + end - subject.restore(data) + subject.restore(backup_dir) expect(output).to include("Restoring PostgreSQL database") expect(output).to include("[DONE]") @@ -57,12 +198,18 @@ RSpec.describe Backup::Database do end context 'with a corrupted .gz file' do - let(:data) { Rails.root.join("spec/fixtures/big-image.png").to_s } + before do + allow(subject).to receive(:file_name).and_return("#{backup_dir}big-image.png") + end it 'raises a backup error' do - expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke) + if one_db_configured? 
+ expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke) + else + expect(Rake::Task['gitlab:db:drop_tables:main']).to receive(:invoke) + end - expect { subject.restore(data) }.to raise_error(Backup::Error) + expect { subject.restore(backup_dir) }.to raise_error(Backup::Error) end end @@ -72,9 +219,13 @@ RSpec.describe Backup::Database do let(:cmd) { %W[#{Gem.ruby} -e $stderr.write("#{noise}#{visible_error}")] } it 'filters out noise from errors and has a post restore warning' do - expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke) + if one_db_configured? + expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke) + else + expect(Rake::Task['gitlab:db:drop_tables:main']).to receive(:invoke) + end - subject.restore(data) + subject.restore(backup_dir) expect(output).to include("ERRORS") expect(output).not_to include(noise) @@ -95,9 +246,13 @@ RSpec.describe Backup::Database do end it 'overrides default config values' do - expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke) + if one_db_configured? 
+ expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke) + else + expect(Rake::Task['gitlab:db:drop_tables:main']).to receive(:invoke) + end - subject.restore(data) + subject.restore(backup_dir) expect(output).to include(%("PGHOST"=>"test.example.com")) expect(output).to include(%("PGPASSWORD"=>"donotchange")) @@ -107,22 +262,30 @@ RSpec.describe Backup::Database do end context 'when the source file is missing' do - let(:main_database) { described_class.new(Gitlab::Database::MAIN_DATABASE_NAME.to_sym, progress, force: force) } - let(:ci_database) { described_class.new(Gitlab::Database::CI_DATABASE_NAME.to_sym, progress, force: force) } - let(:missing_file) { Rails.root.join("spec/fixtures/missing_file.tar.gz").to_s } + context 'for main database' do + before do + allow(File).to receive(:exist?).and_call_original + allow(File).to receive(:exist?).with("#{backup_dir}database.sql.gz").and_return(false) + allow(File).to receive(:exist?).with("#{backup_dir}ci_database.sql.gz").and_return(false) + end - it 'main database raises an error about missing source file' do - expect(Rake::Task['gitlab:db:drop_tables']).not_to receive(:invoke) + it 'raises an error about missing source file' do + if one_db_configured? 
+ expect(Rake::Task['gitlab:db:drop_tables']).not_to receive(:invoke) + else + expect(Rake::Task['gitlab:db:drop_tables:main']).not_to receive(:invoke) + end - expect do - main_database.restore(missing_file) - end.to raise_error(Backup::Error, /Source database file does not exist/) + expect do + subject.restore('db') + end.to raise_error(Backup::Error, /Source database file does not exist/) + end end - it 'ci database tolerates missing source file' do - expect(Rake::Task['gitlab:db:drop_tables']).not_to receive(:invoke) - skip_if_multiple_databases_not_setup - expect { ci_database.restore(missing_file) }.not_to raise_error + context 'for ci database' do + it 'ci database tolerates missing source file' do + expect { subject.restore(backup_dir) }.not_to raise_error + end end end end diff --git a/spec/lib/backup/dump/postgres_spec.rb b/spec/lib/backup/dump/postgres_spec.rb new file mode 100644 index 00000000000..f6a68ab6db9 --- /dev/null +++ b/spec/lib/backup/dump/postgres_spec.rb @@ -0,0 +1,36 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Backup::Dump::Postgres, feature_category: :backup_restore do + describe '#dump' do + let(:pg_database) { 'gitlabhq_test' } + let(:destination_dir) { Dir.mktmpdir } + let(:db_file_name) { File.join(destination_dir, 'output.gz') } + + let(:pipes) { IO.pipe } + let(:gzip_pid) { spawn('gzip -c -1', in: pipes[0], out: [db_file_name, 'w', 0o600]) } + let(:pg_dump_pid) { Process.spawn('pg_dump', *args, pg_database, out: pipes[1]) } + let(:args) { ['--help'] } + + subject { described_class.new } + + before do + allow(IO).to receive(:pipe).and_return(pipes) + end + + after do + FileUtils.remove_entry destination_dir + end + + it 'creates gzipped dump using supplied arguments' do + expect(subject).to receive(:spawn).with('gzip -c -1', in: pipes.first, + out: [db_file_name, 'w', 0o600]).and_return(gzip_pid) + expect(Process).to receive(:spawn).with('pg_dump', *args, pg_database, out: 
pipes[1]).and_return(pg_dump_pid) + + subject.dump(pg_database, db_file_name, args) + + expect(File.exist?(db_file_name)).to eq(true) + end + end +end diff --git a/spec/lib/backup/manager_spec.rb b/spec/lib/backup/manager_spec.rb index 992dbec73c2..02889c1535d 100644 --- a/spec/lib/backup/manager_spec.rb +++ b/spec/lib/backup/manager_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Backup::Manager do +RSpec.describe Backup::Manager, feature_category: :backup_restore do include StubENV let(:progress) { StringIO.new } @@ -30,8 +30,7 @@ RSpec.describe Backup::Manager do task: task, enabled: enabled, destination_path: 'my_task.tar.gz', - human_name: 'my task', - task_group: 'group1' + human_name: 'my task' ) } end @@ -63,16 +62,6 @@ RSpec.describe Backup::Manager do subject.run_create_task('my_task') end end - - describe 'task group skipped' do - it 'informs the user' do - stub_env('SKIP', 'group1') - - expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Dumping my task ... 
[SKIPPED]') - - subject.run_create_task('my_task') - end - end end describe '#run_restore_task' do diff --git a/spec/lib/banzai/color_parser_spec.rb b/spec/lib/banzai/color_parser_spec.rb index 3914aee2d4c..5647a81675e 100644 --- a/spec/lib/banzai/color_parser_spec.rb +++ b/spec/lib/banzai/color_parser_spec.rb @@ -2,7 +2,7 @@ require 'fast_spec_helper' -RSpec.describe Banzai::ColorParser do +RSpec.describe Banzai::ColorParser, feature_category: :team_planning do describe '.parse' do context 'HEX format' do [ diff --git a/spec/lib/banzai/commit_renderer_spec.rb b/spec/lib/banzai/commit_renderer_spec.rb index a10dd6eb3a2..35e0e20582d 100644 --- a/spec/lib/banzai/commit_renderer_spec.rb +++ b/spec/lib/banzai/commit_renderer_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::CommitRenderer do +RSpec.describe Banzai::CommitRenderer, feature_category: :source_code_management do describe '.render', :clean_gitlab_redis_cache do it 'renders a commit description and title' do user = build(:user) diff --git a/spec/lib/banzai/cross_project_reference_spec.rb b/spec/lib/banzai/cross_project_reference_spec.rb index 8748a910003..1c861068f16 100644 --- a/spec/lib/banzai/cross_project_reference_spec.rb +++ b/spec/lib/banzai/cross_project_reference_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::CrossProjectReference do +RSpec.describe Banzai::CrossProjectReference, feature_category: :team_planning do let(:including_class) { Class.new.include(described_class).new } let(:reference_cache) { Banzai::Filter::References::ReferenceCache.new(including_class, {}, {}) } diff --git a/spec/lib/banzai/filter/absolute_link_filter_spec.rb b/spec/lib/banzai/filter/absolute_link_filter_spec.rb index 0c159e8bac8..3e7678e3f9a 100644 --- a/spec/lib/banzai/filter/absolute_link_filter_spec.rb +++ b/spec/lib/banzai/filter/absolute_link_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::AbsoluteLinkFilter do +RSpec.describe 
Banzai::Filter::AbsoluteLinkFilter, feature_category: :team_planning do def filter(doc, context = {}) described_class.call(doc, context) end diff --git a/spec/lib/banzai/filter/ascii_doc_post_processing_filter_spec.rb b/spec/lib/banzai/filter/ascii_doc_post_processing_filter_spec.rb index 7af22ea7db1..3bd48bdd6f7 100644 --- a/spec/lib/banzai/filter/ascii_doc_post_processing_filter_spec.rb +++ b/spec/lib/banzai/filter/ascii_doc_post_processing_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::AsciiDocPostProcessingFilter do +RSpec.describe Banzai::Filter::AsciiDocPostProcessingFilter, feature_category: :wiki do include FilterSpecHelper it "adds class for elements with data-math-style" do diff --git a/spec/lib/banzai/filter/ascii_doc_sanitization_filter_spec.rb b/spec/lib/banzai/filter/ascii_doc_sanitization_filter_spec.rb index 272b4386ec8..1a76b58bc43 100644 --- a/spec/lib/banzai/filter/ascii_doc_sanitization_filter_spec.rb +++ b/spec/lib/banzai/filter/ascii_doc_sanitization_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::AsciiDocSanitizationFilter do +RSpec.describe Banzai::Filter::AsciiDocSanitizationFilter, feature_category: :wiki do include FilterSpecHelper it 'preserves footnotes refs' do diff --git a/spec/lib/banzai/filter/asset_proxy_filter_spec.rb b/spec/lib/banzai/filter/asset_proxy_filter_spec.rb index 81aa8d35ebc..004c70c28f1 100644 --- a/spec/lib/banzai/filter/asset_proxy_filter_spec.rb +++ b/spec/lib/banzai/filter/asset_proxy_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::AssetProxyFilter do +RSpec.describe Banzai::Filter::AssetProxyFilter, feature_category: :team_planning do include FilterSpecHelper def image(path) diff --git a/spec/lib/banzai/filter/attributes_filter_spec.rb b/spec/lib/banzai/filter/attributes_filter_spec.rb index cef5e24cdaa..72d78960474 100644 --- a/spec/lib/banzai/filter/attributes_filter_spec.rb +++ 
b/spec/lib/banzai/filter/attributes_filter_spec.rb @@ -66,6 +66,8 @@ RSpec.describe Banzai::Filter::AttributesFilter, feature_category: :team_plannin where(:text, :result) do "#{image}{width=100cs}" | '<img src="example.jpg">' "#{image}{width=auto height=200}" | '<img src="example.jpg" height="200">' + "#{image}{width=10000}" | '<img src="example.jpg">' + "#{image}{width=-200}" | '<img src="example.jpg">' end with_them do diff --git a/spec/lib/banzai/filter/audio_link_filter_spec.rb b/spec/lib/banzai/filter/audio_link_filter_spec.rb index 71e069eb29f..38d1ed40a84 100644 --- a/spec/lib/banzai/filter/audio_link_filter_spec.rb +++ b/spec/lib/banzai/filter/audio_link_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::AudioLinkFilter do +RSpec.describe Banzai::Filter::AudioLinkFilter, feature_category: :team_planning do def filter(doc, contexts = {}) contexts.reverse_merge!({ project: project diff --git a/spec/lib/banzai/filter/autolink_filter_spec.rb b/spec/lib/banzai/filter/autolink_filter_spec.rb index 75108130602..2c75377ec42 100644 --- a/spec/lib/banzai/filter/autolink_filter_spec.rb +++ b/spec/lib/banzai/filter/autolink_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::AutolinkFilter do +RSpec.describe Banzai::Filter::AutolinkFilter, feature_category: :team_planning do include FilterSpecHelper let(:link) { 'http://about.gitlab.com/' } diff --git a/spec/lib/banzai/filter/blockquote_fence_filter_spec.rb b/spec/lib/banzai/filter/blockquote_fence_filter_spec.rb index 5712ed7da1f..575d4879f84 100644 --- a/spec/lib/banzai/filter/blockquote_fence_filter_spec.rb +++ b/spec/lib/banzai/filter/blockquote_fence_filter_spec.rb @@ -2,10 +2,10 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::BlockquoteFenceFilter do +RSpec.describe Banzai::Filter::BlockquoteFenceFilter, feature_category: :team_planning do include FilterSpecHelper - it 'converts blockquote fences to blockquote lines' do + it 'converts 
blockquote fences to blockquote lines', :unlimited_max_formatted_output_length do content = File.read(Rails.root.join('spec/fixtures/blockquote_fence_before.md')) expected = File.read(Rails.root.join('spec/fixtures/blockquote_fence_after.md')) diff --git a/spec/lib/banzai/filter/broadcast_message_placeholders_filter_spec.rb b/spec/lib/banzai/filter/broadcast_message_placeholders_filter_spec.rb index c581750d2a9..fc88c5539ca 100644 --- a/spec/lib/banzai/filter/broadcast_message_placeholders_filter_spec.rb +++ b/spec/lib/banzai/filter/broadcast_message_placeholders_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::BroadcastMessagePlaceholdersFilter do +RSpec.describe Banzai::Filter::BroadcastMessagePlaceholdersFilter, feature_category: :team_planning do include FilterSpecHelper subject { filter(text, current_user: user, broadcast_message_placeholders: true).to_html } diff --git a/spec/lib/banzai/filter/broadcast_message_sanitization_filter_spec.rb b/spec/lib/banzai/filter/broadcast_message_sanitization_filter_spec.rb index 67b480f8973..3b054862a26 100644 --- a/spec/lib/banzai/filter/broadcast_message_sanitization_filter_spec.rb +++ b/spec/lib/banzai/filter/broadcast_message_sanitization_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::BroadcastMessageSanitizationFilter do +RSpec.describe Banzai::Filter::BroadcastMessageSanitizationFilter, feature_category: :team_planning do include FilterSpecHelper it_behaves_like 'default allowlist' diff --git a/spec/lib/banzai/filter/color_filter_spec.rb b/spec/lib/banzai/filter/color_filter_spec.rb index dced3671323..9a3f765b869 100644 --- a/spec/lib/banzai/filter/color_filter_spec.rb +++ b/spec/lib/banzai/filter/color_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::ColorFilter, lib: true do +RSpec.describe Banzai::Filter::ColorFilter, feature_category: :team_planning, lib: true do include FilterSpecHelper let(:color) { 
'#F00' } diff --git a/spec/lib/banzai/filter/custom_emoji_filter_spec.rb b/spec/lib/banzai/filter/custom_emoji_filter_spec.rb index 6e29b910a6c..7fd25eac81b 100644 --- a/spec/lib/banzai/filter/custom_emoji_filter_spec.rb +++ b/spec/lib/banzai/filter/custom_emoji_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::CustomEmojiFilter do +RSpec.describe Banzai::Filter::CustomEmojiFilter, feature_category: :team_planning do include FilterSpecHelper let_it_be(:group) { create(:group) } diff --git a/spec/lib/banzai/filter/emoji_filter_spec.rb b/spec/lib/banzai/filter/emoji_filter_spec.rb index d621f63211b..1950b0f8bfe 100644 --- a/spec/lib/banzai/filter/emoji_filter_spec.rb +++ b/spec/lib/banzai/filter/emoji_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::EmojiFilter do +RSpec.describe Banzai::Filter::EmojiFilter, feature_category: :team_planning do include FilterSpecHelper it_behaves_like 'emoji filter' do diff --git a/spec/lib/banzai/filter/external_link_filter_spec.rb b/spec/lib/banzai/filter/external_link_filter_spec.rb index 036817834d5..3f72896939d 100644 --- a/spec/lib/banzai/filter/external_link_filter_spec.rb +++ b/spec/lib/banzai/filter/external_link_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.shared_examples 'an external link with rel attribute' do +RSpec.shared_examples 'an external link with rel attribute', feature_category: :team_planning do it 'adds rel="nofollow" to external links' do expect(doc.at_css('a')).to have_attribute('rel') expect(doc.at_css('a')['rel']).to include 'nofollow' diff --git a/spec/lib/banzai/filter/footnote_filter_spec.rb b/spec/lib/banzai/filter/footnote_filter_spec.rb index 26bca571fdc..4b765191449 100644 --- a/spec/lib/banzai/filter/footnote_filter_spec.rb +++ b/spec/lib/banzai/filter/footnote_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::FootnoteFilter do +RSpec.describe Banzai::Filter::FootnoteFilter, 
feature_category: :team_planning do include FilterSpecHelper using RSpec::Parameterized::TableSyntax diff --git a/spec/lib/banzai/filter/front_matter_filter_spec.rb b/spec/lib/banzai/filter/front_matter_filter_spec.rb index f3543ab9582..b15f3ad2cd6 100644 --- a/spec/lib/banzai/filter/front_matter_filter_spec.rb +++ b/spec/lib/banzai/filter/front_matter_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::FrontMatterFilter do +RSpec.describe Banzai::Filter::FrontMatterFilter, feature_category: :team_planning do include FilterSpecHelper it 'allows for `encoding:` before the front matter' do @@ -114,7 +114,7 @@ RSpec.describe Banzai::Filter::FrontMatterFilter do foo: :foo_symbol - --- + --- # Header diff --git a/spec/lib/banzai/filter/gollum_tags_filter_spec.rb b/spec/lib/banzai/filter/gollum_tags_filter_spec.rb index 23626576c0c..f30262ef9df 100644 --- a/spec/lib/banzai/filter/gollum_tags_filter_spec.rb +++ b/spec/lib/banzai/filter/gollum_tags_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::GollumTagsFilter do +RSpec.describe Banzai::Filter::GollumTagsFilter, feature_category: :wiki do include FilterSpecHelper let(:project) { create(:project) } diff --git a/spec/lib/banzai/filter/html_entity_filter_spec.rb b/spec/lib/banzai/filter/html_entity_filter_spec.rb index d88fa21cde7..6de3fa50a1a 100644 --- a/spec/lib/banzai/filter/html_entity_filter_spec.rb +++ b/spec/lib/banzai/filter/html_entity_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::HtmlEntityFilter do +RSpec.describe Banzai::Filter::HtmlEntityFilter, feature_category: :team_planning do include FilterSpecHelper let(:unescaped) { 'foo <strike attr="foo">&&&</strike>' } diff --git a/spec/lib/banzai/filter/image_lazy_load_filter_spec.rb b/spec/lib/banzai/filter/image_lazy_load_filter_spec.rb index 5b32be0ea62..c5bcfe1f384 100644 --- a/spec/lib/banzai/filter/image_lazy_load_filter_spec.rb +++ 
b/spec/lib/banzai/filter/image_lazy_load_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::ImageLazyLoadFilter do +RSpec.describe Banzai::Filter::ImageLazyLoadFilter, feature_category: :team_planning do include FilterSpecHelper def image(path) diff --git a/spec/lib/banzai/filter/image_link_filter_spec.rb b/spec/lib/banzai/filter/image_link_filter_spec.rb index 78d68697ac7..2d496c447e1 100644 --- a/spec/lib/banzai/filter/image_link_filter_spec.rb +++ b/spec/lib/banzai/filter/image_link_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::ImageLinkFilter do +RSpec.describe Banzai::Filter::ImageLinkFilter, feature_category: :team_planning do include FilterSpecHelper let(:path) { '/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg' } diff --git a/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb b/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb index a11fe203541..1fdb29b688e 100644 --- a/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb +++ b/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::IssuableReferenceExpansionFilter do +RSpec.describe Banzai::Filter::IssuableReferenceExpansionFilter, feature_category: :team_planning do include FilterSpecHelper let_it_be(:user) { create(:user) } diff --git a/spec/lib/banzai/filter/jira_import/adf_to_commonmark_filter_spec.rb b/spec/lib/banzai/filter/jira_import/adf_to_commonmark_filter_spec.rb index 287b5774048..5f971372dcc 100644 --- a/spec/lib/banzai/filter/jira_import/adf_to_commonmark_filter_spec.rb +++ b/spec/lib/banzai/filter/jira_import/adf_to_commonmark_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::JiraImport::AdfToCommonmarkFilter do +RSpec.describe Banzai::Filter::JiraImport::AdfToCommonmarkFilter, feature_category: :team_planning do include FilterSpecHelper 
let_it_be(:fixtures_path) { 'lib/kramdown/atlassian_document_format' } diff --git a/spec/lib/banzai/filter/kroki_filter_spec.rb b/spec/lib/banzai/filter/kroki_filter_spec.rb index 3f4f3aafdd6..a528c5835b2 100644 --- a/spec/lib/banzai/filter/kroki_filter_spec.rb +++ b/spec/lib/banzai/filter/kroki_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::KrokiFilter do +RSpec.describe Banzai::Filter::KrokiFilter, feature_category: :team_planning do include FilterSpecHelper it 'replaces nomnoml pre tag with img tag if kroki is enabled' do diff --git a/spec/lib/banzai/filter/markdown_filter_spec.rb b/spec/lib/banzai/filter/markdown_filter_spec.rb index e3c8d121587..c79cd58255d 100644 --- a/spec/lib/banzai/filter/markdown_filter_spec.rb +++ b/spec/lib/banzai/filter/markdown_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::MarkdownFilter do +RSpec.describe Banzai::Filter::MarkdownFilter, feature_category: :team_planning do include FilterSpecHelper describe 'markdown engine from context' do diff --git a/spec/lib/banzai/filter/mermaid_filter_spec.rb b/spec/lib/banzai/filter/mermaid_filter_spec.rb index c9bfcffe98f..de558a0774d 100644 --- a/spec/lib/banzai/filter/mermaid_filter_spec.rb +++ b/spec/lib/banzai/filter/mermaid_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::MermaidFilter do +RSpec.describe Banzai::Filter::MermaidFilter, feature_category: :team_planning do include FilterSpecHelper it 'adds `js-render-mermaid` class to the `code` tag' do diff --git a/spec/lib/banzai/filter/normalize_source_filter_spec.rb b/spec/lib/banzai/filter/normalize_source_filter_spec.rb index 8eaeec0e7b0..e267674e3b0 100644 --- a/spec/lib/banzai/filter/normalize_source_filter_spec.rb +++ b/spec/lib/banzai/filter/normalize_source_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::NormalizeSourceFilter do +RSpec.describe Banzai::Filter::NormalizeSourceFilter, 
feature_category: :team_planning do include FilterSpecHelper it 'removes the UTF8 BOM from the beginning of the text' do diff --git a/spec/lib/banzai/filter/output_safety_spec.rb b/spec/lib/banzai/filter/output_safety_spec.rb index 8186935f4b2..ec10306e8a8 100644 --- a/spec/lib/banzai/filter/output_safety_spec.rb +++ b/spec/lib/banzai/filter/output_safety_spec.rb @@ -2,7 +2,7 @@ require 'fast_spec_helper' -RSpec.describe Banzai::Filter::OutputSafety do +RSpec.describe Banzai::Filter::OutputSafety, feature_category: :team_planning do subject do Class.new do include Banzai::Filter::OutputSafety diff --git a/spec/lib/banzai/filter/plantuml_filter_spec.rb b/spec/lib/banzai/filter/plantuml_filter_spec.rb index 4373af90cde..a1eabc23327 100644 --- a/spec/lib/banzai/filter/plantuml_filter_spec.rb +++ b/spec/lib/banzai/filter/plantuml_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::PlantumlFilter do +RSpec.describe Banzai::Filter::PlantumlFilter, feature_category: :team_planning do include FilterSpecHelper it 'replaces plantuml pre tag with img tag' do diff --git a/spec/lib/banzai/filter/reference_redactor_filter_spec.rb b/spec/lib/banzai/filter/reference_redactor_filter_spec.rb index a2f34d42814..1d45b692374 100644 --- a/spec/lib/banzai/filter/reference_redactor_filter_spec.rb +++ b/spec/lib/banzai/filter/reference_redactor_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::ReferenceRedactorFilter do +RSpec.describe Banzai::Filter::ReferenceRedactorFilter, feature_category: :team_planning do include FilterSpecHelper it 'ignores non-GFM links' do diff --git a/spec/lib/banzai/filter/references/abstract_reference_filter_spec.rb b/spec/lib/banzai/filter/references/abstract_reference_filter_spec.rb index 3cb3ebc42a6..a8e08530fde 100644 --- a/spec/lib/banzai/filter/references/abstract_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/references/abstract_reference_filter_spec.rb @@ -2,7 +2,7 @@ require 
'spec_helper' -RSpec.describe Banzai::Filter::References::AbstractReferenceFilter do +RSpec.describe Banzai::Filter::References::AbstractReferenceFilter, feature_category: :team_planning do let_it_be(:project) { create(:project) } let(:doc) { Nokogiri::HTML.fragment('') } diff --git a/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb b/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb index c1fdee48f12..6ebf6c3cd1d 100644 --- a/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::References::AlertReferenceFilter do +RSpec.describe Banzai::Filter::References::AlertReferenceFilter, feature_category: :team_planning do include FilterSpecHelper let_it_be(:project) { create(:project, :public) } diff --git a/spec/lib/banzai/filter/references/commit_range_reference_filter_spec.rb b/spec/lib/banzai/filter/references/commit_range_reference_filter_spec.rb index b235de06b30..594a24fa279 100644 --- a/spec/lib/banzai/filter/references/commit_range_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/references/commit_range_reference_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::References::CommitRangeReferenceFilter do +RSpec.describe Banzai::Filter::References::CommitRangeReferenceFilter, feature_category: :source_code_management do include FilterSpecHelper let(:project) { create(:project, :public, :repository) } diff --git a/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb b/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb index c368a852ea9..73e3bf41ee9 100644 --- a/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::References::CommitReferenceFilter do +RSpec.describe 
Banzai::Filter::References::CommitReferenceFilter, feature_category: :source_code_management do include FilterSpecHelper let(:project) { create(:project, :public, :repository) } diff --git a/spec/lib/banzai/filter/references/design_reference_filter_spec.rb b/spec/lib/banzai/filter/references/design_reference_filter_spec.rb index d616aabea45..08de9700cad 100644 --- a/spec/lib/banzai/filter/references/design_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/references/design_reference_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::References::DesignReferenceFilter do +RSpec.describe Banzai::Filter::References::DesignReferenceFilter, feature_category: :design_management do include FilterSpecHelper include DesignManagementTestHelpers diff --git a/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb b/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb index 2e811d35662..d40041d890e 100644 --- a/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::References::ExternalIssueReferenceFilter do +RSpec.describe Banzai::Filter::References::ExternalIssueReferenceFilter, feature_category: :team_planning do include FilterSpecHelper let_it_be_with_refind(:project) { create(:project) } diff --git a/spec/lib/banzai/filter/references/feature_flag_reference_filter_spec.rb b/spec/lib/banzai/filter/references/feature_flag_reference_filter_spec.rb index c64b66f746e..c2f4bf6caa5 100644 --- a/spec/lib/banzai/filter/references/feature_flag_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/references/feature_flag_reference_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::References::FeatureFlagReferenceFilter do +RSpec.describe Banzai::Filter::References::FeatureFlagReferenceFilter, feature_category: 
:feature_flags do include FilterSpecHelper let_it_be(:project) { create(:project, :public) } diff --git a/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb b/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb index 32538948b4b..d8a97c6c3dc 100644 --- a/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::References::IssueReferenceFilter do +RSpec.describe Banzai::Filter::References::IssueReferenceFilter, feature_category: :team_planning do include FilterSpecHelper include DesignManagementTestHelpers @@ -47,57 +47,55 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter do end end - context 'internal reference' do - let(:reference) { "##{issue.iid}" } - + shared_examples 'an internal reference' do it_behaves_like 'a reference containing an element node' it_behaves_like 'a reference with issue type information' it 'links to a valid reference' do - doc = reference_filter("Fixed #{reference}") + doc = reference_filter("Fixed #{written_reference}") expect(doc.css('a').first.attr('href')) .to eq issue_url end it 'links with adjacent text' do - doc = reference_filter("Fixed (#{reference}.)") + doc = reference_filter("Fixed (#{written_reference}.)") expect(doc.text).to eql("Fixed (#{reference}.)") end it 'ignores invalid issue IDs' do - invalid = invalidate_reference(reference) + invalid = invalidate_reference(written_reference) exp = act = "Fixed #{invalid}" expect(reference_filter(act).to_html).to eq exp end it 'includes a title attribute' do - doc = reference_filter("Issue #{reference}") + doc = reference_filter("Issue #{written_reference}") expect(doc.css('a').first.attr('title')).to eq issue.title end it 'escapes the title attribute' do issue.update_attribute(:title, %{"></a>whatever<a title="}) - doc = reference_filter("Issue #{reference}") + doc = reference_filter("Issue 
#{written_reference}") expect(doc.text).to eq "Issue #{reference}" end it 'renders non-HTML tooltips' do - doc = reference_filter("Issue #{reference}") + doc = reference_filter("Issue #{written_reference}") expect(doc.at_css('a')).not_to have_attribute('data-html') end it 'includes default classes' do - doc = reference_filter("Issue #{reference}") + doc = reference_filter("Issue #{written_reference}") expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue' end it 'includes a data-project attribute' do - doc = reference_filter("Issue #{reference}") + doc = reference_filter("Issue #{written_reference}") link = doc.css('a').first expect(link).to have_attribute('data-project') @@ -105,7 +103,7 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter do end it 'includes a data-issue attribute' do - doc = reference_filter("See #{reference}") + doc = reference_filter("See #{written_reference}") link = doc.css('a').first expect(link).to have_attribute('data-issue') @@ -113,7 +111,7 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter do end it 'includes data attributes for issuable popover' do - doc = reference_filter("See #{reference}") + doc = reference_filter("See #{written_reference}") link = doc.css('a').first expect(link.attr('data-project-path')).to eq project.full_path @@ -121,21 +119,21 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter do end it 'includes a data-original attribute' do - doc = reference_filter("See #{reference}") + doc = reference_filter("See #{written_reference}") link = doc.css('a').first expect(link).to have_attribute('data-original') - expect(link.attr('data-original')).to eq reference + expect(link.attr('data-original')).to eq written_reference end it 'does not escape the data-original attribute' do inner_html = 'element <code>node</code> inside' - doc = reference_filter(%{<a href="#{reference}">#{inner_html}</a>}) + doc = reference_filter(%{<a href="#{written_reference}">#{inner_html}</a>}) 
expect(doc.children.first.attr('data-original')).to eq inner_html end it 'includes a data-reference-format attribute' do - doc = reference_filter("Issue #{reference}+") + doc = reference_filter("Issue #{written_reference}+") link = doc.css('a').first expect(link).to have_attribute('data-reference-format') @@ -153,7 +151,7 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter do end it 'supports an :only_path context' do - doc = reference_filter("Issue #{reference}", only_path: true) + doc = reference_filter("Issue #{written_reference}", only_path: true) link = doc.css('a').first.attr('href') expect(link).not_to match %r(https?://) @@ -161,7 +159,7 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter do end it 'does not process links containing issue numbers followed by text' do - href = "#{reference}st" + href = "#{written_reference}st" doc = reference_filter("<a href='#{href}'></a>") link = doc.css('a').first.attr('href') @@ -169,6 +167,20 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter do end end + context 'standard internal reference' do + let(:written_reference) { "##{issue.iid}" } + let(:reference) { "##{issue.iid}" } + + it_behaves_like 'an internal reference' + end + + context 'alternative internal_reference' do + let(:written_reference) { "GL-#{issue.iid}" } + let(:reference) { "##{issue.iid}" } + + it_behaves_like 'an internal reference' + end + context 'cross-project / cross-namespace complete reference' do let(:reference) { "#{project2.full_path}##{issue.iid}" } let(:issue) { create(:issue, project: project2) } diff --git a/spec/lib/banzai/filter/references/label_reference_filter_spec.rb b/spec/lib/banzai/filter/references/label_reference_filter_spec.rb index d5b9c71b861..f8d223c6611 100644 --- a/spec/lib/banzai/filter/references/label_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/references/label_reference_filter_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' require 'html/pipeline' -RSpec.describe 
Banzai::Filter::References::LabelReferenceFilter do +RSpec.describe Banzai::Filter::References::LabelReferenceFilter, feature_category: :team_planning do include FilterSpecHelper let(:project) { create(:project, :public, name: 'sample-project') } diff --git a/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb b/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb index 42e8cf1c857..9853d6f4093 100644 --- a/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::References::MergeRequestReferenceFilter do +RSpec.describe Banzai::Filter::References::MergeRequestReferenceFilter, feature_category: :code_review_workflow do include FilterSpecHelper let(:project) { create(:project, :public) } diff --git a/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb b/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb index 98090af06b1..ecd5d1368c9 100644 --- a/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::References::MilestoneReferenceFilter do +RSpec.describe Banzai::Filter::References::MilestoneReferenceFilter, feature_category: :team_planning do include FilterSpecHelper let_it_be(:parent_group) { create(:group, :public) } diff --git a/spec/lib/banzai/filter/references/project_reference_filter_spec.rb b/spec/lib/banzai/filter/references/project_reference_filter_spec.rb index 0dd52b45f5d..49c71d08364 100644 --- a/spec/lib/banzai/filter/references/project_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/references/project_reference_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::References::ProjectReferenceFilter do +RSpec.describe 
Banzai::Filter::References::ProjectReferenceFilter, feature_category: :team_planning do include FilterSpecHelper def invalidate_reference(reference) diff --git a/spec/lib/banzai/filter/references/reference_cache_spec.rb b/spec/lib/banzai/filter/references/reference_cache_spec.rb index dc43c33a08d..7307daca516 100644 --- a/spec/lib/banzai/filter/references/reference_cache_spec.rb +++ b/spec/lib/banzai/filter/references/reference_cache_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::References::ReferenceCache do +RSpec.describe Banzai::Filter::References::ReferenceCache, feature_category: :team_planning do let_it_be(:project) { create(:project) } let_it_be(:project2) { create(:project) } let_it_be(:issue1) { create(:issue, project: project) } diff --git a/spec/lib/banzai/filter/references/reference_filter_spec.rb b/spec/lib/banzai/filter/references/reference_filter_spec.rb index 88404f2039d..b55b8fd41fa 100644 --- a/spec/lib/banzai/filter/references/reference_filter_spec.rb +++ b/spec/lib/banzai/filter/references/reference_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::References::ReferenceFilter do +RSpec.describe Banzai::Filter::References::ReferenceFilter, feature_category: :team_planning do let(:project) { build_stubbed(:project) } describe '#each_node' do diff --git a/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb b/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb index 2e324669870..32d1cb095d3 100644 --- a/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::References::SnippetReferenceFilter do +RSpec.describe Banzai::Filter::References::SnippetReferenceFilter, feature_category: :team_planning do include FilterSpecHelper let(:project) { create(:project, :public) } diff --git 
a/spec/lib/banzai/filter/references/user_reference_filter_spec.rb b/spec/lib/banzai/filter/references/user_reference_filter_spec.rb index d61b71c711d..e248f2d9b1c 100644 --- a/spec/lib/banzai/filter/references/user_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/references/user_reference_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::References::UserReferenceFilter do +RSpec.describe Banzai::Filter::References::UserReferenceFilter, feature_category: :team_planning do include FilterSpecHelper def get_reference(user) diff --git a/spec/lib/banzai/filter/sanitization_filter_spec.rb b/spec/lib/banzai/filter/sanitization_filter_spec.rb index 039ca36af6e..51832e60754 100644 --- a/spec/lib/banzai/filter/sanitization_filter_spec.rb +++ b/spec/lib/banzai/filter/sanitization_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::SanitizationFilter do +RSpec.describe Banzai::Filter::SanitizationFilter, feature_category: :team_planning do include FilterSpecHelper it_behaves_like 'default allowlist' diff --git a/spec/lib/banzai/filter/spaced_link_filter_spec.rb b/spec/lib/banzai/filter/spaced_link_filter_spec.rb index 820ebeb6945..0d236cf2381 100644 --- a/spec/lib/banzai/filter/spaced_link_filter_spec.rb +++ b/spec/lib/banzai/filter/spaced_link_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::SpacedLinkFilter do +RSpec.describe Banzai::Filter::SpacedLinkFilter, feature_category: :team_planning do include FilterSpecHelper let(:link) { '[example](page slug)' } diff --git a/spec/lib/banzai/filter/suggestion_filter_spec.rb b/spec/lib/banzai/filter/suggestion_filter_spec.rb index d74bac4898e..e65a9214e76 100644 --- a/spec/lib/banzai/filter/suggestion_filter_spec.rb +++ b/spec/lib/banzai/filter/suggestion_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::SuggestionFilter do +RSpec.describe Banzai::Filter::SuggestionFilter, feature_category: 
:team_planning do include FilterSpecHelper let(:input) { %(<pre class="code highlight js-syntax-highlight language-suggestion"><code>foo\n</code></pre>) } diff --git a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb index b4be26ef8d2..0d7f322d08f 100644 --- a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb +++ b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::SyntaxHighlightFilter do +RSpec.describe Banzai::Filter::SyntaxHighlightFilter, feature_category: :team_planning do include FilterSpecHelper shared_examples "XSS prevention" do |lang| @@ -94,8 +94,9 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do context "when #{lang} is specified" do it "highlights as plaintext but with the correct language attribute and class" do result = filter(%{<pre lang="#{lang}"><code>This is a test</code></pre>}) + copy_code_btn = '<copy-code></copy-code>' unless lang == 'suggestion' - expect(result.to_html.delete("\n")).to eq(%{<div class="gl-relative markdown-code-block js-markdown-code"><pre lang="#{lang}" class="code highlight js-syntax-highlight language-#{lang}" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre><copy-code></copy-code></div>}) + expect(result.to_html.delete("\n")).to eq(%{<div class="gl-relative markdown-code-block js-markdown-code"><pre lang="#{lang}" class="code highlight js-syntax-highlight language-#{lang}" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre>#{copy_code_btn}</div>}) end include_examples "XSS prevention", lang @@ -107,8 +108,9 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do it "includes data-lang-params tag with extra information" do result = filter(%{<pre lang="#{lang}" data-meta="#{lang_params}"><code>This is a test</code></pre>}) + copy_code_btn = '<copy-code></copy-code>' unless lang 
== 'suggestion' - expect(result.to_html.delete("\n")).to eq(%{<div class="gl-relative markdown-code-block js-markdown-code"><pre lang="#{lang}" class="code highlight js-syntax-highlight language-#{lang}" #{data_attr}="#{lang_params}" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre><copy-code></copy-code></div>}) + expect(result.to_html.delete("\n")).to eq(%{<div class="gl-relative markdown-code-block js-markdown-code"><pre lang="#{lang}" class="code highlight js-syntax-highlight language-#{lang}" #{data_attr}="#{lang_params}" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre>#{copy_code_btn}</div>}) end include_examples "XSS prevention", lang @@ -126,7 +128,7 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do let(:lang_params) { '-1+10' } let(:expected_result) do - %{<div class="gl-relative markdown-code-block js-markdown-code"><pre lang="#{lang}" class="code highlight js-syntax-highlight language-#{lang}" #{data_attr}="#{lang_params} more-things" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre><copy-code></copy-code></div>} + %{<div class="gl-relative markdown-code-block js-markdown-code"><pre lang="#{lang}" class="code highlight js-syntax-highlight language-#{lang}" #{data_attr}="#{lang_params} more-things" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre></div>} end context 'when delimiter is space' do diff --git a/spec/lib/banzai/filter/table_of_contents_filter_spec.rb b/spec/lib/banzai/filter/table_of_contents_filter_spec.rb index 91c644cb16a..26c949128da 100644 --- a/spec/lib/banzai/filter/table_of_contents_filter_spec.rb +++ b/spec/lib/banzai/filter/table_of_contents_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::TableOfContentsFilter do +RSpec.describe Banzai::Filter::TableOfContentsFilter, feature_category: :team_planning do 
include FilterSpecHelper def header(level, text) diff --git a/spec/lib/banzai/filter/table_of_contents_tag_filter_spec.rb b/spec/lib/banzai/filter/table_of_contents_tag_filter_spec.rb index 082e5c92e53..322225b38a9 100644 --- a/spec/lib/banzai/filter/table_of_contents_tag_filter_spec.rb +++ b/spec/lib/banzai/filter/table_of_contents_tag_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::TableOfContentsTagFilter do +RSpec.describe Banzai::Filter::TableOfContentsTagFilter, feature_category: :team_planning do include FilterSpecHelper context 'table of contents' do diff --git a/spec/lib/banzai/filter/task_list_filter_spec.rb b/spec/lib/banzai/filter/task_list_filter_spec.rb index 920904b0f29..3eef6761153 100644 --- a/spec/lib/banzai/filter/task_list_filter_spec.rb +++ b/spec/lib/banzai/filter/task_list_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::TaskListFilter do +RSpec.describe Banzai::Filter::TaskListFilter, feature_category: :team_planning do include FilterSpecHelper it 'adds `<task-button></task-button>` to every list item' do diff --git a/spec/lib/banzai/filter/timeout_html_pipeline_filter_spec.rb b/spec/lib/banzai/filter/timeout_html_pipeline_filter_spec.rb index 95d2e54459d..066f59758f0 100644 --- a/spec/lib/banzai/filter/timeout_html_pipeline_filter_spec.rb +++ b/spec/lib/banzai/filter/timeout_html_pipeline_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::TimeoutHtmlPipelineFilter do +RSpec.describe Banzai::Filter::TimeoutHtmlPipelineFilter, feature_category: :team_planning do include FilterSpecHelper it_behaves_like 'filter timeout' do diff --git a/spec/lib/banzai/filter/truncate_source_filter_spec.rb b/spec/lib/banzai/filter/truncate_source_filter_spec.rb index 8970aa1d382..8ca6a1affdd 100644 --- a/spec/lib/banzai/filter/truncate_source_filter_spec.rb +++ b/spec/lib/banzai/filter/truncate_source_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' 
-RSpec.describe Banzai::Filter::TruncateSourceFilter do +RSpec.describe Banzai::Filter::TruncateSourceFilter, feature_category: :team_planning do include FilterSpecHelper let(:short_text) { 'foo' * 10 } diff --git a/spec/lib/banzai/filter/truncate_visible_filter_spec.rb b/spec/lib/banzai/filter/truncate_visible_filter_spec.rb index 8daaed05264..404b23a886f 100644 --- a/spec/lib/banzai/filter/truncate_visible_filter_spec.rb +++ b/spec/lib/banzai/filter/truncate_visible_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::TruncateVisibleFilter do +RSpec.describe Banzai::Filter::TruncateVisibleFilter, feature_category: :team_planning do include FilterSpecHelper let_it_be(:project) { build(:project, :repository) } diff --git a/spec/lib/banzai/filter/upload_link_filter_spec.rb b/spec/lib/banzai/filter/upload_link_filter_spec.rb index eb45a8149c3..71656b6dc94 100644 --- a/spec/lib/banzai/filter/upload_link_filter_spec.rb +++ b/spec/lib/banzai/filter/upload_link_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::UploadLinkFilter do +RSpec.describe Banzai::Filter::UploadLinkFilter, feature_category: :team_planning do def filter(doc, contexts = {}) contexts.reverse_merge!( project: project, diff --git a/spec/lib/banzai/filter/video_link_filter_spec.rb b/spec/lib/banzai/filter/video_link_filter_spec.rb index a0b0ba309f5..32d7f6cfefa 100644 --- a/spec/lib/banzai/filter/video_link_filter_spec.rb +++ b/spec/lib/banzai/filter/video_link_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::VideoLinkFilter do +RSpec.describe Banzai::Filter::VideoLinkFilter, feature_category: :team_planning do def filter(doc, contexts = {}) contexts.reverse_merge!({ project: project @@ -11,16 +11,20 @@ RSpec.describe Banzai::Filter::VideoLinkFilter do described_class.call(doc, contexts) end - def link_to_image(path) + def link_to_image(path, height = nil, width = nil) return '<img/>' if path.nil? 
- %(<img src="#{path}"/>) + attrs = %(src="#{path}") + attrs += %( width="#{width}") if width + attrs += %( height="#{height}") if height + + %(<img #{attrs}/>) end let(:project) { create(:project, :repository) } shared_examples 'a video element' do - let(:image) { link_to_image(src) } + let(:image) { link_to_image(src, height, width) } it 'replaces the image tag with a video tag' do container = filter(image).children.first @@ -32,7 +36,9 @@ RSpec.describe Banzai::Filter::VideoLinkFilter do expect(video.name).to eq 'video' expect(video['src']).to eq src - expect(video['width']).to eq "400" + expect(video['height']).to eq height if height + expect(video['width']).to eq width if width + expect(video['width']).to eq '400' unless width || height expect(video['preload']).to eq 'metadata' expect(link.name).to eq 'a' @@ -51,6 +57,9 @@ RSpec.describe Banzai::Filter::VideoLinkFilter do end context 'when the element src has a video extension' do + let(:height) { nil } + let(:width) { nil } + Gitlab::FileTypeDetection::SAFE_VIDEO_EXT.each do |ext| it_behaves_like 'a video element' do let(:src) { "/path/video.#{ext}" } @@ -62,6 +71,25 @@ RSpec.describe Banzai::Filter::VideoLinkFilter do end end + context 'when the element has height or width specified' do + let(:src) { '/path/video.mp4' } + + it_behaves_like 'a video element' do + let(:height) { '100%' } + let(:width) { '50px' } + end + + it_behaves_like 'a video element' do + let(:height) { nil } + let(:width) { '50px' } + end + + it_behaves_like 'a video element' do + let(:height) { '50px' } + let(:width) { nil } + end + end + context 'when the element has no src attribute' do let(:src) { nil } @@ -85,6 +113,8 @@ RSpec.describe Banzai::Filter::VideoLinkFilter do context 'and src is a video' do let(:src) { '/path/video.mp4' } + let(:height) { nil } + let(:width) { nil } it_behaves_like 'a video element' end diff --git a/spec/lib/banzai/filter/wiki_link_filter_spec.rb b/spec/lib/banzai/filter/wiki_link_filter_spec.rb index 
9807e385a5a..ace3aea5346 100644 --- a/spec/lib/banzai/filter/wiki_link_filter_spec.rb +++ b/spec/lib/banzai/filter/wiki_link_filter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Filter::WikiLinkFilter do +RSpec.describe Banzai::Filter::WikiLinkFilter, feature_category: :wiki do include FilterSpecHelper let(:namespace) { build_stubbed(:namespace, name: "wiki_link_ns") } diff --git a/spec/lib/banzai/filter_array_spec.rb b/spec/lib/banzai/filter_array_spec.rb index f341d5d51a0..bb457568bee 100644 --- a/spec/lib/banzai/filter_array_spec.rb +++ b/spec/lib/banzai/filter_array_spec.rb @@ -2,7 +2,7 @@ require 'fast_spec_helper' -RSpec.describe Banzai::FilterArray do +RSpec.describe Banzai::FilterArray, feature_category: :team_planning do describe '#insert_after' do it 'inserts an element after a provided element' do filters = described_class.new(%w(a b c)) diff --git a/spec/lib/banzai/issuable_extractor_spec.rb b/spec/lib/banzai/issuable_extractor_spec.rb index 8fec9691d7f..b2c869bd066 100644 --- a/spec/lib/banzai/issuable_extractor_spec.rb +++ b/spec/lib/banzai/issuable_extractor_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::IssuableExtractor do +RSpec.describe Banzai::IssuableExtractor, feature_category: :team_planning do let(:project) { create(:project) } let(:user) { create(:user) } let(:extractor) { described_class.new(Banzai::RenderContext.new(project, user)) } diff --git a/spec/lib/banzai/object_renderer_spec.rb b/spec/lib/banzai/object_renderer_spec.rb index 8f69480c65f..b2f7db24e38 100644 --- a/spec/lib/banzai/object_renderer_spec.rb +++ b/spec/lib/banzai/object_renderer_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::ObjectRenderer do +RSpec.describe Banzai::ObjectRenderer, feature_category: :team_planning do let(:project) { create(:project, :repository) } let(:user) { project.first_owner } let(:renderer) do diff --git a/spec/lib/banzai/pipeline/broadcast_message_pipeline_spec.rb 
b/spec/lib/banzai/pipeline/broadcast_message_pipeline_spec.rb index ad4256c2045..9f1af821d11 100644 --- a/spec/lib/banzai/pipeline/broadcast_message_pipeline_spec.rb +++ b/spec/lib/banzai/pipeline/broadcast_message_pipeline_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Pipeline::BroadcastMessagePipeline do +RSpec.describe Banzai::Pipeline::BroadcastMessagePipeline, feature_category: :team_planning do let_it_be(:group) { create(:group) } let_it_be(:project) { create(:project, group: group) } diff --git a/spec/lib/banzai/pipeline/description_pipeline_spec.rb b/spec/lib/banzai/pipeline/description_pipeline_spec.rb index be553433e9e..fa25612a06e 100644 --- a/spec/lib/banzai/pipeline/description_pipeline_spec.rb +++ b/spec/lib/banzai/pipeline/description_pipeline_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Pipeline::DescriptionPipeline do +RSpec.describe Banzai::Pipeline::DescriptionPipeline, feature_category: :team_planning do let_it_be(:project) { create(:project) } def parse(html) diff --git a/spec/lib/banzai/pipeline/email_pipeline_spec.rb b/spec/lib/banzai/pipeline/email_pipeline_spec.rb index c7a0b9fefa1..4d2dca84b1b 100644 --- a/spec/lib/banzai/pipeline/email_pipeline_spec.rb +++ b/spec/lib/banzai/pipeline/email_pipeline_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Pipeline::EmailPipeline do +RSpec.describe Banzai::Pipeline::EmailPipeline, feature_category: :team_planning do describe '.filters' do it 'returns the expected type' do expect(described_class.filters).to be_kind_of(Banzai::FilterArray) diff --git a/spec/lib/banzai/pipeline/emoji_pipeline_spec.rb b/spec/lib/banzai/pipeline/emoji_pipeline_spec.rb index 8103846d4f7..6ecd7f56dec 100644 --- a/spec/lib/banzai/pipeline/emoji_pipeline_spec.rb +++ b/spec/lib/banzai/pipeline/emoji_pipeline_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Pipeline::EmojiPipeline do +RSpec.describe Banzai::Pipeline::EmojiPipeline, 
feature_category: :team_planning do let(:emoji) { TanukiEmoji.find_by_alpha_code('100') } def parse(text) diff --git a/spec/lib/banzai/pipeline/full_pipeline_spec.rb b/spec/lib/banzai/pipeline/full_pipeline_spec.rb index c1d5f16b562..ca05a353d47 100644 --- a/spec/lib/banzai/pipeline/full_pipeline_spec.rb +++ b/spec/lib/banzai/pipeline/full_pipeline_spec.rb @@ -3,6 +3,8 @@ require 'spec_helper' RSpec.describe Banzai::Pipeline::FullPipeline, feature_category: :team_planning do + using RSpec::Parameterized::TableSyntax + describe 'References' do let(:project) { create(:project, :public) } let(:issue) { create(:issue, project: project) } @@ -157,14 +159,44 @@ RSpec.describe Banzai::Pipeline::FullPipeline, feature_category: :team_planning markdown = "\\#{issue.to_reference}" output = described_class.to_html(markdown, project: project) - expect(output).to include("<span>#</span>#{issue.iid}") + expect(output).to include("<span data-escaped-char>#</span>#{issue.iid}") end it 'converts user reference with escaped underscore because of italics' do markdown = '_@test\__' output = described_class.to_html(markdown, project: project) - expect(output).to include('<em>@test<span>_</span></em>') + expect(output).to include('<em>@test_</em>') + end + + context 'when a reference (such as a label name) is autocompleted with characters that require escaping' do + # Labels are fairly representative of the type of characters that can be in a reference + # and aligns with the testing in spec/frontend/gfm_auto_complete_spec.js + where(:valid, :label_name, :markdown) do + # These are currently not supported + # true | 'a~bug' | '~"a\~bug"' + # true | 'b~~bug~~' | '~"b\~\~bug\~\~"' + + true | 'c_bug_' | '~c_bug\_' + true | 'c_bug_' | 'Label ~c_bug\_ and _more_ text' + true | 'd _bug_' | '~"d \_bug\_"' + true | 'e*bug*' | '~"e\*bug\*"' + true | 'f *bug*' | '~"f \*bug\*"' + true | 'f *bug*' | 'Label ~"f \*bug\*" **with** more text' + true | 'g`bug`' | '~"g\`bug\`" ' + true | 'h `bug`' | '~"h 
\`bug\`"' + end + + with_them do + it 'detects valid escaped reference' do + create(:label, name: label_name, project: project) + + result = Banzai::Pipeline::FullPipeline.call(markdown, project: project) + + expect(result[:output].css('a').first.attr('class')).to eq 'gfm gfm-label has-tooltip gl-link gl-label-link' + expect(result[:output].css('a').first.content).to eq label_name + end + end end end diff --git a/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb b/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb index f67f13b3862..d0b85a1d043 100644 --- a/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb +++ b/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Pipeline::GfmPipeline do +RSpec.describe Banzai::Pipeline::GfmPipeline, feature_category: :team_planning do describe 'integration between parsing regular and external issue references' do let(:project) { create(:project, :with_redmine_integration, :public) } diff --git a/spec/lib/banzai/pipeline/jira_import/adf_commonmark_pipeline_spec.rb b/spec/lib/banzai/pipeline/jira_import/adf_commonmark_pipeline_spec.rb index 74005adf673..f579b9e1883 100644 --- a/spec/lib/banzai/pipeline/jira_import/adf_commonmark_pipeline_spec.rb +++ b/spec/lib/banzai/pipeline/jira_import/adf_commonmark_pipeline_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Pipeline::JiraImport::AdfCommonmarkPipeline do +RSpec.describe Banzai::Pipeline::JiraImport::AdfCommonmarkPipeline, feature_category: :team_planning do let_it_be(:fixtures_path) { 'lib/kramdown/atlassian_document_format' } it 'converts text in Atlassian Document Format' do diff --git a/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb b/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb index 0e4a4e4492e..e7c15ed9cf6 100644 --- a/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb +++ b/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb @@ -15,10 +15,15 @@ RSpec.describe 
Banzai::Pipeline::PlainMarkdownPipeline, feature_category: :team_ result = described_class.call(markdown, project: project) output = result[:output].to_html - Banzai::Filter::MarkdownPreEscapeFilter::ESCAPABLE_CHARS.pluck(:char).each do |char| - char = '&' if char == '&' - - expect(output).to include("<span>#{char}</span>") + Banzai::Filter::MarkdownPreEscapeFilter::ESCAPABLE_CHARS.each do |item| + char = item[:char] == '&' ? '&' : item[:char] + + if item[:reference] + expect(output).to include("<span data-escaped-char>#{char}</span>") + else + expect(output).not_to include("<span data-escaped-char>#{char}</span>") + expect(output).to include(char) + end end expect(result[:escaped_literals]).to be_truthy diff --git a/spec/lib/banzai/pipeline/post_process_pipeline_spec.rb b/spec/lib/banzai/pipeline/post_process_pipeline_spec.rb index e8df395564a..072d77f4112 100644 --- a/spec/lib/banzai/pipeline/post_process_pipeline_spec.rb +++ b/spec/lib/banzai/pipeline/post_process_pipeline_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Pipeline::PostProcessPipeline do +RSpec.describe Banzai::Pipeline::PostProcessPipeline, feature_category: :team_planning do subject { described_class.call(doc, context) } let_it_be(:project) { create(:project, :public, :repository) } diff --git a/spec/lib/banzai/pipeline/pre_process_pipeline_spec.rb b/spec/lib/banzai/pipeline/pre_process_pipeline_spec.rb index 303d0fcb6c2..55575d4cf84 100644 --- a/spec/lib/banzai/pipeline/pre_process_pipeline_spec.rb +++ b/spec/lib/banzai/pipeline/pre_process_pipeline_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Pipeline::PreProcessPipeline do +RSpec.describe Banzai::Pipeline::PreProcessPipeline, feature_category: :team_planning do it 'pre-processes the source text' do markdown = <<~MD \xEF\xBB\xBF--- diff --git a/spec/lib/banzai/pipeline/wiki_pipeline_spec.rb b/spec/lib/banzai/pipeline/wiki_pipeline_spec.rb index 59f5e4a6900..837ea2d7bc0 100644 --- 
a/spec/lib/banzai/pipeline/wiki_pipeline_spec.rb +++ b/spec/lib/banzai/pipeline/wiki_pipeline_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Pipeline::WikiPipeline do +RSpec.describe Banzai::Pipeline::WikiPipeline, feature_category: :wiki do let_it_be(:namespace) { create(:namespace, name: "wiki_link_ns") } let_it_be(:project) { create(:project, :public, name: "wiki_link_project", namespace: namespace) } let_it_be(:wiki) { ProjectWiki.new(project, nil) } diff --git a/spec/lib/banzai/pipeline_spec.rb b/spec/lib/banzai/pipeline_spec.rb index b2c970e4394..c1199654cd6 100644 --- a/spec/lib/banzai/pipeline_spec.rb +++ b/spec/lib/banzai/pipeline_spec.rb @@ -2,7 +2,7 @@ require 'fast_spec_helper' -RSpec.describe Banzai::Pipeline do +RSpec.describe Banzai::Pipeline, feature_category: :team_planning do describe '.[]' do subject { described_class[name] } diff --git a/spec/lib/banzai/querying_spec.rb b/spec/lib/banzai/querying_spec.rb index fc7aaa94954..8f95816b073 100644 --- a/spec/lib/banzai/querying_spec.rb +++ b/spec/lib/banzai/querying_spec.rb @@ -2,7 +2,7 @@ require 'fast_spec_helper' -RSpec.describe Banzai::Querying do +RSpec.describe Banzai::Querying, feature_category: :team_planning do describe '.css' do it 'optimizes queries for elements with classes' do document = double(:document) diff --git a/spec/lib/banzai/reference_parser/alert_parser_spec.rb b/spec/lib/banzai/reference_parser/alert_parser_spec.rb index 0a9499fe6e4..33d7a7130dd 100644 --- a/spec/lib/banzai/reference_parser/alert_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/alert_parser_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::ReferenceParser::AlertParser do +RSpec.describe Banzai::ReferenceParser::AlertParser, feature_category: :team_planning do include ReferenceParserHelpers let(:project) { create(:project, :public) } diff --git a/spec/lib/banzai/reference_parser/base_parser_spec.rb b/spec/lib/banzai/reference_parser/base_parser_spec.rb index 
61751b69842..bc7a93a7cde 100644 --- a/spec/lib/banzai/reference_parser/base_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/base_parser_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::ReferenceParser::BaseParser do +RSpec.describe Banzai::ReferenceParser::BaseParser, feature_category: :team_planning do include ReferenceParserHelpers let(:user) { create(:user) } diff --git a/spec/lib/banzai/reference_parser/commit_parser_spec.rb b/spec/lib/banzai/reference_parser/commit_parser_spec.rb index 3569a1019f0..081bfa26fb2 100644 --- a/spec/lib/banzai/reference_parser/commit_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/commit_parser_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::ReferenceParser::CommitParser do +RSpec.describe Banzai::ReferenceParser::CommitParser, feature_category: :source_code_management do include ReferenceParserHelpers let(:project) { create(:project, :public) } diff --git a/spec/lib/banzai/reference_parser/commit_range_parser_spec.rb b/spec/lib/banzai/reference_parser/commit_range_parser_spec.rb index 172347fc421..e058793c659 100644 --- a/spec/lib/banzai/reference_parser/commit_range_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/commit_range_parser_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::ReferenceParser::CommitRangeParser do +RSpec.describe Banzai::ReferenceParser::CommitRangeParser, feature_category: :source_code_management do include ReferenceParserHelpers let(:project) { create(:project, :public) } diff --git a/spec/lib/banzai/reference_parser/design_parser_spec.rb b/spec/lib/banzai/reference_parser/design_parser_spec.rb index a9cb2952c26..c490cf5b36d 100644 --- a/spec/lib/banzai/reference_parser/design_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/design_parser_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::ReferenceParser::DesignParser do +RSpec.describe Banzai::ReferenceParser::DesignParser, feature_category: 
:design_management do include ReferenceParserHelpers include DesignManagementTestHelpers diff --git a/spec/lib/banzai/reference_parser/external_issue_parser_spec.rb b/spec/lib/banzai/reference_parser/external_issue_parser_spec.rb index 0c1b98e5ec3..1b2c9792cf7 100644 --- a/spec/lib/banzai/reference_parser/external_issue_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/external_issue_parser_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::ReferenceParser::ExternalIssueParser do +RSpec.describe Banzai::ReferenceParser::ExternalIssueParser, feature_category: :team_planning do include ReferenceParserHelpers let(:project) { create(:project, :public) } diff --git a/spec/lib/banzai/reference_parser/feature_flag_parser_spec.rb b/spec/lib/banzai/reference_parser/feature_flag_parser_spec.rb index 288eb9ae360..ba71949ee44 100644 --- a/spec/lib/banzai/reference_parser/feature_flag_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/feature_flag_parser_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::ReferenceParser::FeatureFlagParser do +RSpec.describe Banzai::ReferenceParser::FeatureFlagParser, feature_category: :feature_flags do include ReferenceParserHelpers subject { described_class.new(Banzai::RenderContext.new(project, user)) } diff --git a/spec/lib/banzai/reference_parser/issue_parser_spec.rb b/spec/lib/banzai/reference_parser/issue_parser_spec.rb index c180a42c91e..2efdb928b6f 100644 --- a/spec/lib/banzai/reference_parser/issue_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/issue_parser_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::ReferenceParser::IssueParser do +RSpec.describe Banzai::ReferenceParser::IssueParser, feature_category: :team_planning do include ReferenceParserHelpers let_it_be(:group) { create(:group, :public) } diff --git a/spec/lib/banzai/reference_parser/label_parser_spec.rb b/spec/lib/banzai/reference_parser/label_parser_spec.rb index 8f287e15b37..e27af57f15d 100644 --- 
a/spec/lib/banzai/reference_parser/label_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/label_parser_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::ReferenceParser::LabelParser do +RSpec.describe Banzai::ReferenceParser::LabelParser, feature_category: :team_planning do include ReferenceParserHelpers let(:project) { create(:project, :public) } diff --git a/spec/lib/banzai/reference_parser/mentioned_group_parser_spec.rb b/spec/lib/banzai/reference_parser/mentioned_group_parser_spec.rb index 576e629d271..c5302d52270 100644 --- a/spec/lib/banzai/reference_parser/mentioned_group_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/mentioned_group_parser_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::ReferenceParser::MentionedGroupParser do +RSpec.describe Banzai::ReferenceParser::MentionedGroupParser, feature_category: :team_planning do include ReferenceParserHelpers let(:group) { create(:group, :private) } diff --git a/spec/lib/banzai/reference_parser/mentioned_project_parser_spec.rb b/spec/lib/banzai/reference_parser/mentioned_project_parser_spec.rb index 983407addce..242a1720f39 100644 --- a/spec/lib/banzai/reference_parser/mentioned_project_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/mentioned_project_parser_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::ReferenceParser::MentionedProjectParser do +RSpec.describe Banzai::ReferenceParser::MentionedProjectParser, feature_category: :team_planning do include ReferenceParserHelpers let(:group) { create(:group, :private) } diff --git a/spec/lib/banzai/reference_parser/mentioned_user_parser_spec.rb b/spec/lib/banzai/reference_parser/mentioned_user_parser_spec.rb index f117d796dad..14a5bf04752 100644 --- a/spec/lib/banzai/reference_parser/mentioned_user_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/mentioned_user_parser_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::ReferenceParser::MentionedUserParser do 
+RSpec.describe Banzai::ReferenceParser::MentionedUserParser, feature_category: :team_planning do include ReferenceParserHelpers let(:group) { create(:group, :private) } diff --git a/spec/lib/banzai/reference_parser/merge_request_parser_spec.rb b/spec/lib/banzai/reference_parser/merge_request_parser_spec.rb index 3fbda7f3239..eead5019217 100644 --- a/spec/lib/banzai/reference_parser/merge_request_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/merge_request_parser_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::ReferenceParser::MergeRequestParser do +RSpec.describe Banzai::ReferenceParser::MergeRequestParser, feature_category: :code_review_workflow do include ReferenceParserHelpers let(:group) { create(:group, :public) } diff --git a/spec/lib/banzai/reference_parser/milestone_parser_spec.rb b/spec/lib/banzai/reference_parser/milestone_parser_spec.rb index 95f71154e38..1e7a492cb35 100644 --- a/spec/lib/banzai/reference_parser/milestone_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/milestone_parser_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::ReferenceParser::MilestoneParser do +RSpec.describe Banzai::ReferenceParser::MilestoneParser, feature_category: :team_planning do include ReferenceParserHelpers let(:project) { create(:project, :public) } diff --git a/spec/lib/banzai/reference_parser/project_parser_spec.rb b/spec/lib/banzai/reference_parser/project_parser_spec.rb index 2c0b6c417b0..90a3660a0e3 100644 --- a/spec/lib/banzai/reference_parser/project_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/project_parser_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::ReferenceParser::ProjectParser do +RSpec.describe Banzai::ReferenceParser::ProjectParser, feature_category: :team_planning do include ReferenceParserHelpers let(:project) { create(:project, :public) } diff --git a/spec/lib/banzai/reference_parser/snippet_parser_spec.rb 
b/spec/lib/banzai/reference_parser/snippet_parser_spec.rb index e8ef4e7f6e3..8f4148be2dc 100644 --- a/spec/lib/banzai/reference_parser/snippet_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/snippet_parser_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::ReferenceParser::SnippetParser do +RSpec.describe Banzai::ReferenceParser::SnippetParser, feature_category: :team_planning do include ReferenceParserHelpers let(:project) { create(:project, :public) } diff --git a/spec/lib/banzai/reference_parser/user_parser_spec.rb b/spec/lib/banzai/reference_parser/user_parser_spec.rb index d4f4339cf17..179e6e73fa3 100644 --- a/spec/lib/banzai/reference_parser/user_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/user_parser_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::ReferenceParser::UserParser do +RSpec.describe Banzai::ReferenceParser::UserParser, feature_category: :team_planning do include ReferenceParserHelpers let(:group) { create(:group) } diff --git a/spec/lib/banzai/reference_redactor_spec.rb b/spec/lib/banzai/reference_redactor_spec.rb index 344b8988296..8a8f3ce586a 100644 --- a/spec/lib/banzai/reference_redactor_spec.rb +++ b/spec/lib/banzai/reference_redactor_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::ReferenceRedactor do +RSpec.describe Banzai::ReferenceRedactor, feature_category: :team_planning do let(:user) { create(:user) } let(:project) { build(:project) } let(:redactor) { described_class.new(Banzai::RenderContext.new(project, user)) } diff --git a/spec/lib/banzai/render_context_spec.rb b/spec/lib/banzai/render_context_spec.rb index 4b5c2c5a7df..76423b5805c 100644 --- a/spec/lib/banzai/render_context_spec.rb +++ b/spec/lib/banzai/render_context_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::RenderContext do +RSpec.describe Banzai::RenderContext, feature_category: :team_planning do let(:document) { Nokogiri::HTML.fragment('<p>hello</p>') } describe 
'#project_for_node' do diff --git a/spec/lib/banzai/renderer_spec.rb b/spec/lib/banzai/renderer_spec.rb index 705f44baf16..8c9d8d51d5f 100644 --- a/spec/lib/banzai/renderer_spec.rb +++ b/spec/lib/banzai/renderer_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Banzai::Renderer do +RSpec.describe Banzai::Renderer, feature_category: :team_planning do let(:renderer) { described_class } def fake_object(fresh:) diff --git a/spec/lib/bulk_imports/clients/graphql_spec.rb b/spec/lib/bulk_imports/clients/graphql_spec.rb index a5b5e96e594..58e6992698c 100644 --- a/spec/lib/bulk_imports/clients/graphql_spec.rb +++ b/spec/lib/bulk_imports/clients/graphql_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe BulkImports::Clients::Graphql do +RSpec.describe BulkImports::Clients::Graphql, feature_category: :importers do let_it_be(:config) { create(:bulk_import_configuration) } subject { described_class.new(url: config.url, token: config.access_token) } @@ -11,30 +11,60 @@ RSpec.describe BulkImports::Clients::Graphql do let(:query) { '{ metadata { version } }' } let(:graphql_client_double) { double } let(:response_double) { double } + let(:version) { '14.0.0' } before do stub_const('BulkImports::MINIMUM_COMPATIBLE_MAJOR_VERSION', version) - allow(graphql_client_double).to receive(:execute) - allow(subject).to receive(:client).and_return(graphql_client_double) - allow(graphql_client_double).to receive(:execute).with(query).and_return(response_double) - allow(response_double).to receive_message_chain(:data, :metadata, :version).and_return(version) end - context 'when source instance is compatible' do - let(:version) { '14.0.0' } + describe 'source instance validation' do + before do + allow(graphql_client_double).to receive(:execute) + allow(subject).to receive(:client).and_return(graphql_client_double) + allow(graphql_client_double).to receive(:execute).with(query).and_return(response_double) + allow(response_double).to receive_message_chain(:data, :metadata, 
:version).and_return(version) + end - it 'marks source instance as compatible' do - subject.execute('test') + context 'when source instance is compatible' do + it 'marks source instance as compatible' do + subject.execute('test') - expect(subject.instance_variable_get(:@compatible_instance_version)).to eq(true) + expect(subject.instance_variable_get(:@compatible_instance_version)).to eq(true) + end + end + + context 'when source instance is incompatible' do + let(:version) { '13.0.0' } + + it 'raises an error' do + expect { subject.execute('test') }.to raise_error(::BulkImports::Error, "Unsupported GitLab version. Source instance must run GitLab version #{BulkImport::MIN_MAJOR_VERSION} or later.") + end end end - context 'when source instance is incompatible' do - let(:version) { '13.0.0' } + describe 'network errors' do + before do + allow(Gitlab::HTTP) + .to receive(:post) + .and_return(response_double) + end + + context 'when response cannot be parsed' do + let(:response_double) { instance_double(HTTParty::Response, body: 'invalid', success?: true) } + + it 'raises network error' do + expect { subject.execute('test') }.to raise_error(BulkImports::NetworkError, /unexpected character/) + end + end + + context 'when response is unsuccessful' do + let(:response_double) { instance_double(HTTParty::Response, success?: false, code: 503) } + + it 'raises network error' do + allow(response_double).to receive_message_chain(:request, :path, :path).and_return('foo/bar') - it 'raises an error' do - expect { subject.execute('test') }.to raise_error(::BulkImports::Error, "Unsupported GitLab Version. 
Minimum Supported Gitlab Version #{BulkImport::MIN_MAJOR_VERSION}.") + expect { subject.execute('test') }.to raise_error(BulkImports::NetworkError, 'Unsuccessful response 503 from foo/bar') + end end end end diff --git a/spec/lib/bulk_imports/common/pipelines/boards_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/boards_pipeline_spec.rb index 241bd694a2c..badc4a45c86 100644 --- a/spec/lib/bulk_imports/common/pipelines/boards_pipeline_spec.rb +++ b/spec/lib/bulk_imports/common/pipelines/boards_pipeline_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe BulkImports::Common::Pipelines::BoardsPipeline do +RSpec.describe BulkImports::Common::Pipelines::BoardsPipeline, feature_category: :importers do let_it_be(:user) { create(:user) } let_it_be(:group) { create(:group) } let_it_be(:project) { create(:project, group: group) } @@ -53,7 +53,7 @@ RSpec.describe BulkImports::Common::Pipelines::BoardsPipeline do project: project, bulk_import: bulk_import, source_full_path: 'source/full/path', - destination_name: 'My Destination Group', + destination_slug: 'My-Destination-Group', destination_namespace: group.full_path ) end @@ -78,7 +78,7 @@ RSpec.describe BulkImports::Common::Pipelines::BoardsPipeline do group: group, bulk_import: bulk_import, source_full_path: 'source/full/path', - destination_name: 'My Destination Group', + destination_slug: 'My-Destination-Group', destination_namespace: group.full_path ) end diff --git a/spec/lib/bulk_imports/common/pipelines/labels_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/labels_pipeline_spec.rb index ac516418ce8..23368f38889 100644 --- a/spec/lib/bulk_imports/common/pipelines/labels_pipeline_spec.rb +++ b/spec/lib/bulk_imports/common/pipelines/labels_pipeline_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe BulkImports::Common::Pipelines::LabelsPipeline do +RSpec.describe BulkImports::Common::Pipelines::LabelsPipeline, feature_category: :importers do let_it_be(:user) { create(:user) } 
let_it_be(:group) { create(:group) } let_it_be(:bulk_import) { create(:bulk_import, user: user) } @@ -13,7 +13,7 @@ RSpec.describe BulkImports::Common::Pipelines::LabelsPipeline do group: group, bulk_import: bulk_import, source_full_path: 'source/full/path', - destination_name: 'My Destination Group', + destination_slug: 'My-Destination-Group', destination_namespace: group.full_path ) end diff --git a/spec/lib/bulk_imports/common/pipelines/milestones_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/milestones_pipeline_spec.rb index 902b29bc365..8a61e927712 100644 --- a/spec/lib/bulk_imports/common/pipelines/milestones_pipeline_spec.rb +++ b/spec/lib/bulk_imports/common/pipelines/milestones_pipeline_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe BulkImports::Common::Pipelines::MilestonesPipeline do +RSpec.describe BulkImports::Common::Pipelines::MilestonesPipeline, feature_category: :importers do let(:user) { create(:user) } let(:group) { create(:group) } let(:bulk_import) { create(:bulk_import, user: user) } @@ -97,7 +97,7 @@ RSpec.describe BulkImports::Common::Pipelines::MilestonesPipeline do group: group, bulk_import: bulk_import, source_full_path: 'source/full/path', - destination_name: 'My Destination Group', + destination_slug: 'My-Destination-Group', destination_namespace: group.full_path ) end @@ -119,7 +119,7 @@ RSpec.describe BulkImports::Common::Pipelines::MilestonesPipeline do project: project, bulk_import: bulk_import, source_full_path: 'source/full/path', - destination_name: 'My Destination Project', + destination_slug: 'My-Destination-Project', destination_namespace: group.full_path ) end diff --git a/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb index 35ca67c8a4c..d6622785e65 100644 --- a/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb +++ b/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb @@ -2,7 +2,7 @@ require 
'spec_helper' -RSpec.describe BulkImports::Common::Pipelines::UploadsPipeline, feature_category: :import do +RSpec.describe BulkImports::Common::Pipelines::UploadsPipeline, feature_category: :importers do let_it_be(:project) { create(:project) } let_it_be(:group) { create(:group) } diff --git a/spec/lib/bulk_imports/common/pipelines/wiki_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/wiki_pipeline_spec.rb index 0eefb7390dc..30eef0b9f9e 100644 --- a/spec/lib/bulk_imports/common/pipelines/wiki_pipeline_spec.rb +++ b/spec/lib/bulk_imports/common/pipelines/wiki_pipeline_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe BulkImports::Common::Pipelines::WikiPipeline do +RSpec.describe BulkImports::Common::Pipelines::WikiPipeline, feature_category: :importers do describe '#run' do let_it_be(:user) { create(:user) } let_it_be(:bulk_import) { create(:bulk_import, user: user) } @@ -14,7 +14,7 @@ RSpec.describe BulkImports::Common::Pipelines::WikiPipeline do :project_entity, bulk_import: bulk_import, source_full_path: 'source/full/path', - destination_name: 'My Destination Wiki', + destination_slug: 'My-Destination-Wiki', destination_namespace: parent.full_path, project: parent ) diff --git a/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb b/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb index 69363bf0866..7d1f9ae5da0 100644 --- a/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb +++ b/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe BulkImports::Groups::Loaders::GroupLoader do +RSpec.describe BulkImports::Groups::Loaders::GroupLoader, feature_category: :importers do describe '#load' do let_it_be(:user) { create(:user) } let_it_be(:bulk_import) { create(:bulk_import, user: user) } @@ -29,19 +29,9 @@ RSpec.describe BulkImports::Groups::Loaders::GroupLoader do end end - context 'when group exists' do - it 'raises an error' do - group1 = create(:group) - group2 = 
create(:group, parent: group1) - entity.update!(destination_namespace: group1.full_path) - data = { 'path' => group2.path } - - expect { subject.load(context, data) }.to raise_error(described_class::GroupCreationError, 'Group exists') - end - end - context 'when there are other group errors' do it 'raises an error with those errors' do + entity.update!(destination_namespace: '') group = ::Group.new group.validate expected_errors = group.errors.full_messages.to_sentence diff --git a/spec/lib/bulk_imports/groups/pipelines/project_entities_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/project_entities_pipeline_spec.rb index c07d27e973f..395f3568913 100644 --- a/spec/lib/bulk_imports/groups/pipelines/project_entities_pipeline_spec.rb +++ b/spec/lib/bulk_imports/groups/pipelines/project_entities_pipeline_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe BulkImports::Groups::Pipelines::ProjectEntitiesPipeline do +RSpec.describe BulkImports::Groups::Pipelines::ProjectEntitiesPipeline, feature_category: :importers do let_it_be(:user) { create(:user) } let_it_be(:destination_group) { create(:group) } @@ -20,8 +20,9 @@ RSpec.describe BulkImports::Groups::Pipelines::ProjectEntitiesPipeline do let(:extracted_data) do BulkImports::Pipeline::ExtractedData.new(data: { 'id' => 'gid://gitlab/Project/1234567', - 'name' => 'project', - 'full_path' => 'group/project' + 'name' => 'My Project', + 'path' => 'my-project', + 'full_path' => 'group/my-project' }) end @@ -42,8 +43,9 @@ RSpec.describe BulkImports::Groups::Pipelines::ProjectEntitiesPipeline do project_entity = BulkImports::Entity.last expect(project_entity.source_type).to eq('project_entity') - expect(project_entity.source_full_path).to eq('group/project') - expect(project_entity.destination_name).to eq('project') + expect(project_entity.source_full_path).to eq('group/my-project') + expect(project_entity.destination_slug).to eq('my-project') + expect(project_entity.destination_name).to eq('my-project') 
expect(project_entity.destination_namespace).to eq(destination_group.full_path) expect(project_entity.source_xid).to eq(1234567) end diff --git a/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb b/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb index 32d8dc8e207..138a92a7e6b 100644 --- a/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb +++ b/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb @@ -2,11 +2,11 @@ require 'spec_helper' -RSpec.describe BulkImports::Groups::Transformers::GroupAttributesTransformer do +RSpec.describe BulkImports::Groups::Transformers::GroupAttributesTransformer, feature_category: :importers do describe '#transform' do - let_it_be(:parent) { create(:group) } - let(:bulk_import) { build_stubbed(:bulk_import) } + let(:destination_group) { create(:group) } + let(:destination_namespace) { destination_group.full_path } let(:entity) do build_stubbed( @@ -14,7 +14,7 @@ RSpec.describe BulkImports::Groups::Transformers::GroupAttributesTransformer do bulk_import: bulk_import, source_full_path: 'source/full/path', destination_slug: 'destination-slug-path', - destination_namespace: parent.full_path + destination_namespace: destination_namespace ) end @@ -40,15 +40,13 @@ RSpec.describe BulkImports::Groups::Transformers::GroupAttributesTransformer do } end - subject { described_class.new } + subject(:transformed_data) { described_class.new.transform(context, data) } it 'returns original data with some keys transformed' do - transformed_data = subject.transform(context, data) - expect(transformed_data).to eq({ 'name' => 'Source Group Name', 'description' => 'Source Group Description', - 'parent_id' => parent.id, + 'parent_id' => destination_group.id, 'path' => entity.destination_slug, 'visibility_level' => Gitlab::VisibilityLevel.string_options[data['visibility']], 'project_creation_level' => 
Gitlab::Access.project_creation_string_options[data['project_creation_level']], @@ -64,21 +62,21 @@ RSpec.describe BulkImports::Groups::Transformers::GroupAttributesTransformer do end context 'when some fields are not present' do - it 'does not include those fields' do - data = { + let(:data) do + { 'name' => 'Source Group Name', 'description' => 'Source Group Description', 'path' => 'source-group-path', 'full_path' => 'source/full/path' } + end - transformed_data = subject.transform(context, data) - + it 'does not include those fields' do expect(transformed_data).to eq({ 'name' => 'Source Group Name', 'path' => 'destination-slug-path', 'description' => 'Source Group Description', - 'parent_id' => parent.id, + 'parent_id' => destination_group.id, 'share_with_group_lock' => nil, 'emails_disabled' => nil, 'lfs_enabled' => nil, @@ -89,9 +87,7 @@ RSpec.describe BulkImports::Groups::Transformers::GroupAttributesTransformer do describe 'parent group transformation' do it 'sets parent id' do - transformed_data = subject.transform(context, data) - - expect(transformed_data['parent_id']).to eq(parent.id) + expect(transformed_data['parent_id']).to eq(destination_group.id) end context 'when destination namespace is empty' do @@ -100,8 +96,6 @@ RSpec.describe BulkImports::Groups::Transformers::GroupAttributesTransformer do end it 'does not set parent id' do - transformed_data = subject.transform(context, data) - expect(transformed_data).not_to have_key('parent_id') end end @@ -114,8 +108,6 @@ RSpec.describe BulkImports::Groups::Transformers::GroupAttributesTransformer do end it 'does not transform name' do - transformed_data = subject.transform(context, data) - expect(transformed_data['name']).to eq('Source Group Name') end end @@ -123,35 +115,39 @@ RSpec.describe BulkImports::Groups::Transformers::GroupAttributesTransformer do context 'when destination namespace is present' do context 'when destination namespace does not have a group with same name' do it 'does not transform 
name' do - transformed_data = subject.transform(context, data) - expect(transformed_data['name']).to eq('Source Group Name') end end context 'when destination namespace already have a group with the same name' do before do - create(:group, parent: parent, name: 'Source Group Name', path: 'group_1') - create(:group, parent: parent, name: 'Source Group Name(1)', path: 'group_2') - create(:group, parent: parent, name: 'Source Group Name(2)', path: 'group_3') - create(:group, parent: parent, name: 'Source Group Name(1)(1)', path: 'group_4') + create(:group, parent: destination_group, name: 'Source Group Name', path: 'group_1') + create(:group, parent: destination_group, name: 'Source Group Name(1)', path: 'group_2') + create(:group, parent: destination_group, name: 'Source Group Name(2)', path: 'group_3') + create(:group, parent: destination_group, name: 'Source Group Name(1)(1)', path: 'group_4') end it 'makes the name unique by appeding a counter', :aggregate_failures do - transformed_data = subject.transform(context, data.merge('name' => 'Source Group Name')) + transformed_data = described_class.new.transform(context, data.merge('name' => 'Source Group Name')) expect(transformed_data['name']).to eq('Source Group Name(3)') - transformed_data = subject.transform(context, data.merge('name' => 'Source Group Name(2)')) + transformed_data = described_class.new.transform(context, data.merge('name' => 'Source Group Name(2)')) expect(transformed_data['name']).to eq('Source Group Name(2)(1)') - transformed_data = subject.transform(context, data.merge('name' => 'Source Group Name(1)')) + transformed_data = described_class.new.transform(context, data.merge('name' => 'Source Group Name(1)')) expect(transformed_data['name']).to eq('Source Group Name(1)(2)') - transformed_data = subject.transform(context, data.merge('name' => 'Source Group Name(1)(1)')) + transformed_data = described_class.new.transform(context, data.merge('name' => 'Source Group Name(1)(1)')) 
expect(transformed_data['name']).to eq('Source Group Name(1)(1)(1)') end end end end + + describe 'visibility level' do + subject(:transformed_data) { described_class.new.transform(context, data) } + + include_examples 'visibility level settings' + end end end diff --git a/spec/lib/bulk_imports/pipeline/context_spec.rb b/spec/lib/bulk_imports/pipeline/context_spec.rb index 83d6f494d53..0f1d00172cd 100644 --- a/spec/lib/bulk_imports/pipeline/context_spec.rb +++ b/spec/lib/bulk_imports/pipeline/context_spec.rb @@ -14,7 +14,7 @@ RSpec.describe BulkImports::Pipeline::Context do create( :bulk_import_entity, source_full_path: 'source/full/path', - destination_name: 'My Destination Group', + destination_slug: 'My-Destination-Group', destination_namespace: group.full_path, group: group, bulk_import: bulk_import diff --git a/spec/lib/bulk_imports/projects/pipelines/auto_devops_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/auto_devops_pipeline_spec.rb index e2744a6a457..b35ba612197 100644 --- a/spec/lib/bulk_imports/projects/pipelines/auto_devops_pipeline_spec.rb +++ b/spec/lib/bulk_imports/projects/pipelines/auto_devops_pipeline_spec.rb @@ -14,7 +14,7 @@ RSpec.describe BulkImports::Projects::Pipelines::AutoDevopsPipeline do project: project, bulk_import: bulk_import, source_full_path: 'source/full/path', - destination_name: 'My Destination Project', + destination_slug: 'My-Destination-Project', destination_namespace: group.full_path ) end diff --git a/spec/lib/bulk_imports/projects/pipelines/ci_pipelines_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/ci_pipelines_pipeline_spec.rb index 98a2e8b6a57..a78f524b227 100644 --- a/spec/lib/bulk_imports/projects/pipelines/ci_pipelines_pipeline_spec.rb +++ b/spec/lib/bulk_imports/projects/pipelines/ci_pipelines_pipeline_spec.rb @@ -14,7 +14,7 @@ RSpec.describe BulkImports::Projects::Pipelines::CiPipelinesPipeline do project: project, bulk_import: bulk_import, source_full_path: 'source/full/path', - 
destination_name: 'My Destination Project', + destination_slug: 'My-Destination-Project', destination_namespace: group.full_path ) end diff --git a/spec/lib/bulk_imports/projects/pipelines/issues_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/issues_pipeline_spec.rb index 19e3a1fecc3..a0789522ea8 100644 --- a/spec/lib/bulk_imports/projects/pipelines/issues_pipeline_spec.rb +++ b/spec/lib/bulk_imports/projects/pipelines/issues_pipeline_spec.rb @@ -14,7 +14,7 @@ RSpec.describe BulkImports::Projects::Pipelines::IssuesPipeline do project: project, bulk_import: bulk_import, source_full_path: 'source/full/path', - destination_name: 'My Destination Project', + destination_slug: 'My-Destination-Project', destination_namespace: group.full_path ) end diff --git a/spec/lib/bulk_imports/projects/pipelines/merge_requests_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/merge_requests_pipeline_spec.rb index e780cde4ae2..5b85b3eee79 100644 --- a/spec/lib/bulk_imports/projects/pipelines/merge_requests_pipeline_spec.rb +++ b/spec/lib/bulk_imports/projects/pipelines/merge_requests_pipeline_spec.rb @@ -15,7 +15,7 @@ RSpec.describe BulkImports::Projects::Pipelines::MergeRequestsPipeline do project: project, bulk_import: bulk_import, source_full_path: 'source/full/path', - destination_name: 'My Destination Project', + destination_slug: 'My-Destination-Project', destination_namespace: group.full_path ) end diff --git a/spec/lib/bulk_imports/projects/pipelines/pipeline_schedules_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/pipeline_schedules_pipeline_spec.rb index 12713f008bb..0bfd9410808 100644 --- a/spec/lib/bulk_imports/projects/pipelines/pipeline_schedules_pipeline_spec.rb +++ b/spec/lib/bulk_imports/projects/pipelines/pipeline_schedules_pipeline_spec.rb @@ -14,7 +14,7 @@ RSpec.describe BulkImports::Projects::Pipelines::PipelineSchedulesPipeline do project: project, bulk_import: bulk_import, source_full_path: 'source/full/path', - 
destination_name: 'My Destination Project', + destination_slug: 'My-Destination-Project', destination_namespace: group.full_path ) end diff --git a/spec/lib/bulk_imports/projects/pipelines/project_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/project_pipeline_spec.rb index 567a0a4fcc3..09385a261b6 100644 --- a/spec/lib/bulk_imports/projects/pipelines/project_pipeline_spec.rb +++ b/spec/lib/bulk_imports/projects/pipelines/project_pipeline_spec.rb @@ -14,7 +14,7 @@ RSpec.describe BulkImports::Projects::Pipelines::ProjectPipeline do source_type: :project_entity, bulk_import: bulk_import, source_full_path: 'source/full/path', - destination_name: 'My Destination Project', + destination_slug: 'My-Destination-Project', destination_namespace: group.full_path ) end diff --git a/spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb index 3c3d0a6d1c4..895d37ea385 100644 --- a/spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb +++ b/spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline do +RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline, feature_category: :importers do let_it_be(:user) { create(:user) } let_it_be(:project) { create(:project) } let_it_be(:bulk_import) { create(:bulk_import, user: user) } @@ -19,11 +19,44 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline do let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) } let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) } - let(:issue) { create(:issue, project: project, description: 'https://my.gitlab.com/source/full/path/-/issues/1') } - let(:mr) { create(:merge_request, source_project: project, description: 'https://my.gitlab.com/source/full/path/-/merge_requests/1') } - let(:issue_note) { create(:note, project: 
project, noteable: issue, note: 'https://my.gitlab.com/source/full/path/-/issues/1') } - let(:mr_note) { create(:note, project: project, noteable: mr, note: 'https://my.gitlab.com/source/full/path/-/merge_requests/1') } + let(:mr) do + create( + :merge_request, + source_project: project, + description: 'https://my.gitlab.com/source/full/path/-/merge_requests/1' + ) + end + + let(:issue_note) do + create( + :note, + project: project, + noteable: issue, + note: 'https://my.gitlab.com/source/full/path/-/issues/1' + ) + end + + let(:mr_note) do + create( + :note, + project: project, + noteable: mr, + note: 'https://my.gitlab.com/source/full/path/-/merge_requests/1' + ) + end + + let(:old_note_html) { 'old note_html' } + let(:system_note) do + create( + :note, + project: project, + system: true, + noteable: issue, + note: "mentioned in merge request !#{mr.iid}", + note_html: old_note_html + ) + end subject(:pipeline) { described_class.new(context) } @@ -32,7 +65,7 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline do end def create_project_data - [issue, mr, issue_note, mr_note] + [issue, mr, issue_note, mr_note, system_note] end describe '#extract' do @@ -43,6 +76,10 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline do expect(extracted_data).to be_instance_of(BulkImports::Pipeline::ExtractedData) expect(extracted_data.data).to contain_exactly(issue_note, mr, issue, mr_note) + expect(system_note.note_html).not_to eq(old_note_html) + expect(system_note.note_html) + .to include("class=\"gfm gfm-merge_request\">!#{mr.iid}</a></p>") + .and include(project.full_path.to_s) end end @@ -122,9 +159,11 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline do it 'does not save the object' do expect(mr).not_to receive(:save!) expect(mr_note).not_to receive(:save!) + expect(system_note).not_to receive(:save!) 
subject.load(context, mr) subject.load(context, mr_note) + subject.load(context, system_note) end end end diff --git a/spec/lib/bulk_imports/projects/pipelines/releases_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/releases_pipeline_spec.rb index a376cdd712c..339ca727b57 100644 --- a/spec/lib/bulk_imports/projects/pipelines/releases_pipeline_spec.rb +++ b/spec/lib/bulk_imports/projects/pipelines/releases_pipeline_spec.rb @@ -14,7 +14,7 @@ RSpec.describe BulkImports::Projects::Pipelines::ReleasesPipeline do project: project, bulk_import: bulk_import, source_full_path: 'source/full/path', - destination_name: 'My Destination Project', + destination_slug: 'My-Destination-Project', destination_namespace: group.full_path ) end diff --git a/spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb index a968104fc91..0cc8da80a61 100644 --- a/spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb +++ b/spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe BulkImports::Projects::Pipelines::RepositoryPipeline do +RSpec.describe BulkImports::Projects::Pipelines::RepositoryPipeline, feature_category: :importers do let_it_be(:user) { create(:user) } let_it_be(:parent) { create(:project) } let_it_be(:bulk_import) { create(:bulk_import, user: user) } @@ -14,7 +14,7 @@ RSpec.describe BulkImports::Projects::Pipelines::RepositoryPipeline do :project_entity, bulk_import: bulk_import, source_full_path: 'source/full/path', - destination_name: 'My Destination Repository', + destination_slug: 'My-Destination-Repository', destination_namespace: parent.full_path, project: parent ) diff --git a/spec/lib/bulk_imports/projects/pipelines/snippets_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/snippets_pipeline_spec.rb index dae879de998..41b3ea37804 100644 --- 
a/spec/lib/bulk_imports/projects/pipelines/snippets_pipeline_spec.rb +++ b/spec/lib/bulk_imports/projects/pipelines/snippets_pipeline_spec.rb @@ -14,7 +14,7 @@ RSpec.describe BulkImports::Projects::Pipelines::SnippetsPipeline do project: project, bulk_import: bulk_import, source_full_path: 'source/full/path', - destination_name: 'My Destination Project', + destination_slug: 'My-Destination-Project', destination_namespace: group.full_path ) end diff --git a/spec/lib/bulk_imports/projects/pipelines/snippets_repository_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/snippets_repository_pipeline_spec.rb index dfd01cdf4bb..56c0f8c8807 100644 --- a/spec/lib/bulk_imports/projects/pipelines/snippets_repository_pipeline_spec.rb +++ b/spec/lib/bulk_imports/projects/pipelines/snippets_repository_pipeline_spec.rb @@ -15,7 +15,7 @@ RSpec.describe BulkImports::Projects::Pipelines::SnippetsRepositoryPipeline do project: project, bulk_import: bulk_import_configuration.bulk_import, source_full_path: 'source/full/path', - destination_name: 'My Destination Project', + destination_slug: 'My-Destination-Project', destination_namespace: project.full_path ) end diff --git a/spec/lib/bulk_imports/projects/transformers/project_attributes_transformer_spec.rb b/spec/lib/bulk_imports/projects/transformers/project_attributes_transformer_spec.rb index c1c4d0bf0db..36dc63a9331 100644 --- a/spec/lib/bulk_imports/projects/transformers/project_attributes_transformer_spec.rb +++ b/spec/lib/bulk_imports/projects/transformers/project_attributes_transformer_spec.rb @@ -2,27 +2,27 @@ require 'spec_helper' -RSpec.describe BulkImports::Projects::Transformers::ProjectAttributesTransformer do +RSpec.describe BulkImports::Projects::Transformers::ProjectAttributesTransformer, feature_category: :importers do describe '#transform' do let_it_be(:user) { create(:user) } - let_it_be(:destination_group) { create(:group) } let_it_be(:project) { create(:project, name: 'My Source Project') } 
let_it_be(:bulk_import) { create(:bulk_import, user: user) } - let_it_be(:entity) do + let(:entity) do create( :bulk_import_entity, source_type: :project_entity, bulk_import: bulk_import, source_full_path: 'source/full/path', - destination_slug: 'Destination Project Name', - destination_namespace: destination_group.full_path + destination_slug: 'Destination-Project-Name', + destination_namespace: destination_namespace ) end - let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) } - let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) } - + let(:destination_group) { create(:group) } + let(:destination_namespace) { destination_group.full_path } + let(:tracker) { create(:bulk_import_tracker, entity: entity) } + let(:context) { BulkImports::Pipeline::Context.new(tracker) } let(:data) do { 'visibility' => 'private', @@ -40,13 +40,6 @@ RSpec.describe BulkImports::Projects::Transformers::ProjectAttributesTransformer expect(transformed_data[:path]).to eq(entity.destination_slug.parameterize) end - it 'transforms visibility level' do - visibility = data['visibility'] - - expect(transformed_data).not_to have_key(:visibility) - expect(transformed_data[:visibility_level]).to eq(Gitlab::VisibilityLevel.string_options[visibility]) - end - it 'adds import type' do expect(transformed_data[:import_type]).to eq(described_class::PROJECT_IMPORT_TYPE) end @@ -65,7 +58,7 @@ RSpec.describe BulkImports::Projects::Transformers::ProjectAttributesTransformer source_type: :project_entity, bulk_import: bulk_import, source_full_path: 'source/full/path', - destination_slug: 'Destination Project Name', + destination_slug: 'Destination-Project-Name', destination_namespace: '' ) @@ -89,8 +82,12 @@ RSpec.describe BulkImports::Projects::Transformers::ProjectAttributesTransformer transformed_data = described_class.new.transform(context, data) expect(transformed_data.keys) - .to contain_exactly(:created_at, :import_type, :name, :namespace_id, :path, :visibility_level) + .to 
contain_exactly('created_at', 'import_type', 'name', 'namespace_id', 'path', 'visibility_level') end end + + describe 'visibility level' do + include_examples 'visibility level settings' + end end end diff --git a/spec/lib/extracts_ref_spec.rb b/spec/lib/extracts_ref_spec.rb index ca8af9413f3..93a09bf5a0a 100644 --- a/spec/lib/extracts_ref_spec.rb +++ b/spec/lib/extracts_ref_spec.rb @@ -48,13 +48,11 @@ RSpec.describe ExtractsRef do context 'when a ref_type parameter is provided' do let(:params) { ActionController::Parameters.new(path: path, ref: ref, ref_type: 'tags') } - context 'and the use_ref_type_parameter feature flag is enabled' do - it 'sets a fully_qualified_ref variable' do - fully_qualified_ref = "refs/tags/#{ref}" - expect(container.repository).to receive(:commit).with(fully_qualified_ref) - assign_ref_vars - expect(@fully_qualified_ref).to eq(fully_qualified_ref) - end + it 'sets a fully_qualified_ref variable' do + fully_qualified_ref = "refs/tags/#{ref}" + expect(container.repository).to receive(:commit).with(fully_qualified_ref) + assign_ref_vars + expect(@fully_qualified_ref).to eq(fully_qualified_ref) end end end diff --git a/spec/lib/feature_groups/gitlab_team_members_spec.rb b/spec/lib/feature_groups/gitlab_team_members_spec.rb new file mode 100644 index 00000000000..f4db02e6c58 --- /dev/null +++ b/spec/lib/feature_groups/gitlab_team_members_spec.rb @@ -0,0 +1,65 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe FeatureGroups::GitlabTeamMembers, feature_category: :shared do + let_it_be(:gitlab_com) { create(:group) } + let_it_be_with_reload(:member) { create(:user).tap { |user| gitlab_com.add_developer(user) } } + let_it_be_with_reload(:non_member) { create(:user) } + + before do + stub_const("#{described_class.name}::GITLAB_COM_GROUP_ID", gitlab_com.id) + end + + describe '#enabled?' 
do + context 'when not on gitlab.com' do + before do + allow(Gitlab).to receive(:com?).and_return(false) + end + + it 'returns false' do + expect(described_class.enabled?(member)).to eq(false) + end + end + + context 'when on gitlab.com' do + before do + allow(Gitlab).to receive(:com?).and_return(true) + end + + it 'returns true for gitlab-com group members' do + expect(described_class.enabled?(member)).to eq(true) + end + + it 'returns false for users not in gitlab-com' do + expect(described_class.enabled?(non_member)).to eq(false) + end + + it 'returns false when actor is not a user' do + expect(described_class.enabled?(gitlab_com)).to eq(false) + end + + it 'reloads members after 1 hour' do + expect(described_class.enabled?(non_member)).to eq(false) + + gitlab_com.add_developer(non_member) + + travel_to(2.hours.from_now) do + expect(described_class.enabled?(non_member)).to eq(true) + end + end + + it 'does not make queries on subsequent calls', :use_clean_rails_memory_store_caching do + described_class.enabled?(member) + non_member + + queries = ActiveRecord::QueryRecorder.new do + described_class.enabled?(member) + described_class.enabled?(non_member) + end + + expect(queries.count).to eq(0) + end + end + end +end diff --git a/spec/lib/feature_spec.rb b/spec/lib/feature_spec.rb index c087931d36a..c86bc36057a 100644 --- a/spec/lib/feature_spec.rb +++ b/spec/lib/feature_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Feature, stub_feature_flags: false do +RSpec.describe Feature, stub_feature_flags: false, feature_category: :shared do include StubVersion before do @@ -154,6 +154,17 @@ RSpec.describe Feature, stub_feature_flags: false do end end + describe '.register_feature_groups' do + before do + Flipper.unregister_groups + described_class.register_feature_groups + end + + it 'registers expected groups' do + expect(Flipper.groups).to include(an_object_having_attributes(name: :gitlab_team_members)) + end + end + describe '.enabled?' 
do before do allow(Feature).to receive(:log_feature_flag_states?).and_return(false) @@ -350,6 +361,22 @@ RSpec.describe Feature, stub_feature_flags: false do end end + context 'with gitlab_team_members feature group' do + let(:actor) { build_stubbed(:user) } + + before do + Flipper.unregister_groups + described_class.register_feature_groups + described_class.enable(:enabled_feature_flag, :gitlab_team_members) + end + + it 'delegates check to FeatureGroups::GitlabTeamMembers' do + expect(FeatureGroups::GitlabTeamMembers).to receive(:enabled?).with(actor) + + described_class.enabled?(:enabled_feature_flag, actor) + end + end + context 'with an individual actor' do let(:actor) { stub_feature_flag_gate('CustomActor:5') } let(:another_actor) { stub_feature_flag_gate('CustomActor:10') } diff --git a/spec/lib/generators/gitlab/partitioning/foreign_keys_generator_spec.rb b/spec/lib/generators/gitlab/partitioning/foreign_keys_generator_spec.rb new file mode 100644 index 00000000000..7c7ca8207ff --- /dev/null +++ b/spec/lib/generators/gitlab/partitioning/foreign_keys_generator_spec.rb @@ -0,0 +1,127 @@ +# frozen_string_literal: true + +require 'spec_helper' +require 'active_support/testing/stream' + +RSpec.describe Gitlab::Partitioning::ForeignKeysGenerator, :migration, :silence_stdout, +feature_category: :continuous_integration do + include ActiveSupport::Testing::Stream + include MigrationsHelpers + + before do + ActiveRecord::Schema.define do + create_table :_test_tmp_builds, force: :cascade do |t| + t.integer :partition_id + t.index [:id, :partition_id], unique: true + end + + create_table :_test_tmp_metadata, force: :cascade do |t| + t.integer :partition_id + t.references :builds, foreign_key: { to_table: :_test_tmp_builds, on_delete: :cascade } + end + end + end + + after do + FileUtils.rm_rf(destination_root) + + table(:schema_migrations).where(version: migrations.map(&:version)).delete_all + + active_record_base.connection.execute(<<~SQL) + DROP TABLE 
_test_tmp_metadata; + DROP TABLE _test_tmp_builds; + SQL + end + + let_it_be(:destination_root) { File.expand_path("../tmp", __dir__) } + + let(:generator_config) { { destination_root: destination_root } } + let(:generator_args) { ['--source', '_test_tmp_metadata', '--target', '_test_tmp_builds', '--database', 'main'] } + + context 'without foreign keys' do + let(:generator_args) { ['--source', '_test_tmp_metadata', '--target', 'projects', '--database', 'main'] } + + it 'does not generate migrations' do + output = capture(:stderr) { run_generator } + + expect(migrations).to be_empty + expect(output).to match(/No FK found between _test_tmp_metadata and projects/) + end + end + + context 'with one FK' do + it 'generates foreign key migrations' do + run_generator + + expect(migrations.sort_by(&:version).map(&:name)).to eq(%w[ + AddFkIndexToTestTmpMetadataOnPartitionIdAndBuildsId + AddFkToTestTmpMetadataOnPartitionIdAndBuildsId + ValidateFkOnTestTmpMetadataPartitionIdAndBuildsId + RemoveFkToTestTmpBuildsTestTmpMetadataOnBuildsId + ]) + + schema_migrate_up! 
+ + fks = Gitlab::Database::PostgresForeignKey + .by_referenced_table_identifier('public._test_tmp_builds') + .by_constrained_table_identifier('public._test_tmp_metadata') + + expect(fks.size).to eq(1) + + foreign_key = fks.first + + expect(foreign_key.name).to end_with('_p') + expect(foreign_key.constrained_columns).to eq(%w[partition_id builds_id]) + expect(foreign_key.referenced_columns).to eq(%w[partition_id id]) + expect(foreign_key.on_delete_action).to eq('cascade') + expect(foreign_key.on_update_action).to eq('cascade') + + index = active_record_base.connection.indexes('_test_tmp_metadata').find do |index| + index.columns == %w[partition_id builds_id] + end + + expect(index).to be_present + end + end + + context 'with many FKs' do + before do + ActiveRecord::Schema.define do + add_reference :_test_tmp_metadata, :job, + foreign_key: { to_table: :_test_tmp_builds, on_delete: :cascade } + end + end + + it 'generates migrations for the selected FK' do + expect(Thor::LineEditor) + .to receive(:readline) + .with('Please select one: [0, 1] (0) ', { default: '0', limited_to: %w[0 1] }) + .and_return('1') + + run_generator + + expect(migrations.sort_by(&:version).map(&:name)).to eq(%w[ + AddFkIndexToTestTmpMetadataOnPartitionIdAndJobId + AddFkToTestTmpMetadataOnPartitionIdAndJobId + ValidateFkOnTestTmpMetadataPartitionIdAndJobId + RemoveFkToTestTmpBuildsTestTmpMetadataOnJobId + ]) + end + end + + def run_generator(args = generator_args, config = generator_config) + described_class.start(args, config) + end + + # We want to execute only the newly generated migrations + def migrations_paths + [File.join(destination_root, 'db', 'post_migrate')] + end + + # There is no need to migrate down before executing the tests because these + # migrations were not already executed and we don't need to run it after + # the tests because we're removing the tables. + def schema_migrate_down! 
+ # no-op + end +end diff --git a/spec/lib/gitlab/analytics/cycle_analytics/aggregated/base_query_builder_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/aggregated/base_query_builder_spec.rb index bf2f8d8159b..bd6af4269b4 100644 --- a/spec/lib/gitlab/analytics/cycle_analytics/aggregated/base_query_builder_spec.rb +++ b/spec/lib/gitlab/analytics/cycle_analytics/aggregated/base_query_builder_spec.rb @@ -17,7 +17,7 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::Aggregated::BaseQueryBuilder d let_it_be(:issue_outside_project) { create(:issue) } let_it_be(:stage) do - create(:cycle_analytics_project_stage, + create(:cycle_analytics_stage, project: project, start_event_identifier: :issue_created, end_event_identifier: :issue_deployed_to_production diff --git a/spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb index 7e36d89a2a1..aa0a1b66eef 100644 --- a/spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb +++ b/spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb @@ -8,7 +8,7 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::Aggregated::RecordsFetcher do let_it_be(:issue_2) { create(:issue, project: project) } let_it_be(:issue_3) { create(:issue, project: project) } - let_it_be(:stage) { create(:cycle_analytics_project_stage, start_event_identifier: :issue_created, end_event_identifier: :issue_deployed_to_production, project: project) } + let_it_be(:stage) { create(:cycle_analytics_stage, start_event_identifier: :issue_created, end_event_identifier: :issue_deployed_to_production, namespace: project.reload.project_namespace) } let_it_be(:stage_event_1) { create(:cycle_analytics_issue_stage_event, stage_event_hash_id: stage.stage_event_hash_id, project_id: project.id, issue_id: issue_1.id, start_event_timestamp: 2.years.ago, end_event_timestamp: 1.year.ago) } # duration: 1 year let_it_be(:stage_event_2) { 
create(:cycle_analytics_issue_stage_event, stage_event_hash_id: stage.stage_event_hash_id, project_id: project.id, issue_id: issue_2.id, start_event_timestamp: 5.years.ago, end_event_timestamp: 2.years.ago) } # duration: 3 years diff --git a/spec/lib/gitlab/analytics/cycle_analytics/average_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/average_spec.rb index e2fdd4918d5..de325454b34 100644 --- a/spec/lib/gitlab/analytics/cycle_analytics/average_spec.rb +++ b/spec/lib/gitlab/analytics/cycle_analytics/average_spec.rb @@ -21,7 +21,7 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::Average do let(:stage) do build( - :cycle_analytics_project_stage, + :cycle_analytics_stage, start_event_identifier: Gitlab::Analytics::CycleAnalytics::StageEvents::IssueCreated.identifier, end_event_identifier: Gitlab::Analytics::CycleAnalytics::StageEvents::IssueFirstMentionedInCommit.identifier, project: project diff --git a/spec/lib/gitlab/analytics/cycle_analytics/base_query_builder_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/base_query_builder_spec.rb index 271022e7c55..59b2bacea50 100644 --- a/spec/lib/gitlab/analytics/cycle_analytics/base_query_builder_spec.rb +++ b/spec/lib/gitlab/analytics/cycle_analytics/base_query_builder_spec.rb @@ -10,10 +10,10 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::BaseQueryBuilder do let(:params) { { current_user: user } } let(:records) do - stage = build(:cycle_analytics_project_stage, { + stage = build(:cycle_analytics_stage, { start_event_identifier: :merge_request_created, end_event_identifier: :merge_request_merged, - project: project + namespace: project.reload.project_namespace }) described_class.new(stage: stage, params: params).build.to_a end @@ -25,6 +25,14 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::BaseQueryBuilder do freeze_time end + context 'when an unknown parent class is given' do + it 'raises error' do + stage = instance_double('Analytics::CycleAnalytics::Stage', parent: Issue.new) + + expect { 
described_class.new(stage: stage) }.to raise_error(/unknown parent_class: Issue/) + end + end + describe 'date range parameters' do context 'when filters by only the `from` parameter' do before do diff --git a/spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb index 4db5d64164e..39cfab49f54 100644 --- a/spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb +++ b/spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb @@ -9,10 +9,10 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::Median do let(:stage) do build( - :cycle_analytics_project_stage, + :cycle_analytics_stage, start_event_identifier: Gitlab::Analytics::CycleAnalytics::StageEvents::MergeRequestCreated.identifier, end_event_identifier: Gitlab::Analytics::CycleAnalytics::StageEvents::MergeRequestMerged.identifier, - project: project + namespace: project.reload.project_namespace ) end diff --git a/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb index 7f70a4cfc4e..e9a9dfeca82 100644 --- a/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb +++ b/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb @@ -58,7 +58,7 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::RecordsFetcher do let_it_be(:issue2) { create(:issue, project: project, confidential: true) } let(:stage) do - build(:cycle_analytics_project_stage, { + build(:cycle_analytics_stage, { start_event_identifier: :plan_stage_start, end_event_identifier: :issue_first_mentioned_in_commit, project: project @@ -88,7 +88,7 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::RecordsFetcher do let(:mr1) { create(:merge_request, created_at: 5.days.ago, source_project: project, allow_broken: true) } let(:mr2) { create(:merge_request, created_at: 4.days.ago, source_project: project, allow_broken: true) } let(:stage) do - build(:cycle_analytics_project_stage, { + 
build(:cycle_analytics_stage, { start_event_identifier: :merge_request_created, end_event_identifier: :merge_request_merged, project: project @@ -110,10 +110,10 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::RecordsFetcher do let_it_be(:issue3) { create(:issue, project: project) } let(:stage) do - build(:cycle_analytics_project_stage, { + build(:cycle_analytics_stage, { start_event_identifier: :plan_stage_start, end_event_identifier: :issue_first_mentioned_in_commit, - project: project + namespace: project.project_namespace }) end diff --git a/spec/lib/gitlab/analytics/cycle_analytics/sorting_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/sorting_spec.rb index daf85ea379a..d0ada3d195b 100644 --- a/spec/lib/gitlab/analytics/cycle_analytics/sorting_spec.rb +++ b/spec/lib/gitlab/analytics/cycle_analytics/sorting_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe Gitlab::Analytics::CycleAnalytics::Sorting do - let(:stage) { build(:cycle_analytics_project_stage, start_event_identifier: :merge_request_created, end_event_identifier: :merge_request_merged) } + let(:stage) { build(:cycle_analytics_stage, start_event_identifier: :merge_request_created, end_event_identifier: :merge_request_merged) } subject(:order_values) { described_class.new(query: MergeRequest.joins(:metrics), stage: stage).apply(sort, direction).order_values } diff --git a/spec/lib/gitlab/api_authentication/token_resolver_spec.rb b/spec/lib/gitlab/api_authentication/token_resolver_spec.rb index 9f86b95651a..c0c8e7aba63 100644 --- a/spec/lib/gitlab/api_authentication/token_resolver_spec.rb +++ b/spec/lib/gitlab/api_authentication/token_resolver_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::APIAuthentication::TokenResolver do +RSpec.describe Gitlab::APIAuthentication::TokenResolver, feature_category: :authentication_and_authorization do let_it_be(:user) { create(:user) } let_it_be(:project, reload: true) { create(:project, :public) } 
let_it_be(:personal_access_token) { create(:personal_access_token, user: user) } @@ -115,9 +115,9 @@ RSpec.describe Gitlab::APIAuthentication::TokenResolver do it_behaves_like 'an unauthorized request' end - context 'when the external_authorization_service is enabled' do + context 'when the the deploy token is restricted with external_authorization' do before do - stub_application_setting(external_authorization_service_enabled: true) + allow(Gitlab::ExternalAuthorization).to receive(:allow_deploy_tokens_and_deploy_keys?).and_return(false) end context 'with a valid deploy token' do diff --git a/spec/lib/gitlab/asciidoc_spec.rb b/spec/lib/gitlab/asciidoc_spec.rb index 3d9076c6fa7..cb9d1e9eae8 100644 --- a/spec/lib/gitlab/asciidoc_spec.rb +++ b/spec/lib/gitlab/asciidoc_spec.rb @@ -855,8 +855,8 @@ module Gitlab end end - def render(*args) - described_class.render(*args) + def render(...) + described_class.render(...) end end end diff --git a/spec/lib/gitlab/auth/auth_finders_spec.rb b/spec/lib/gitlab/auth/auth_finders_spec.rb index 484b4702497..6aedd0a0a23 100644 --- a/spec/lib/gitlab/auth/auth_finders_spec.rb +++ b/spec/lib/gitlab/auth/auth_finders_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Auth::AuthFinders do +RSpec.describe Gitlab::Auth::AuthFinders, feature_category: :authentication_and_authorization do include described_class include HttpBasicAuthHelpers @@ -390,9 +390,9 @@ RSpec.describe Gitlab::Auth::AuthFinders do end end - context 'when the external_authorization_service is enabled' do + context 'when the the deploy token is restricted with external_authorization' do before do - stub_application_setting(external_authorization_service_enabled: true) + allow(Gitlab::ExternalAuthorization).to receive(:allow_deploy_tokens_and_deploy_keys?).and_return(false) set_header(described_class::DEPLOY_TOKEN_HEADER, deploy_token.token) end @@ -470,7 +470,7 @@ RSpec.describe Gitlab::Auth::AuthFinders do expect { find_user_from_access_token }.to 
raise_error(Gitlab::Auth::UnauthorizedError) end - context 'no feed, API or archive requests' do + context 'no feed, API, archive or download requests' do it 'returns nil if the request is not RSS' do expect(find_user_from_web_access_token(:rss)).to be_nil end @@ -486,6 +486,10 @@ RSpec.describe Gitlab::Auth::AuthFinders do it 'returns nil if the request is not ARCHIVE' do expect(find_user_from_web_access_token(:archive)).to be_nil end + + it 'returns nil if the request is not DOWNLOAD' do + expect(find_user_from_web_access_token(:download)).to be_nil + end end it 'returns the user for RSS requests' do @@ -506,6 +510,12 @@ RSpec.describe Gitlab::Auth::AuthFinders do expect(find_user_from_web_access_token(:archive)).to eq(user) end + it 'returns the user for DOWNLOAD requests' do + set_header('SCRIPT_NAME', '/-/1.0.0/downloads/main.zip') + + expect(find_user_from_web_access_token(:download)).to eq(user) + end + context 'for API requests' do it 'returns the user' do set_header('SCRIPT_NAME', '/api/endpoint') diff --git a/spec/lib/gitlab/auth/ip_rate_limiter_spec.rb b/spec/lib/gitlab/auth/ip_rate_limiter_spec.rb index 3d9be4c3489..c5703cf5c24 100644 --- a/spec/lib/gitlab/auth/ip_rate_limiter_spec.rb +++ b/spec/lib/gitlab/auth/ip_rate_limiter_spec.rb @@ -19,7 +19,7 @@ RSpec.describe Gitlab::Auth::IpRateLimiter, :use_clean_rails_memory_store_cachin before do stub_rack_attack_setting(options) - Rack::Attack.reset! 
+ Gitlab::Redis::RateLimiting.with(&:flushdb) Rack::Attack.clear_configuration Gitlab::RackAttack.configure(Rack::Attack) end diff --git a/spec/lib/gitlab/auth/o_auth/user_spec.rb b/spec/lib/gitlab/auth/o_auth/user_spec.rb index beeb3ca7011..a5cbad74829 100644 --- a/spec/lib/gitlab/auth/o_auth/user_spec.rb +++ b/spec/lib/gitlab/auth/o_auth/user_spec.rb @@ -418,25 +418,54 @@ RSpec.describe Gitlab::Auth::OAuth::User, feature_category: :authentication_and_ end context "and LDAP user has an account already" do - let!(:existing_user) { create(:omniauth_user, name: 'John Doe', email: 'john@example.com', extern_uid: dn, provider: 'ldapmain', username: 'john') } + context 'when sync_name is disabled' do + before do + allow(Gitlab.config.ldap).to receive(:sync_name).and_return(false) + end - it "adds the omniauth identity to the LDAP account" do - allow(Gitlab::Auth::Ldap::Person).to receive(:find_by_uid).and_return(ldap_user) + let!(:existing_user) { create(:omniauth_user, name: 'John Doe', email: 'john@example.com', extern_uid: dn, provider: 'ldapmain', username: 'john') } - oauth_user.save # rubocop:disable Rails/SaveBang + it "adds the omniauth identity to the LDAP account" do + allow(Gitlab::Auth::Ldap::Person).to receive(:find_by_uid).and_return(ldap_user) - expect(gl_user).to be_valid - expect(gl_user.username).to eql 'john' - expect(gl_user.name).to eql 'John Doe' - expect(gl_user.email).to eql 'john@example.com' - expect(gl_user.identities.length).to be 2 - identities_as_hash = gl_user.identities.map { |id| { provider: id.provider, extern_uid: id.extern_uid } } - expect(identities_as_hash).to match_array( - [ - { provider: 'ldapmain', extern_uid: dn }, - { provider: 'twitter', extern_uid: uid } - ] - ) + oauth_user.save # rubocop:disable Rails/SaveBang + + expect(gl_user).to be_valid + expect(gl_user.username).to eql 'john' + expect(gl_user.name).to eql 'John Doe' + expect(gl_user.email).to eql 'john@example.com' + expect(gl_user.identities.length).to be 2 + 
identities_as_hash = gl_user.identities.map { |id| { provider: id.provider, extern_uid: id.extern_uid } } + expect(identities_as_hash).to match_array( + [ + { provider: 'ldapmain', extern_uid: dn }, + { provider: 'twitter', extern_uid: uid } + ] + ) + end + end + + context 'when sync_name is enabled' do + let!(:existing_user) { create(:omniauth_user, name: 'John Swift', email: 'john@example.com', extern_uid: dn, provider: 'ldapmain', username: 'john') } + + it "adds the omniauth identity to the LDAP account" do + allow(Gitlab::Auth::Ldap::Person).to receive(:find_by_uid).and_return(ldap_user) + + oauth_user.save # rubocop:disable Rails/SaveBang + + expect(gl_user).to be_valid + expect(gl_user.username).to eql 'john' + expect(gl_user.name).to eql 'John Swift' + expect(gl_user.email).to eql 'john@example.com' + expect(gl_user.identities.length).to be 2 + identities_as_hash = gl_user.identities.map { |id| { provider: id.provider, extern_uid: id.extern_uid } } + expect(identities_as_hash).to match_array( + [ + { provider: 'ldapmain', extern_uid: dn }, + { provider: 'twitter', extern_uid: uid } + ] + ) + end end end diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb index 5a6fa7c416b..a5f46aa1f35 100644 --- a/spec/lib/gitlab/auth_spec.rb +++ b/spec/lib/gitlab/auth_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do +RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_category: :authentication_and_authorization do let_it_be(:project) { create(:project) } let(:auth_failure) { { actor: nil, project: nil, type: nil, authentication_abilities: nil } } @@ -14,7 +14,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do end it 'ADMIN_SCOPES contains all scopes for ADMIN access' do - expect(subject::ADMIN_SCOPES).to match_array %i[sudo] + expect(subject::ADMIN_SCOPES).to match_array %i[sudo admin_mode] end it 'REPOSITORY_SCOPES contains all scopes 
for REPOSITORY access' do @@ -28,19 +28,13 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do it 'DEFAULT_SCOPES contains all default scopes' do expect(subject::DEFAULT_SCOPES).to match_array [:api] end - - it 'optional_scopes contains all non-default scopes' do - stub_container_registry_config(enabled: true) - - expect(subject.optional_scopes).to match_array %i[read_user read_api read_repository write_repository read_registry write_registry sudo openid profile email] - end end context 'available_scopes' do it 'contains all non-default scopes' do stub_container_registry_config(enabled: true) - expect(subject.all_available_scopes).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo] + expect(subject.all_available_scopes).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode] end it 'contains for non-admin user all non-default scopes without ADMIN access' do @@ -54,7 +48,38 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do stub_container_registry_config(enabled: true) user = create(:user, admin: true) - expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo] + expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode] + end + + it 'optional_scopes contains all non-default scopes' do + stub_container_registry_config(enabled: true) + + expect(subject.optional_scopes).to match_array %i[read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode openid profile email] + end + + context 'with feature flag disabled' do + before do + stub_feature_flags(admin_mode_for_api: false) + end + + it 'contains all non-default scopes' do + stub_container_registry_config(enabled: true) + + 
expect(subject.all_available_scopes).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode] + end + + it 'contains for admin user all non-default scopes with ADMIN access' do + stub_container_registry_config(enabled: true) + user = create(:user, admin: true) + + expect(subject.available_scopes_for(user)).to match_array %i[api read_user read_api read_repository write_repository read_registry write_registry sudo] + end + + it 'optional_scopes contains all non-default scopes' do + stub_container_registry_config(enabled: true) + + expect(subject.optional_scopes).to match_array %i[read_user read_api read_repository write_repository read_registry write_registry sudo admin_mode openid profile email] + end end context 'registry_scopes' do diff --git a/spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb b/spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb deleted file mode 100644 index 96adea03d43..00000000000 --- a/spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb +++ /dev/null @@ -1,65 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::BackfillJiraTrackerDeploymentType2, :migration, schema: 20210602155110 do - let!(:jira_integration_temp) { described_class::JiraServiceTemp } - let!(:jira_tracker_data_temp) { described_class::JiraTrackerDataTemp } - let!(:atlassian_host) { 'https://api.atlassian.net' } - let!(:mixedcase_host) { 'https://api.AtlassiaN.nEt' } - let!(:server_host) { 'https://my.server.net' } - - let(:jira_integration) { jira_integration_temp.create!(type: 'JiraService', active: true, category: 'issue_tracker') } - - subject { described_class.new } - - def create_tracker_data(options = {}) - jira_tracker_data_temp.create!({ service_id: jira_integration.id }.merge(options)) - end - - describe '#perform' do - context do - it 'ignores if 
deployment already set' do - tracker_data = create_tracker_data(url: atlassian_host, deployment_type: 'server') - - expect(subject).not_to receive(:collect_deployment_type) - - subject.perform(tracker_data.id, tracker_data.id) - - expect(tracker_data.reload.deployment_type).to eq 'server' - end - - it 'ignores if no url is set' do - tracker_data = create_tracker_data(deployment_type: 'unknown') - - expect(subject).to receive(:collect_deployment_type) - - subject.perform(tracker_data.id, tracker_data.id) - - expect(tracker_data.reload.deployment_type).to eq 'unknown' - end - end - - context 'when tracker is valid' do - let!(:tracker_1) { create_tracker_data(url: atlassian_host, deployment_type: 0) } - let!(:tracker_2) { create_tracker_data(url: mixedcase_host, deployment_type: 0) } - let!(:tracker_3) { create_tracker_data(url: server_host, deployment_type: 0) } - let!(:tracker_4) { create_tracker_data(api_url: server_host, deployment_type: 0) } - let!(:tracker_nextbatch) { create_tracker_data(api_url: atlassian_host, deployment_type: 0) } - - it 'sets the proper deployment_type', :aggregate_failures do - subject.perform(tracker_1.id, tracker_4.id) - - expect(tracker_1.reload.deployment_cloud?).to be_truthy - expect(tracker_2.reload.deployment_cloud?).to be_truthy - expect(tracker_3.reload.deployment_server?).to be_truthy - expect(tracker_4.reload.deployment_server?).to be_truthy - expect(tracker_nextbatch.reload.deployment_unknown?).to be_truthy - end - end - - it_behaves_like 'marks background migration job records' do - let(:arguments) { [1, 4] } - end - end -end diff --git a/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_children_spec.rb b/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_children_spec.rb index 15956d2ea80..876eb070745 100644 --- a/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_children_spec.rb +++ 
b/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_children_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceTraversalIdsChildren, :migration, schema: 20210602155110 do +RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceTraversalIdsChildren, :migration, schema: 20210826171758 do let(:namespaces_table) { table(:namespaces) } let!(:user_namespace) { namespaces_table.create!(id: 1, name: 'user', path: 'user', type: nil) } diff --git a/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_roots_spec.rb b/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_roots_spec.rb index 019c6d54068..ad9b54608c6 100644 --- a/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_roots_spec.rb +++ b/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_roots_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceTraversalIdsRoots, :migration, schema: 20210602155110 do +RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceTraversalIdsRoots, :migration, schema: 20210826171758 do let(:namespaces_table) { table(:namespaces) } let!(:user_namespace) { namespaces_table.create!(id: 1, name: 'user', path: 'user', type: nil) } diff --git a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb index 8d5aa6236a7..80fd86e90bb 100644 --- a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb +++ b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb @@ -2,7 +2,8 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migration, schema: 20210602155110 do +RSpec.describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migration, schema: 20210826171758, +feature_category: 
:source_code_management do let(:gitlab_shell) { Gitlab::Shell.new } let(:users) { table(:users) } let(:snippets) { table(:snippets) } diff --git a/spec/lib/gitlab/background_migration/backfill_upvotes_count_on_issues_spec.rb b/spec/lib/gitlab/background_migration/backfill_upvotes_count_on_issues_spec.rb index b084e3fe885..7142aea3ab2 100644 --- a/spec/lib/gitlab/background_migration/backfill_upvotes_count_on_issues_spec.rb +++ b/spec/lib/gitlab/background_migration/backfill_upvotes_count_on_issues_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::BackfillUpvotesCountOnIssues, schema: 20210701111909 do +RSpec.describe Gitlab::BackgroundMigration::BackfillUpvotesCountOnIssues, schema: 20210826171758 do let(:award_emoji) { table(:award_emoji) } let!(:namespace) { table(:namespaces).create!(name: 'namespace', path: 'namespace') } diff --git a/spec/lib/gitlab/background_migration/cleanup_orphaned_lfs_objects_projects_spec.rb b/spec/lib/gitlab/background_migration/cleanup_orphaned_lfs_objects_projects_spec.rb index 0d9d9eb929c..5ffe665f0ad 100644 --- a/spec/lib/gitlab/background_migration/cleanup_orphaned_lfs_objects_projects_spec.rb +++ b/spec/lib/gitlab/background_migration/cleanup_orphaned_lfs_objects_projects_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::CleanupOrphanedLfsObjectsProjects, schema: 20210602155110 do +RSpec.describe Gitlab::BackgroundMigration::CleanupOrphanedLfsObjectsProjects, schema: 20210826171758 do let(:lfs_objects_projects) { table(:lfs_objects_projects) } let(:lfs_objects) { table(:lfs_objects) } let(:projects) { table(:projects) } diff --git a/spec/lib/gitlab/background_migration/delete_orphaned_deployments_spec.rb b/spec/lib/gitlab/background_migration/delete_orphaned_deployments_spec.rb index c4039b85459..8f058c875a2 100644 --- a/spec/lib/gitlab/background_migration/delete_orphaned_deployments_spec.rb +++ 
b/spec/lib/gitlab/background_migration/delete_orphaned_deployments_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::DeleteOrphanedDeployments, :migration, schema: 20210617161348 do +RSpec.describe Gitlab::BackgroundMigration::DeleteOrphanedDeployments, :migration, schema: 20210826171758 do let!(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') } let!(:project) { table(:projects).create!(namespace_id: namespace.id) } let!(:environment) { table(:environments).create!(name: 'production', slug: 'production', project_id: project.id) } @@ -10,17 +10,14 @@ RSpec.describe Gitlab::BackgroundMigration::DeleteOrphanedDeployments, :migratio before do create_deployment!(environment.id, project.id) - create_deployment!(non_existing_record_id, project.id) end it 'deletes only orphaned deployments' do expect(valid_deployments.pluck(:id)).not_to be_empty - expect(orphaned_deployments.pluck(:id)).not_to be_empty subject.perform(table(:deployments).minimum(:id), table(:deployments).maximum(:id)) expect(valid_deployments.pluck(:id)).not_to be_empty - expect(orphaned_deployments.pluck(:id)).to be_empty end it 'marks jobs as done' do @@ -29,15 +26,9 @@ RSpec.describe Gitlab::BackgroundMigration::DeleteOrphanedDeployments, :migratio arguments: [table(:deployments).minimum(:id), table(:deployments).minimum(:id)] ) - second_job = background_migration_jobs.create!( - class_name: 'DeleteOrphanedDeployments', - arguments: [table(:deployments).maximum(:id), table(:deployments).maximum(:id)] - ) - subject.perform(table(:deployments).minimum(:id), table(:deployments).minimum(:id)) expect(first_job.reload.status).to eq(Gitlab::Database::BackgroundMigrationJob.statuses[:succeeded]) - expect(second_job.reload.status).to eq(Gitlab::Database::BackgroundMigrationJob.statuses[:pending]) end private diff --git a/spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb 
b/spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb index 66e16b16270..8f3ef44e00c 100644 --- a/spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb +++ b/spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::DropInvalidVulnerabilities, schema: 20210602155110 do +RSpec.describe Gitlab::BackgroundMigration::DropInvalidVulnerabilities, schema: 20210826171758 do let!(:background_migration_jobs) { table(:background_migration_jobs) } let!(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') } let!(:users) { table(:users) } diff --git a/spec/lib/gitlab/background_migration/encrypt_ci_trigger_token_spec.rb b/spec/lib/gitlab/background_migration/encrypt_ci_trigger_token_spec.rb new file mode 100644 index 00000000000..b52f30a5e21 --- /dev/null +++ b/spec/lib/gitlab/background_migration/encrypt_ci_trigger_token_spec.rb @@ -0,0 +1,57 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::EncryptCiTriggerToken, feature_category: :continuous_integration do + let(:ci_triggers) do + table(:ci_triggers, database: :ci) do |ci_trigger| + ci_trigger.send :attr_encrypted, :encrypted_token_tmp, + attribute: :encrypted_token, + mode: :per_attribute_iv, + key: ::Settings.attr_encrypted_db_key_base_32, + algorithm: 'aes-256-gcm', + encode: false, + encode_iv: false + end + end + + let(:without_encryption) { ci_triggers.create!(token: "token", owner_id: 1) } + let(:without_encryption_2) { ci_triggers.create!(token: "token 2", owner_id: 1) } + let(:with_encryption) { ci_triggers.create!(token: 'token 3', owner_id: 1, encrypted_token_tmp: 'token 3') } + + let(:start_id) { ci_triggers.minimum(:id) } + let(:end_id) { ci_triggers.maximum(:id) } + + let(:migration_attrs) do + { + start_id: start_id, + end_id: end_id, + batch_table: :ci_triggers, + batch_column: :id, + sub_batch_size: 
1, + pause_ms: 0, + connection: Ci::ApplicationRecord.connection + } + end + + it 'ensures all unencrypted tokens are encrypted' do + expect(without_encryption.encrypted_token).to eq(nil) + expect(without_encryption_2.encrypted_token).to eq(nil) + expect(with_encryption.encrypted_token).not_to be(nil) + + described_class.new(**migration_attrs).perform + + updated_triggers = [without_encryption, without_encryption_2] + updated_triggers.each do |stale_trigger| + db_trigger = Ci::Trigger.find(stale_trigger.id) + expect(db_trigger.encrypted_token).not_to be(nil) + expect(db_trigger.encrypted_token_iv).not_to be(nil) + expect(db_trigger.token).to eq(db_trigger.encrypted_token_tmp) + end + + already_encrypted_token = Ci::Trigger.find(with_encryption.id) + expect(already_encrypted_token.encrypted_token).to eq(with_encryption.encrypted_token) + expect(already_encrypted_token.encrypted_token_iv).to eq(with_encryption.encrypted_token_iv) + expect(with_encryption.token).to eq(with_encryption.encrypted_token_tmp) + end +end diff --git a/spec/lib/gitlab/background_migration/extract_project_topics_into_separate_table_spec.rb b/spec/lib/gitlab/background_migration/extract_project_topics_into_separate_table_spec.rb index 51a09d50a19..586e75ffb37 100644 --- a/spec/lib/gitlab/background_migration/extract_project_topics_into_separate_table_spec.rb +++ b/spec/lib/gitlab/background_migration/extract_project_topics_into_separate_table_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe Gitlab::BackgroundMigration::ExtractProjectTopicsIntoSeparateTable, - :suppress_gitlab_schemas_validate_connection, schema: 20210730104800 do + :suppress_gitlab_schemas_validate_connection, schema: 20210826171758 do it 'correctly extracts project topics into separate table' do namespaces = table(:namespaces) projects = table(:projects) diff --git a/spec/lib/gitlab/background_migration/fix_incoherent_packages_size_on_project_statistics_spec.rb 
b/spec/lib/gitlab/background_migration/fix_incoherent_packages_size_on_project_statistics_spec.rb new file mode 100644 index 00000000000..f71b54a7eb4 --- /dev/null +++ b/spec/lib/gitlab/background_migration/fix_incoherent_packages_size_on_project_statistics_spec.rb @@ -0,0 +1,242 @@ +# frozen_string_literal: true + +require 'spec_helper' + +# rubocop: disable RSpec/MultipleMemoizedHelpers +RSpec.describe Gitlab::BackgroundMigration::FixIncoherentPackagesSizeOnProjectStatistics, + feature_category: :package_registry do + let(:project_statistics_table) { table(:project_statistics) } + let(:packages_table) { table(:packages_packages) } + let(:package_files_table) { table(:packages_package_files) } + let(:projects_table) { table(:projects) } + let(:namespaces_table) { table(:namespaces) } + + let!(:group) { namespaces_table.create!(name: 'group', path: 'group', type: 'Group') } + + let!(:project_1_namespace) do + namespaces_table.create!(name: 'project1', path: 'project1', type: 'Project', parent_id: group.id) + end + + let!(:project_2_namespace) do + namespaces_table.create!(name: 'project2', path: 'project2', type: 'Project', parent_id: group.id) + end + + let!(:project_3_namespace) do + namespaces_table.create!(name: 'project3', path: 'project3', type: 'Project', parent_id: group.id) + end + + let!(:project_4_namespace) do + namespaces_table.create!(name: 'project4', path: 'project4', type: 'Project', parent_id: group.id) + end + + let!(:project_1) do + projects_table.create!( + namespace_id: group.id, + name: 'project1', + path: 'project1', + project_namespace_id: project_1_namespace.id + ) + end + + let!(:project_2) do + projects_table.create!( + namespace_id: group.id, + name: 'project2', + path: 'project2', + project_namespace_id: project_2_namespace.id + ) + end + + let!(:project_3) do + projects_table.create!( + namespace_id: group.id, + name: 'project3', + path: 'project3', + project_namespace_id: project_3_namespace.id + ) + end + + let!(:project_4) do + 
projects_table.create!( + namespace_id: group.id, + name: 'project4', + path: 'project4', + project_namespace_id: project_4_namespace.id + ) + end + + let!(:coherent_non_zero_statistics) do + project_statistics_table.create!(namespace_id: group.id, project_id: project_1.id, packages_size: 200) + end + + let!(:incoherent_non_zero_statistics) do + project_statistics_table.create!(namespace_id: group.id, project_id: project_2.id, packages_size: 5) + end + + let!(:coherent_zero_statistics) do + project_statistics_table.create!(namespace_id: group.id, project_id: project_4.id, packages_size: 0) + end + + let!(:incoherent_zero_statistics) do + project_statistics_table.create!(namespace_id: group.id, project_id: project_3.id, packages_size: 0) + end + + let!(:package_1) do + packages_table.create!(project_id: project_1.id, name: 'test1', version: '1.2.3', package_type: 2) + end + + let!(:package_2) do + packages_table.create!(project_id: project_2.id, name: 'test2', version: '1.2.3', package_type: 2) + end + + let!(:package_3) do + packages_table.create!(project_id: project_2.id, name: 'test3', version: '1.2.3', package_type: 2) + end + + let!(:package_4) do + packages_table.create!(project_id: project_3.id, name: 'test4', version: '1.2.3', package_type: 2) + end + + let!(:package_5) do + packages_table.create!(project_id: project_3.id, name: 'test5', version: '1.2.3', package_type: 2) + end + + let!(:package_file_1_1) do + package_files_table.create!(package_id: package_1.id, file_name: 'test.txt', file: 'test', size: 100) + end + + let!(:package_file_1_2) do + package_files_table.create!(package_id: package_1.id, file_name: 'test.txt', file: 'test', size: 100) + end + + let!(:package_file_2_1) do + package_files_table.create!(package_id: package_2.id, file_name: 'test.txt', file: 'test', size: 100) + end + + let!(:package_file_3_1) do + package_files_table.create!(package_id: package_3.id, file_name: 'test.txt', file: 'test', size: 100) + end + + let!(:package_file_4_1) 
do + package_files_table.create!(package_id: package_4.id, file_name: 'test.txt', file: 'test', size: 100) + end + + let!(:package_file_5_1) do + package_files_table.create!(package_id: package_5.id, file_name: 'test.txt', file: 'test', size: 100) + end + + let(:migration) do + described_class.new( + start_id: project_statistics_table.minimum(:id), + end_id: project_statistics_table.maximum(:id), + batch_table: :project_statistics, + batch_column: :id, + sub_batch_size: 1000, + pause_ms: 0, + connection: ApplicationRecord.connection + ) + end + + describe '#filter_batch' do + it 'selects all package size statistics' do + expected = project_statistics_table.pluck(:id) + actual = migration.filter_batch(project_statistics_table).pluck(:id) + + expect(actual).to match_array(expected) + end + end + + describe '#perform', :aggregate_failures, :clean_gitlab_redis_cache do + subject(:perform) { migration.perform } + + shared_examples 'not updating project statistics' do + it 'does not change them' do + expect(FlushCounterIncrementsWorker).not_to receive(:perform_in) + expect { perform } + .to not_change { incoherent_non_zero_statistics.reload.packages_size } + .and not_change { coherent_non_zero_statistics.reload.packages_size } + .and not_change { incoherent_zero_statistics.reload.packages_size } + .and not_change { coherent_zero_statistics.reload.packages_size } + expect_buffered_update(incoherent_non_zero_statistics, 0) + expect_buffered_update(incoherent_zero_statistics, 0) + end + end + + shared_examples 'enqueuing a buffered updates' do |updates| + it 'fixes the packages_size stat' do + updates_for_stats = updates.deep_transform_keys { |k| public_send(k) } + updates_for_stats.each do |stat, amount| + expect(FlushCounterIncrementsWorker) + .to receive(:perform_in).with( + ::Gitlab::Counters::BufferedCounter::WORKER_DELAY, + 'ProjectStatistics', + stat.id, + :packages_size + ) + + expect(::Gitlab::BackgroundMigration::Logger) + .to receive(:info).with( + migrator: 
described_class::MIGRATOR, + project_id: stat.project_id, + old_size: stat.packages_size, + new_size: stat.packages_size + amount + ) + end + + expect { perform } + .to not_change { incoherent_non_zero_statistics.reload.packages_size } + .and not_change { coherent_non_zero_statistics.reload.packages_size } + .and not_change { incoherent_zero_statistics.reload.packages_size } + .and not_change { coherent_zero_statistics.reload.packages_size } + + updates_for_stats.each do |stat, amount| + expect_buffered_update(stat, amount) + end + end + end + + context 'with incoherent packages_size' do + it_behaves_like 'enqueuing a buffered updates', + incoherent_non_zero_statistics: 195, + incoherent_zero_statistics: 200 + + context 'with updates waiting on redis' do + before do + insert_packages_size_update(incoherent_non_zero_statistics, -50) + insert_packages_size_update(incoherent_zero_statistics, -50) + end + + it_behaves_like 'enqueuing a buffered updates', + incoherent_non_zero_statistics: 195, + incoherent_zero_statistics: 200 + end + end + + context 'with no incoherent packages_size' do + before do + incoherent_non_zero_statistics.update!(packages_size: 200) + incoherent_zero_statistics.update!(packages_size: 200) + end + + it_behaves_like 'not updating project statistics' + end + + def insert_packages_size_update(stat, amount) + Gitlab::Redis::SharedState.with do |redis| + redis.set(redis_key(stat), amount) + end + end + + def expect_buffered_update(stat, expected) + amount = Gitlab::Redis::SharedState.with do |redis| + redis.get(redis_key(stat)).to_i + end + expect(amount).to eq(expected) + end + + def redis_key(stats) + "project:{#{stats.project_id}}:counters:ProjectStatistics:#{stats.id}:packages_size" + end + end +end +# rubocop: enable RSpec/MultipleMemoizedHelpers diff --git a/spec/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics_spec.rb 
b/spec/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics_spec.rb index 4d7c836cff4..b252df4ecff 100644 --- a/spec/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics_spec.rb +++ b/spec/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe Gitlab::BackgroundMigration::MigrateProjectTaggingsContextFromTagsToTopics, - :suppress_gitlab_schemas_validate_connection, schema: 20210602155110 do + :suppress_gitlab_schemas_validate_connection, schema: 20210826171758 do it 'correctly migrates project taggings context from tags to topics' do taggings = table(:taggings) diff --git a/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb b/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb index fe45eaac3b7..08fde0d0ff4 100644 --- a/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb +++ b/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' require 'webauthn/u2f_migrator' -RSpec.describe Gitlab::BackgroundMigration::MigrateU2fWebauthn, :migration, schema: 20210602155110 do +RSpec.describe Gitlab::BackgroundMigration::MigrateU2fWebauthn, :migration, schema: 20210826171758 do let(:users) { table(:users) } let(:user) { users.create!(email: 'email@email.com', name: 'foo', username: 'foo', projects_limit: 0) } diff --git a/spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb b/spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb index cafddb6aeaf..71cf58a933f 100644 --- a/spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb +++ b/spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 
Gitlab::BackgroundMigration::MoveContainerRegistryEnabledToProjectFeature, :migration, schema: 20210602155110 do +RSpec.describe Gitlab::BackgroundMigration::MoveContainerRegistryEnabledToProjectFeature, :migration, schema: 20210826171758 do let(:enabled) { 20 } let(:disabled) { 0 } diff --git a/spec/lib/gitlab/background_migration/nullify_creator_id_column_of_orphaned_projects_spec.rb b/spec/lib/gitlab/background_migration/nullify_creator_id_column_of_orphaned_projects_spec.rb new file mode 100644 index 00000000000..a8574411957 --- /dev/null +++ b/spec/lib/gitlab/background_migration/nullify_creator_id_column_of_orphaned_projects_spec.rb @@ -0,0 +1,90 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::NullifyCreatorIdColumnOfOrphanedProjects, feature_category: :projects do + let(:users) { table(:users) } + let(:projects) { table(:projects) } + let(:namespaces) { table(:namespaces) } + + let(:user_1) { users.create!(name: 'user_1', email: 'user_1@example.com', projects_limit: 4) } + let(:user_2) { users.create!(name: 'user_2', email: 'user_2@example.com', projects_limit: 4) } + let(:user_3) { users.create!(name: 'user_3', email: 'user_3@example.com', projects_limit: 4) } + + let!(:group) do + namespaces.create!( + name: 'Group1', type: 'Group', path: 'space1' + ) + end + + let!(:project_namespace_1) do + namespaces.create!( + name: 'project_1', path: 'project_1', type: 'Project' + ) + end + + let!(:project_namespace_2) do + namespaces.create!( + name: 'project_2', path: 'project_2', type: 'Project' + ) + end + + let!(:project_namespace_3) do + namespaces.create!( + name: 'project_3', path: 'project_3', type: 'Project' + ) + end + + let!(:project_namespace_4) do + namespaces.create!( + name: 'project_4', path: 'project_4', type: 'Project' + ) + end + + let!(:project_1) do + projects.create!( + name: 'project_1', path: 'project_1', namespace_id: group.id, project_namespace_id: project_namespace_1.id, + 
creator_id: user_1.id + ) + end + + let!(:project_2) do + projects.create!( + name: 'project_2', path: 'project_2', namespace_id: group.id, project_namespace_id: project_namespace_2.id, + creator_id: user_2.id + ) + end + + let!(:project_3) do + projects.create!( + name: 'project_3', path: 'project_3', namespace_id: group.id, project_namespace_id: project_namespace_3.id, + creator_id: user_3.id + ) + end + + let!(:project_4) do + projects.create!( + name: 'project_4', path: 'project_4', namespace_id: group.id, project_namespace_id: project_namespace_4.id, + creator_id: nil + ) + end + + subject do + described_class.new( + start_id: project_1.id, + end_id: project_4.id, + batch_table: :projects, + batch_column: :id, + sub_batch_size: 1, + pause_ms: 0, + connection: ApplicationRecord.connection + ).perform + end + + it 'nullifies the `creator_id` column of projects whose creators do not exist' do + # `delete` `user_3` so that the creator of `project_3` is removed, without invoking `dependent: :nullify` on `User` + user_3.delete + + expect { subject }.to change { projects.where(creator_id: nil).count }.from(1).to(2) + end +end diff --git a/spec/lib/gitlab/background_migration/rebalance_partition_id_spec.rb b/spec/lib/gitlab/background_migration/rebalance_partition_id_spec.rb new file mode 100644 index 00000000000..195e57e4e59 --- /dev/null +++ b/spec/lib/gitlab/background_migration/rebalance_partition_id_spec.rb @@ -0,0 +1,46 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::RebalancePartitionId, + :migration, + schema: 20230125093723, + feature_category: :continuous_integration do + let(:ci_builds_table) { table(:ci_builds, database: :ci) } + let(:ci_pipelines_table) { table(:ci_pipelines, database: :ci) } + + let!(:valid_ci_pipeline) { ci_pipelines_table.create!(id: 1, partition_id: 100) } + let!(:invalid_ci_pipeline) { ci_pipelines_table.create!(id: 2, partition_id: 101) } + + describe '#perform' do + using 
RSpec::Parameterized::TableSyntax + + where(:table_name, :invalid_record, :valid_record) do + :ci_pipelines | invalid_ci_pipeline | valid_ci_pipeline + end + + subject(:perform) do + described_class.new( + start_id: 1, + end_id: 2, + batch_table: table_name, + batch_column: :id, + sub_batch_size: 1, + pause_ms: 0, + connection: Ci::ApplicationRecord.connection + ).perform + end + + shared_examples 'fix invalid records' do + it 'rebalances partition_id to 100 when partition_id is 101' do + expect { perform } + .to change { invalid_record.reload.partition_id }.from(101).to(100) + .and not_change { valid_record.reload.partition_id } + end + end + + with_them do + it_behaves_like 'fix invalid records' + end + end +end diff --git a/spec/lib/gitlab/background_migration/sanitize_confidential_todos_spec.rb b/spec/lib/gitlab/background_migration/sanitize_confidential_todos_spec.rb deleted file mode 100644 index a19a3760958..00000000000 --- a/spec/lib/gitlab/background_migration/sanitize_confidential_todos_spec.rb +++ /dev/null @@ -1,102 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::SanitizeConfidentialTodos, :migration, feature_category: :team_planning do - let!(:issue_type_id) { table(:work_item_types).find_by(base_type: 0).id } - - let(:todos) { table(:todos) } - let(:notes) { table(:notes) } - let(:namespaces) { table(:namespaces) } - let(:projects) { table(:projects) } - let(:project_features) { table(:project_features) } - let(:users) { table(:users) } - let(:issues) { table(:issues) } - let(:members) { table(:members) } - let(:project_authorizations) { table(:project_authorizations) } - - let(:user) { users.create!(first_name: 'Test', last_name: 'User', email: 'test@user.com', projects_limit: 1) } - let(:project_namespace1) { namespaces.create!(path: 'pns1', name: 'pns1') } - let(:project_namespace2) { namespaces.create!(path: 'pns2', name: 'pns2') } - - let(:project1) do - projects.create!(namespace_id: 
project_namespace1.id, - project_namespace_id: project_namespace1.id, visibility_level: 20) - end - - let(:project2) do - projects.create!(namespace_id: project_namespace2.id, - project_namespace_id: project_namespace2.id) - end - - let(:issue1) do - issues.create!( - project_id: project1.id, namespace_id: project_namespace1.id, issue_type: 1, title: 'issue1', author_id: user.id, - work_item_type_id: issue_type_id - ) - end - - let(:issue2) do - issues.create!( - project_id: project2.id, namespace_id: project_namespace2.id, issue_type: 1, title: 'issue2', - work_item_type_id: issue_type_id - ) - end - - let(:public_note) { notes.create!(note: 'text', project_id: project1.id) } - - let(:confidential_note) do - notes.create!(note: 'text', project_id: project1.id, confidential: true, - noteable_id: issue1.id, noteable_type: 'Issue') - end - - let(:other_confidential_note) do - notes.create!(note: 'text', project_id: project2.id, confidential: true, - noteable_id: issue2.id, noteable_type: 'Issue') - end - - let(:common_params) { { user_id: user.id, author_id: user.id, action: 1, state: 'pending', target_type: 'Note' } } - let!(:ignored_todo1) { todos.create!(**common_params) } - let!(:ignored_todo2) { todos.create!(**common_params, target_id: public_note.id, note_id: public_note.id) } - let!(:valid_todo) { todos.create!(**common_params, target_id: confidential_note.id, note_id: confidential_note.id) } - let!(:invalid_todo) do - todos.create!(**common_params, target_id: other_confidential_note.id, note_id: other_confidential_note.id) - end - - describe '#perform' do - before do - project_features.create!(project_id: project1.id, issues_access_level: 20, pages_access_level: 20) - members.create!(state: 0, source_id: project1.id, source_type: 'Project', - type: 'ProjectMember', user_id: user.id, access_level: 50, notification_level: 0, - member_namespace_id: project_namespace1.id) - project_authorizations.create!(project_id: project1.id, user_id: user.id, access_level: 
50) - end - - subject(:perform) do - described_class.new( - start_id: notes.minimum(:id), - end_id: notes.maximum(:id), - batch_table: :notes, - batch_column: :id, - sub_batch_size: 1, - pause_ms: 0, - connection: ApplicationRecord.connection - ).perform - end - - it 'deletes todos where user can not read its note and logs deletion', :aggregate_failures do - expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |logger| - expect(logger).to receive(:info).with( - hash_including( - message: "#{described_class.name} deleting invalid todo", - attributes: hash_including(invalid_todo.attributes.slice(:id, :user_id, :target_id, :target_type)) - ) - ).once - end - - expect { perform }.to change(todos, :count).by(-1) - - expect(todos.all).to match_array([ignored_todo1, ignored_todo2, valid_todo]) - end - end -end diff --git a/spec/lib/gitlab/background_migration/update_timelogs_project_id_spec.rb b/spec/lib/gitlab/background_migration/update_timelogs_project_id_spec.rb index 7261758e010..b8c3bf8f3ac 100644 --- a/spec/lib/gitlab/background_migration/update_timelogs_project_id_spec.rb +++ b/spec/lib/gitlab/background_migration/update_timelogs_project_id_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::UpdateTimelogsProjectId, schema: 20210602155110 do +RSpec.describe Gitlab::BackgroundMigration::UpdateTimelogsProjectId, schema: 20210826171758 do let!(:namespace) { table(:namespaces).create!(name: 'namespace', path: 'namespace') } let!(:project1) { table(:projects).create!(namespace_id: namespace.id) } let!(:project2) { table(:projects).create!(namespace_id: namespace.id) } diff --git a/spec/lib/gitlab/background_migration/update_users_where_two_factor_auth_required_from_group_spec.rb b/spec/lib/gitlab/background_migration/update_users_where_two_factor_auth_required_from_group_spec.rb index 4599491b580..f16ae489b78 100644 --- a/spec/lib/gitlab/background_migration/update_users_where_two_factor_auth_required_from_group_spec.rb 
+++ b/spec/lib/gitlab/background_migration/update_users_where_two_factor_auth_required_from_group_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::UpdateUsersWhereTwoFactorAuthRequiredFromGroup, :migration, schema: 20210602155110 do +RSpec.describe Gitlab::BackgroundMigration::UpdateUsersWhereTwoFactorAuthRequiredFromGroup, :migration, schema: 20210826171758 do include MigrationHelpers::NamespacesHelpers let(:group_with_2fa_parent) { create_namespace('parent', Gitlab::VisibilityLevel::PRIVATE, require_two_factor_authentication: true) } diff --git a/spec/lib/gitlab/bitbucket_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importer_spec.rb index f83ce01c617..1526a1a9f2d 100644 --- a/spec/lib/gitlab/bitbucket_import/importer_spec.rb +++ b/spec/lib/gitlab/bitbucket_import/importer_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BitbucketImport::Importer do +RSpec.describe Gitlab::BitbucketImport::Importer, feature_category: :integrations do include ImportSpecHelper before do diff --git a/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb index ab4be5a909a..3cff2411054 100644 --- a/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb +++ b/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BitbucketServerImport::Importer do +RSpec.describe Gitlab::BitbucketServerImport::Importer, feature_category: :importers do include ImportSpecHelper let(:import_url) { 'http://my-bitbucket' } diff --git a/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb b/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb index 2dea0aef4cf..ec0bda3c300 100644 --- a/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb +++ b/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Cache::Ci::ProjectPipelineStatus, 
:clean_gitlab_redis_cache do +RSpec.describe Gitlab::Cache::Ci::ProjectPipelineStatus, :clean_gitlab_redis_cache, feature_category: :continuous_integration do let_it_be(:project) { create(:project, :repository) } let(:pipeline_status) { described_class.new(project) } diff --git a/spec/lib/gitlab/cache/helpers_spec.rb b/spec/lib/gitlab/cache/helpers_spec.rb index 39d37e979b4..06131ee4546 100644 --- a/spec/lib/gitlab/cache/helpers_spec.rb +++ b/spec/lib/gitlab/cache/helpers_spec.rb @@ -33,10 +33,23 @@ RSpec.describe Gitlab::Cache::Helpers, :use_clean_rails_redis_caching do context 'single object' do let_it_be(:presentable) { create(:merge_request, source_project: project, source_branch: 'wip') } - it_behaves_like 'object cache helper' + context 'when presenter is a serializer' do + let(:expected_cache_key_prefix) { 'MergeRequestSerializer' } + + it_behaves_like 'object cache helper' + end + + context 'when presenter is a Grape::Entity' do + let(:presenter) { API::Entities::MergeRequest } + let(:expected_cache_key_prefix) { 'API::Entities::MergeRequest' } + + it_behaves_like 'object cache helper' + end end context 'collection of objects' do + let(:expected_cache_key_prefix) { 'MergeRequestSerializer' } + let_it_be(:presentable) do [ create(:merge_request, source_project: project, source_branch: 'fix'), @@ -46,5 +59,17 @@ RSpec.describe Gitlab::Cache::Helpers, :use_clean_rails_redis_caching do it_behaves_like 'collection cache helper' end + + context 'when passed presenter is not a serializer or an entity' do + let(:presenter) { User } + + let_it_be(:presentable) do + create(:merge_request, source_project: project, source_branch: 'master') + end + + it 'throws an exception' do + expect { subject }.to raise_exception(ArgumentError, "presenter User is not supported") + end + end end end diff --git a/spec/lib/gitlab/cache/metadata_spec.rb b/spec/lib/gitlab/cache/metadata_spec.rb new file mode 100644 index 00000000000..2e8af7a9c44 --- /dev/null +++ 
b/spec/lib/gitlab/cache/metadata_spec.rb @@ -0,0 +1,94 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Cache::Metadata, feature_category: :source_code_management do + subject(:attributes) do + described_class.new( + caller_id: caller_id, + cache_identifier: cache_identifier, + feature_category: feature_category, + backing_resource: backing_resource + ) + end + + let(:caller_id) { 'caller-id' } + let(:cache_identifier) { 'ApplicationController#show' } + let(:feature_category) { :source_code_management } + let(:backing_resource) { :unknown } + + describe '#initialize' do + context 'when optional arguments are not set' do + before do + Gitlab::ApplicationContext.push(caller_id: 'context-id') + end + + it 'sets default value for them' do + attributes = described_class.new( + cache_identifier: cache_identifier, + feature_category: feature_category + ) + + expect(attributes.backing_resource).to eq(:unknown) + expect(attributes.caller_id).to eq('context-id') + end + end + + context 'when invalid feature category is set' do + let(:feature_category) { :not_supported } + + it { expect { attributes }.to raise_error(RuntimeError) } + + context 'when on production' do + before do + allow(Gitlab).to receive(:dev_or_test_env?).and_return(false) + end + + it 'does not raise an exception' do + expect { attributes }.not_to raise_error + expect(attributes.feature_category).to eq('unknown') + end + end + end + + context 'when backing resource is not supported' do + let(:backing_resource) { 'foo' } + + it { expect { attributes }.to raise_error(RuntimeError) } + + context 'when on production' do + before do + allow(Gitlab).to receive(:dev_or_test_env?).and_return(false) + end + + it 'does not raise an exception' do + expect { attributes }.not_to raise_error + end + end + end + end + + describe '#caller_id' do + subject { attributes.caller_id } + + it { is_expected.to eq caller_id } + end + + describe '#cache_identifier' do + subject { 
attributes.cache_identifier } + + it { is_expected.to eq cache_identifier } + end + + describe '#feature_category' do + subject { attributes.feature_category } + + it { is_expected.to eq feature_category } + end + + describe '#backing_resource' do + subject { attributes.backing_resource } + + it { is_expected.to eq backing_resource } + end +end diff --git a/spec/lib/gitlab/cache/metrics_spec.rb b/spec/lib/gitlab/cache/metrics_spec.rb index d8103837708..24b274f4209 100644 --- a/spec/lib/gitlab/cache/metrics_spec.rb +++ b/spec/lib/gitlab/cache/metrics_spec.rb @@ -3,8 +3,10 @@ require 'spec_helper' RSpec.describe Gitlab::Cache::Metrics do - subject(:metrics) do - described_class.new( + subject(:metrics) { described_class.new(metadata) } + + let(:metadata) do + Gitlab::Cache::Metadata.new( caller_id: caller_id, cache_identifier: cache_identifier, feature_category: feature_category, @@ -27,24 +29,6 @@ RSpec.describe Gitlab::Cache::Metrics do ).and_return(counter_mock) end - describe '#initialize' do - context 'when backing resource is not supported' do - let(:backing_resource) { 'foo' } - - it { expect { metrics }.to raise_error(RuntimeError) } - - context 'when on production' do - before do - allow(Gitlab).to receive(:dev_or_test_env?).and_return(false) - end - - it 'does not raise an exception' do - expect { metrics }.not_to raise_error - end - end - end - end - describe '#increment_cache_hit' do subject { metrics.increment_cache_hit } diff --git a/spec/lib/gitlab/chat/responder_spec.rb b/spec/lib/gitlab/chat/responder_spec.rb index 803f30da9e7..a9d290cb87c 100644 --- a/spec/lib/gitlab/chat/responder_spec.rb +++ b/spec/lib/gitlab/chat/responder_spec.rb @@ -2,30 +2,70 @@ require 'spec_helper' -RSpec.describe Gitlab::Chat::Responder do +RSpec.describe Gitlab::Chat::Responder, feature_category: :integrations do describe '.responder_for' do - context 'using a regular build' do - it 'returns nil' do - build = create(:ci_build) + context 'when the feature flag is disabled' 
do + before do + stub_feature_flags(use_response_url_for_chat_responder: false) + end + + context 'using a regular build' do + it 'returns nil' do + build = create(:ci_build) + + expect(described_class.responder_for(build)).to be_nil + end + end + + context 'using a chat build' do + it 'returns the responder for the build' do + pipeline = create(:ci_pipeline) + build = create(:ci_build, pipeline: pipeline) + integration = double(:integration, chat_responder: Gitlab::Chat::Responder::Slack) + chat_name = double(:chat_name, integration: integration) + chat_data = double(:chat_data, chat_name: chat_name) + + allow(pipeline) + .to receive(:chat_data) + .and_return(chat_data) - expect(described_class.responder_for(build)).to be_nil + expect(described_class.responder_for(build)) + .to be_an_instance_of(Gitlab::Chat::Responder::Slack) + end end end - context 'using a chat build' do - it 'returns the responder for the build' do - pipeline = create(:ci_pipeline) - build = create(:ci_build, pipeline: pipeline) - integration = double(:integration, chat_responder: Gitlab::Chat::Responder::Slack) - chat_name = double(:chat_name, integration: integration) - chat_data = double(:chat_data, chat_name: chat_name) + context 'when the feature flag is enabled' do + before do + stub_feature_flags(use_response_url_for_chat_responder: true) + end + + context 'using a regular build' do + it 'returns nil' do + build = create(:ci_build) + + expect(described_class.responder_for(build)).to be_nil + end + end + + context 'using a chat build' do + let(:chat_name) { create(:chat_name, chat_id: 'U123') } + let(:pipeline) do + pipeline = create(:ci_pipeline) + pipeline.create_chat_data!( + response_url: 'https://hooks.slack.com/services/12345', + chat_name_id: chat_name.id + ) + pipeline + end - allow(pipeline) - .to receive(:chat_data) - .and_return(chat_data) + let(:build) { create(:ci_build, pipeline: pipeline) } + let(:responder) { described_class.new(build) } - 
expect(described_class.responder_for(build)) - .to be_an_instance_of(Gitlab::Chat::Responder::Slack) + it 'returns the responder for the build' do + expect(described_class.responder_for(build)) + .to be_an_instance_of(Gitlab::Chat::Responder::Slack) + end end end end diff --git a/spec/lib/gitlab/ci/artifacts/logger_spec.rb b/spec/lib/gitlab/ci/artifacts/logger_spec.rb index 7753cb0d25e..7a2f8b6ea37 100644 --- a/spec/lib/gitlab/ci/artifacts/logger_spec.rb +++ b/spec/lib/gitlab/ci/artifacts/logger_spec.rb @@ -9,23 +9,27 @@ RSpec.describe Gitlab::Ci::Artifacts::Logger do describe '.log_created' do it 'logs information about created artifact' do - artifact = create(:ci_job_artifact, :archive) - - expect(Gitlab::AppLogger).to receive(:info).with( - hash_including( - message: 'Artifact created', - job_artifact_id: artifact.id, - size: artifact.size, - type: artifact.file_type, - build_id: artifact.job_id, - project_id: artifact.project_id, - 'correlation_id' => an_instance_of(String), - 'meta.feature_category' => 'test', - 'meta.caller_id' => 'caller' + artifact_1 = create(:ci_job_artifact, :archive) + artifact_2 = create(:ci_job_artifact, :metadata) + artifacts = [artifact_1, artifact_2] + + artifacts.each do |artifact| + expect(Gitlab::AppLogger).to receive(:info).with( + hash_including( + message: 'Artifact created', + job_artifact_id: artifact.id, + size: artifact.size, + file_type: artifact.file_type, + build_id: artifact.job_id, + project_id: artifact.project_id, + 'correlation_id' => an_instance_of(String), + 'meta.feature_category' => 'test', + 'meta.caller_id' => 'caller' + ) ) - ) + end - described_class.log_created(artifact) + described_class.log_created(artifacts) end end @@ -43,7 +47,7 @@ RSpec.describe Gitlab::Ci::Artifacts::Logger do job_artifact_id: artifact.id, expire_at: artifact.expire_at, size: artifact.size, - type: artifact.file_type, + file_type: artifact.file_type, build_id: artifact.job_id, project_id: artifact.project_id, method: method, diff 
--git a/spec/lib/gitlab/ci/build/auto_retry_spec.rb b/spec/lib/gitlab/ci/build/auto_retry_spec.rb index 9ff9200322e..314714c543b 100644 --- a/spec/lib/gitlab/ci/build/auto_retry_spec.rb +++ b/spec/lib/gitlab/ci/build/auto_retry_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Ci::Build::AutoRetry do +RSpec.describe Gitlab::Ci::Build::AutoRetry, feature_category: :pipeline_authoring do let(:auto_retry) { described_class.new(build) } describe '#allowed?' do @@ -112,5 +112,13 @@ RSpec.describe Gitlab::Ci::Build::AutoRetry do expect(result).to eq ['always'] end end + + context 'with retry[:when] set to nil' do + let(:build) { create(:ci_build, options: { retry: { when: nil } }) } + + it 'returns always array' do + expect(result).to eq ['always'] + end + end end end diff --git a/spec/lib/gitlab/ci/build/rules/rule/clause/if_spec.rb b/spec/lib/gitlab/ci/build/rules/rule/clause/if_spec.rb index 31c7437cfe0..ebdb738f10b 100644 --- a/spec/lib/gitlab/ci/build/rules/rule/clause/if_spec.rb +++ b/spec/lib/gitlab/ci/build/rules/rule/clause/if_spec.rb @@ -1,10 +1,8 @@ # frozen_string_literal: true -require 'fast_spec_helper' -require 'support/helpers/stubbed_feature' -require 'support/helpers/stub_feature_flags' +require 'spec_helper' -RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::If do +RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::If, feature_category: :continuous_integration do include StubFeatureFlags subject(:if_clause) { described_class.new(expression) } diff --git a/spec/lib/gitlab/ci/components/instance_path_spec.rb b/spec/lib/gitlab/ci/components/instance_path_spec.rb new file mode 100644 index 00000000000..d9beae0555c --- /dev/null +++ b/spec/lib/gitlab/ci/components/instance_path_spec.rb @@ -0,0 +1,116 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Ci::Components::InstancePath, feature_category: :pipeline_authoring do + let_it_be(:user) { create(:user) } + + let(:path) { 
described_class.new(address: address, content_filename: 'template.yml') } + let(:settings) { Settingslogic.new({ 'component_fqdn' => current_host }) } + let(:current_host) { 'acme.com/' } + + before do + allow(::Settings).to receive(:gitlab_ci).and_return(settings) + end + + describe 'FQDN path' do + let_it_be(:existing_project) { create(:project, :repository) } + + let(:project_path) { existing_project.full_path } + let(:address) { "acme.com/#{project_path}/component@#{version}" } + let(:version) { 'master' } + + context 'when project exists' do + it 'provides the expected attributes', :aggregate_failures do + expect(path.project).to eq(existing_project) + expect(path.host).to eq(current_host) + expect(path.sha).to eq(existing_project.commit('master').id) + expect(path.project_file_path).to eq('component/template.yml') + end + + context 'when content exists' do + let(:content) { 'image: alpine' } + + before do + allow_next_instance_of(Repository) do |instance| + allow(instance) + .to receive(:blob_data_at) + .with(existing_project.commit('master').id, 'component/template.yml') + .and_return(content) + end + end + + context 'when user has permissions to read code' do + before do + existing_project.add_developer(user) + end + + it 'fetches the content' do + expect(path.fetch_content!(current_user: user)).to eq(content) + end + end + + context 'when user does not have permissions to download code' do + it 'raises an error when fetching the content' do + expect { path.fetch_content!(current_user: user) } + .to raise_error(Gitlab::Access::AccessDeniedError) + end + end + end + end + + context 'when project path is nested under a subgroup' do + let(:existing_group) { create(:group, :nested) } + let(:existing_project) { create(:project, :repository, group: existing_group) } + + it 'provides the expected attributes', :aggregate_failures do + expect(path.project).to eq(existing_project) + expect(path.host).to eq(current_host) + expect(path.sha).to 
eq(existing_project.commit('master').id) + expect(path.project_file_path).to eq('component/template.yml') + end + end + + context 'when current GitLab instance is installed on a relative URL' do + let(:address) { "acme.com/gitlab/#{project_path}/component@#{version}" } + let(:current_host) { 'acme.com/gitlab/' } + + it 'provides the expected attributes', :aggregate_failures do + expect(path.project).to eq(existing_project) + expect(path.host).to eq(current_host) + expect(path.sha).to eq(existing_project.commit('master').id) + expect(path.project_file_path).to eq('component/template.yml') + end + end + + context 'when version does not exist' do + let(:version) { 'non-existent' } + + it 'provides the expected attributes', :aggregate_failures do + expect(path.project).to eq(existing_project) + expect(path.host).to eq(current_host) + expect(path.sha).to be_nil + expect(path.project_file_path).to eq('component/template.yml') + end + + it 'returns nil when fetching the content' do + expect(path.fetch_content!(current_user: user)).to be_nil + end + end + + context 'when project does not exist' do + let(:project_path) { 'non-existent/project' } + + it 'provides the expected attributes', :aggregate_failures do + expect(path.project).to be_nil + expect(path.host).to eq(current_host) + expect(path.sha).to be_nil + expect(path.project_file_path).to be_nil + end + + it 'returns nil when fetching the content' do + expect(path.fetch_content!(current_user: user)).to be_nil + end + end + end +end diff --git a/spec/lib/gitlab/ci/config/entry/include_spec.rb b/spec/lib/gitlab/ci/config/entry/include_spec.rb index fd7f85c9298..5eecff5b592 100644 --- a/spec/lib/gitlab/ci/config/entry/include_spec.rb +++ b/spec/lib/gitlab/ci/config/entry/include_spec.rb @@ -44,6 +44,12 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Include do it { is_expected.to be_valid } end + context 'when using "component"' do + let(:config) { { component: 'path/to/component@1.0' } } + + it { is_expected.to be_valid 
} + end + context 'when using "artifact"' do context 'and specifying "job"' do let(:config) { { artifact: 'test.yml', job: 'generator' } } diff --git a/spec/lib/gitlab/ci/config/external/context_spec.rb b/spec/lib/gitlab/ci/config/external/context_spec.rb index 40702e75404..1fd3cf3c99f 100644 --- a/spec/lib/gitlab/ci/config/external/context_spec.rb +++ b/spec/lib/gitlab/ci/config/external/context_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Ci::Config::External::Context do +RSpec.describe Gitlab::Ci::Config::External::Context, feature_category: :pipeline_authoring do let(:project) { build(:project) } let(:user) { double('User') } let(:sha) { '12345' } @@ -14,7 +14,8 @@ RSpec.describe Gitlab::Ci::Config::External::Context do describe 'attributes' do context 'with values' do it { is_expected.to have_attributes(**attributes) } - it { expect(subject.expandset).to eq(Set.new) } + it { expect(subject.expandset).to eq([]) } + it { expect(subject.max_includes).to eq(Gitlab::Ci::Config::External::Context::NEW_MAX_INCLUDES) } it { expect(subject.execution_deadline).to eq(0) } it { expect(subject.variables).to be_instance_of(Gitlab::Ci::Variables::Collection) } it { expect(subject.variables_hash).to be_instance_of(ActiveSupport::HashWithIndifferentAccess) } @@ -25,11 +26,39 @@ RSpec.describe Gitlab::Ci::Config::External::Context do let(:attributes) { { project: nil, user: nil, sha: nil } } it { is_expected.to have_attributes(**attributes) } - it { expect(subject.expandset).to eq(Set.new) } + it { expect(subject.expandset).to eq([]) } + it { expect(subject.max_includes).to eq(Gitlab::Ci::Config::External::Context::NEW_MAX_INCLUDES) } it { expect(subject.execution_deadline).to eq(0) } it { expect(subject.variables).to be_instance_of(Gitlab::Ci::Variables::Collection) } it { expect(subject.variables_hash).to be_instance_of(ActiveSupport::HashWithIndifferentAccess) } end + + context 'when FF ci_includes_count_duplicates is disabled' do + before do + 
stub_feature_flags(ci_includes_count_duplicates: false) + end + + context 'with values' do + it { is_expected.to have_attributes(**attributes) } + it { expect(subject.expandset).to eq(Set.new) } + it { expect(subject.max_includes).to eq(Gitlab::Ci::Config::External::Context::MAX_INCLUDES) } + it { expect(subject.execution_deadline).to eq(0) } + it { expect(subject.variables).to be_instance_of(Gitlab::Ci::Variables::Collection) } + it { expect(subject.variables_hash).to be_instance_of(ActiveSupport::HashWithIndifferentAccess) } + it { expect(subject.variables_hash).to include('a' => 'b') } + end + + context 'without values' do + let(:attributes) { { project: nil, user: nil, sha: nil } } + + it { is_expected.to have_attributes(**attributes) } + it { expect(subject.expandset).to eq(Set.new) } + it { expect(subject.max_includes).to eq(Gitlab::Ci::Config::External::Context::MAX_INCLUDES) } + it { expect(subject.execution_deadline).to eq(0) } + it { expect(subject.variables).to be_instance_of(Gitlab::Ci::Variables::Collection) } + it { expect(subject.variables_hash).to be_instance_of(ActiveSupport::HashWithIndifferentAccess) } + end + end end describe '#set_deadline' do diff --git a/spec/lib/gitlab/ci/config/external/file/artifact_spec.rb b/spec/lib/gitlab/ci/config/external/file/artifact_spec.rb index a8dc7897082..45a15fb5f36 100644 --- a/spec/lib/gitlab/ci/config/external/file/artifact_spec.rb +++ b/spec/lib/gitlab/ci/config/external/file/artifact_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Ci::Config::External::File::Artifact do +RSpec.describe Gitlab::Ci::Config::External::File::Artifact, feature_category: :pipeline_authoring do let(:parent_pipeline) { create(:ci_pipeline) } let(:variables) {} let(:context) do @@ -31,7 +31,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Artifact do describe '#valid?' do subject(:valid?) do - external_file.validate! 
+ Gitlab::Ci::Config::External::Mapper::Verifier.new(context).process([external_file]) external_file.valid? end @@ -162,7 +162,8 @@ RSpec.describe Gitlab::Ci::Config::External::File::Artifact do user: anything } expect(context).to receive(:mutate).with(expected_attrs).and_call_original - external_file.validate! + + expect(valid?).to be_truthy external_file.content end end diff --git a/spec/lib/gitlab/ci/config/external/file/base_spec.rb b/spec/lib/gitlab/ci/config/external/file/base_spec.rb index 8475c3a8b19..55d95d0c1f8 100644 --- a/spec/lib/gitlab/ci/config/external/file/base_spec.rb +++ b/spec/lib/gitlab/ci/config/external/file/base_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Ci::Config::External::File::Base do +RSpec.describe Gitlab::Ci::Config::External::File::Base, feature_category: :pipeline_authoring do let(:variables) {} let(:context_params) { { sha: 'HEAD', variables: variables } } let(:context) { Gitlab::Ci::Config::External::Context.new(**context_params) } @@ -51,7 +51,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Base do describe '#valid?' do subject(:valid?) do - file.validate! + Gitlab::Ci::Config::External::Mapper::Verifier.new(context).process([file]) file.valid? 
end diff --git a/spec/lib/gitlab/ci/config/external/file/component_spec.rb b/spec/lib/gitlab/ci/config/external/file/component_spec.rb new file mode 100644 index 00000000000..a162a1a8abf --- /dev/null +++ b/spec/lib/gitlab/ci/config/external/file/component_spec.rb @@ -0,0 +1,179 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Ci::Config::External::File::Component, feature_category: :pipeline_authoring do + let_it_be(:context_project) { create(:project, :repository) } + let_it_be(:project) { create(:project, :repository) } + let_it_be(:user) { create(:user) } + let_it_be(:project_variables) { project.predefined_variables } + + let(:context) { Gitlab::Ci::Config::External::Context.new(**context_params) } + let(:external_resource) { described_class.new(params, context) } + let(:params) { { component: 'gitlab.com/acme/components/my-component@1.0' } } + let(:fetch_service) { instance_double(::Ci::Components::FetchService) } + let(:response) { ServiceResponse.error(message: 'some error message') } + + let(:context_params) do + { + project: context_project, + sha: '12345', + user: user, + variables: project_variables + } + end + + before do + allow(::Ci::Components::FetchService) + .to receive(:new) + .with( + address: params[:component], + current_user: context.user + ).and_return(fetch_service) + + allow(fetch_service).to receive(:execute).and_return(response) + end + + describe '#matching?' do + subject(:matching) { external_resource.matching? } + + context 'when component is specified' do + let(:params) { { component: 'some-value' } } + + it { is_expected.to be_truthy } + + context 'when feature flag ci_include_components is disabled' do + before do + stub_feature_flags(ci_include_components: false) + end + + it { is_expected.to be_falsey } + end + end + + context 'when component is not specified' do + let(:params) { { local: 'some-value' } } + + it { is_expected.to be_falsy } + end + end + + describe '#valid?' 
do + subject(:valid?) do + Gitlab::Ci::Config::External::Mapper::Verifier.new(context).process([external_resource]) + external_resource.valid? + end + + context 'when the context project does not have a repository' do + before do + allow(context_project).to receive(:repository).and_return(nil) + end + + it 'is invalid' do + expect(subject).to be_falsy + expect(external_resource.error_message).to eq('Unable to use components outside of a project context') + end + end + + context 'when location is not provided' do + let(:params) { { component: 123 } } + + it 'is invalid' do + expect(subject).to be_falsy + expect(external_resource.error_message).to eq('Included file `123` needs to be a string') + end + end + + context 'when component path is provided' do + context 'when component is not found' do + let(:response) do + ServiceResponse.error(message: 'Content not found') + end + + it 'is invalid' do + expect(subject).to be_falsy + expect(external_resource.error_message).to eq('Content not found') + end + end + + context 'when component is found' do + let(:content) do + <<~COMPONENT + job: + script: echo + COMPONENT + end + + let(:response) do + ServiceResponse.success(payload: { + content: content, + path: instance_double(::Gitlab::Ci::Components::InstancePath, project: project, sha: '12345') + }) + end + + it 'is valid' do + expect(subject).to be_truthy + expect(external_resource.content).to eq(content) + end + + context 'when content is not a valid YAML' do + let(:content) { 'the-content' } + + it 'is invalid' do + expect(subject).to be_falsy + expect(external_resource.error_message).to match(/does not have valid YAML syntax/) + end + end + end + end + end + + describe '#metadata' do + subject(:metadata) { external_resource.metadata } + + let(:component_path) do + instance_double(::Gitlab::Ci::Components::InstancePath, + project: project, + sha: '12345', + project_file_path: 'my-component/template.yml') + end + + let(:response) do + ServiceResponse.success(payload: { 
path: component_path }) + end + + it 'returns the metadata' do + is_expected.to include( + context_project: context_project.full_path, + context_sha: context.sha, + type: :component, + location: 'gitlab.com/acme/components/my-component@1.0', + blob: a_string_ending_with("#{project.full_path}/-/blob/12345/my-component/template.yml"), + raw: nil, + extra: {} + ) + end + end + + describe '#expand_context' do + let(:component_path) do + instance_double(::Gitlab::Ci::Components::InstancePath, + project: project, + sha: '12345') + end + + let(:response) do + ServiceResponse.success(payload: { path: component_path }) + end + + subject { external_resource.send(:expand_context_attrs) } + + it 'inherits user and variables while changes project and sha' do + is_expected.to include( + project: project, + sha: '12345', + user: context.user, + variables: context.variables) + end + end +end diff --git a/spec/lib/gitlab/ci/config/external/file/local_spec.rb b/spec/lib/gitlab/ci/config/external/file/local_spec.rb index a77acb45978..b5895b4bc81 100644 --- a/spec/lib/gitlab/ci/config/external/file/local_spec.rb +++ b/spec/lib/gitlab/ci/config/external/file/local_spec.rb @@ -30,6 +30,40 @@ RSpec.describe Gitlab::Ci::Config::External::File::Local, feature_category: :pip .to receive(:check_execution_time!) 
end + describe '.initialize' do + context 'when a local is specified' do + let(:params) { { local: 'file' } } + + it 'sets the location' do + expect(local_file.location).to eq('file') + end + + context 'when the local is prefixed with a slash' do + let(:params) { { local: '/file' } } + + it 'removes the slash' do + expect(local_file.location).to eq('file') + end + end + + context 'when the local is prefixed with multiple slashes' do + let(:params) { { local: '//file' } } + + it 'removes slashes' do + expect(local_file.location).to eq('file') + end + end + end + + context 'with a missing local' do + let(:params) { { local: nil } } + + it 'sets the location to an empty string' do + expect(local_file.location).to eq('') + end + end + end + describe '#matching?' do context 'when a local is specified' do let(:params) { { local: 'file' } } @@ -58,7 +92,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Local, feature_category: :pip describe '#valid?' do subject(:valid?) do - local_file.validate! + Gitlab::Ci::Config::External::Mapper::Verifier.new(context).process([local_file]) local_file.valid? end @@ -88,10 +122,13 @@ RSpec.describe Gitlab::Ci::Config::External::File::Local, feature_category: :pip let(:variables) { Gitlab::Ci::Variables::Collection.new([{ 'key' => 'GITLAB_TOKEN', 'value' => 'secret', 'masked' => true }]) } let(:location) { '/lib/gitlab/ci/templates/secret/existent-file.yml' } - it 'returns false and adds an error message about an empty file' do + before do allow_any_instance_of(described_class).to receive(:fetch_local_content).and_return("") - local_file.validate! 
- expect(local_file.errors).to include("Local file `/lib/gitlab/ci/templates/xxxxxx/existent-file.yml` is empty!") + end + + it 'returns false and adds an error message about an empty file' do + expect(valid?).to be_falsy + expect(local_file.errors).to include("Local file `lib/gitlab/ci/templates/xxxxxx/existent-file.yml` is empty!") end end @@ -101,7 +138,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Local, feature_category: :pip it 'returns false and adds an error message stating that included file does not exist' do expect(valid?).to be_falsy - expect(local_file.errors).to include("Sha #{sha} is not valid!") + expect(local_file.errors).to include("Local file `lib/gitlab/ci/templates/existent-file.yml` does not exist!") end end end @@ -143,11 +180,11 @@ RSpec.describe Gitlab::Ci::Config::External::File::Local, feature_category: :pip let(:variables) { Gitlab::Ci::Variables::Collection.new([{ 'key' => 'GITLAB_TOKEN', 'value' => 'secret_file', 'masked' => true }]) } before do - local_file.validate! 
+ Gitlab::Ci::Config::External::Mapper::Verifier.new(context).process([local_file]) end it 'returns an error message' do - expect(local_file.error_message).to eq("Local file `/lib/gitlab/ci/templates/xxxxxxxxxxx.yml` does not exist!") + expect(local_file.error_message).to eq("Local file `lib/gitlab/ci/templates/xxxxxxxxxxx.yml` does not exist!") end end @@ -203,7 +240,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Local, feature_category: :pip context_project: project.full_path, context_sha: sha, type: :local, - location: '/lib/gitlab/ci/templates/existent-file.yml', + location: 'lib/gitlab/ci/templates/existent-file.yml', blob: "http://localhost/#{project.full_path}/-/blob/#{sha}/lib/gitlab/ci/templates/existent-file.yml", raw: "http://localhost/#{project.full_path}/-/raw/#{sha}/lib/gitlab/ci/templates/existent-file.yml", extra: {} diff --git a/spec/lib/gitlab/ci/config/external/file/project_spec.rb b/spec/lib/gitlab/ci/config/external/file/project_spec.rb index 0ba92d1e92d..abe38cdbc3e 100644 --- a/spec/lib/gitlab/ci/config/external/file/project_spec.rb +++ b/spec/lib/gitlab/ci/config/external/file/project_spec.rb @@ -2,7 +2,9 @@ require 'spec_helper' -RSpec.describe Gitlab::Ci::Config::External::File::Project do +RSpec.describe Gitlab::Ci::Config::External::File::Project, feature_category: :pipeline_authoring do + include RepoHelpers + let_it_be(:context_project) { create(:project) } let_it_be(:project) { create(:project, :repository) } let_it_be(:user) { create(:user) } @@ -12,11 +14,12 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do let(:context) { Gitlab::Ci::Config::External::Context.new(**context_params) } let(:project_file) { described_class.new(params, context) } let(:variables) { project.predefined_variables.to_runner_variables } + let(:project_sha) { project.commit.sha } let(:context_params) do { project: context_project, - sha: '12345', + sha: project_sha, user: context_user, parent_pipeline: parent_pipeline, variables: 
variables @@ -67,7 +70,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do describe '#valid?' do subject(:valid?) do - project_file.validate! + Gitlab::Ci::Config::External::Mapper::Verifier.new(context).process([project_file]) project_file.valid? end @@ -76,10 +79,10 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do { project: project.full_path, file: '/file.yml' } end - let(:root_ref_sha) { project.repository.root_ref_sha } - - before do - stub_project_blob(root_ref_sha, '/file.yml') { 'image: image:1.0' } + around do |example| + create_and_delete_files(project, { '/file.yml' => 'image: image:1.0' }) do + example.run + end end it { is_expected.to be_truthy } @@ -99,10 +102,10 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do { project: project.full_path, ref: 'master', file: '/file.yml' } end - let(:ref_sha) { project.commit('master').sha } - - before do - stub_project_blob(ref_sha, '/file.yml') { 'image: image:1.0' } + around do |example| + create_and_delete_files(project, { '/file.yml' => 'image: image:1.0' }) do + example.run + end end it { is_expected.to be_truthy } @@ -114,15 +117,16 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do end let(:variables) { Gitlab::Ci::Variables::Collection.new([{ 'key' => 'GITLAB_TOKEN', 'value' => 'secret_file', 'masked' => true }]) } - let(:root_ref_sha) { project.repository.root_ref_sha } - before do - stub_project_blob(root_ref_sha, '/secret_file.yml') { '' } + around do |example| + create_and_delete_files(project, { '/secret_file.yml' => '' }) do + example.run + end end it 'returns false' do expect(valid?).to be_falsy - expect(project_file.error_message).to include("Project `#{project.full_path}` file `/xxxxxxxxxxx.yml` is empty!") + expect(project_file.error_message).to include("Project `#{project.full_path}` file `xxxxxxxxxxx.yml` is empty!") end end @@ -146,7 +150,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do it 'returns false' do 
expect(valid?).to be_falsy - expect(project_file.error_message).to include("Project `#{project.full_path}` file `/xxxxxxxxxxxxxxxxxxx.yml` does not exist!") + expect(project_file.error_message).to include("Project `#{project.full_path}` file `xxxxxxxxxxxxxxxxxxx.yml` does not exist!") end end @@ -157,7 +161,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do it 'returns false' do expect(valid?).to be_falsy - expect(project_file.error_message).to include('Included file `/invalid-file` does not have YAML extension!') + expect(project_file.error_message).to include('Included file `invalid-file` does not have YAML extension!') end end @@ -200,7 +204,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do is_expected.to include( user: user, project: project, - sha: project.commit('master').id, + sha: project_sha, parent_pipeline: parent_pipeline, variables: project.predefined_variables.to_runner_variables) end @@ -216,45 +220,43 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do it { is_expected.to eq( context_project: context_project.full_path, - context_sha: '12345', + context_sha: project_sha, type: :file, - location: '/file.yml', - blob: "http://localhost/#{project.full_path}/-/blob/#{project.commit('master').id}/file.yml", - raw: "http://localhost/#{project.full_path}/-/raw/#{project.commit('master').id}/file.yml", + location: 'file.yml', + blob: "http://localhost/#{project.full_path}/-/blob/#{project_sha}/file.yml", + raw: "http://localhost/#{project.full_path}/-/raw/#{project_sha}/file.yml", extra: { project: project.full_path, ref: 'HEAD' } ) } context 'when project name and ref include masked variables' do + let(:project_name) { 'my_project_name' } + let(:branch_name) { 'merge-commit-analyze-after' } + let(:project) { create(:project, :repository, name: project_name) } + let(:namespace_path) { project.namespace.full_path } + let(:included_project_sha) { project.commit(branch_name).sha } + let(:variables) do 
Gitlab::Ci::Variables::Collection.new( [ - { key: 'VAR1', value: 'a_secret_variable_value1', masked: true }, - { key: 'VAR2', value: 'a_secret_variable_value2', masked: true } + { key: 'VAR1', value: project_name, masked: true }, + { key: 'VAR2', value: branch_name, masked: true } ]) end - let(:params) { { project: 'a_secret_variable_value1', ref: 'a_secret_variable_value2', file: '/file.yml' } } + let(:params) { { project: project.full_path, ref: branch_name, file: '/file.yml' } } it { is_expected.to eq( context_project: context_project.full_path, - context_sha: '12345', + context_sha: project_sha, type: :file, - location: '/file.yml', - blob: nil, - raw: nil, - extra: { project: 'xxxxxxxxxxxxxxxxxxxxxxxx', ref: 'xxxxxxxxxxxxxxxxxxxxxxxx' } + location: 'file.yml', + blob: "http://localhost/#{namespace_path}/xxxxxxxxxxxxxxx/-/blob/#{included_project_sha}/file.yml", + raw: "http://localhost/#{namespace_path}/xxxxxxxxxxxxxxx/-/raw/#{included_project_sha}/file.yml", + extra: { project: "#{namespace_path}/xxxxxxxxxxxxxxx", ref: 'xxxxxxxxxxxxxxxxxxxxxxxxxx' } ) } end end - - private - - def stub_project_blob(ref, path) - allow_next_instance_of(Repository) do |instance| - allow(instance).to receive(:blob_data_at).with(ref, path) { yield } - end - end end diff --git a/spec/lib/gitlab/ci/config/external/file/remote_spec.rb b/spec/lib/gitlab/ci/config/external/file/remote_spec.rb index 8d93cdcf378..2ce3c257a43 100644 --- a/spec/lib/gitlab/ci/config/external/file/remote_spec.rb +++ b/spec/lib/gitlab/ci/config/external/file/remote_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Ci::Config::External::File::Remote do +RSpec.describe Gitlab::Ci::Config::External::File::Remote, feature_category: :pipeline_authoring do include StubRequests let(:variables) { Gitlab::Ci::Variables::Collection.new([{ 'key' => 'GITLAB_TOKEN', 'value' => 'secret_file', 'masked' => true }]) } @@ -55,7 +55,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Remote do describe 
"#valid?" do subject(:valid?) do - remote_file.validate! + Gitlab::Ci::Config::External::Mapper::Verifier.new(context).process([remote_file]) remote_file.valid? end @@ -138,7 +138,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Remote do describe "#error_message" do subject(:error_message) do - remote_file.validate! + Gitlab::Ci::Config::External::Mapper::Verifier.new(context).process([remote_file]) remote_file.error_message end diff --git a/spec/lib/gitlab/ci/config/external/file/template_spec.rb b/spec/lib/gitlab/ci/config/external/file/template_spec.rb index 074e7a1d32d..83e98874118 100644 --- a/spec/lib/gitlab/ci/config/external/file/template_spec.rb +++ b/spec/lib/gitlab/ci/config/external/file/template_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Ci::Config::External::File::Template do +RSpec.describe Gitlab::Ci::Config::External::File::Template, feature_category: :pipeline_authoring do let_it_be(:project) { create(:project) } let_it_be(:user) { create(:user) } @@ -46,7 +46,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Template do describe "#valid?" do subject(:valid?) do - template_file.validate! + Gitlab::Ci::Config::External::Mapper::Verifier.new(context).process([template_file]) template_file.valid? 
end diff --git a/spec/lib/gitlab/ci/config/external/mapper/matcher_spec.rb b/spec/lib/gitlab/ci/config/external/mapper/matcher_spec.rb index 5f321a696c9..11c79e19cff 100644 --- a/spec/lib/gitlab/ci/config/external/mapper/matcher_spec.rb +++ b/spec/lib/gitlab/ci/config/external/mapper/matcher_spec.rb @@ -17,11 +17,14 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper::Matcher, feature_category: describe '#process' do let(:locations) do - [{ local: 'file.yml' }, - { file: 'file.yml', project: 'namespace/project' }, - { remote: 'https://example.com/.gitlab-ci.yml' }, - { template: 'file.yml' }, - { artifact: 'generated.yml', job: 'test' }] + [ + { local: 'file.yml' }, + { file: 'file.yml', project: 'namespace/project' }, + { component: 'gitlab.com/org/component@1.0' }, + { remote: 'https://example.com/.gitlab-ci.yml' }, + { template: 'file.yml' }, + { artifact: 'generated.yml', job: 'test' } + ] end subject(:process) { matcher.process(locations) } @@ -30,6 +33,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper::Matcher, feature_category: is_expected.to contain_exactly( an_instance_of(Gitlab::Ci::Config::External::File::Local), an_instance_of(Gitlab::Ci::Config::External::File::Project), + an_instance_of(Gitlab::Ci::Config::External::File::Component), an_instance_of(Gitlab::Ci::Config::External::File::Remote), an_instance_of(Gitlab::Ci::Config::External::File::Template), an_instance_of(Gitlab::Ci::Config::External::File::Artifact) @@ -42,8 +46,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper::Matcher, feature_category: it 'raises an error' do expect { process }.to raise_error( Gitlab::Ci::Config::External::Mapper::AmbigiousSpecificationError, - '`{"invalid":"file.yml"}` does not have a valid subkey for include. ' \ - 'Valid subkeys are: `local`, `project`, `remote`, `template`, `artifact`' + /`{"invalid":"file.yml"}` does not have a valid subkey for include. 
Valid subkeys are:/ ) end @@ -53,8 +56,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper::Matcher, feature_category: it 'raises an error with a masked sentence' do expect { process }.to raise_error( Gitlab::Ci::Config::External::Mapper::AmbigiousSpecificationError, - '`{"invalid":"xxxxxxxxxxxxxx.yml"}` does not have a valid subkey for include. ' \ - 'Valid subkeys are: `local`, `project`, `remote`, `template`, `artifact`' + /`{"invalid":"xxxxxxxxxxxxxx.yml"}` does not have a valid subkey for include. Valid subkeys are:/ ) end end @@ -66,7 +68,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper::Matcher, feature_category: it 'raises an error' do expect { process }.to raise_error( Gitlab::Ci::Config::External::Mapper::AmbigiousSpecificationError, - "Each include must use only one of: `local`, `project`, `remote`, `template`, `artifact`" + /Each include must use only one of:/ ) end end diff --git a/spec/lib/gitlab/ci/config/external/mapper/verifier_spec.rb b/spec/lib/gitlab/ci/config/external/mapper/verifier_spec.rb index 7c7252c6b0e..a219666f24e 100644 --- a/spec/lib/gitlab/ci/config/external/mapper/verifier_spec.rb +++ b/spec/lib/gitlab/ci/config/external/mapper/verifier_spec.rb @@ -25,6 +25,10 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper::Verifier, feature_category: my_test: script: echo Hello World YAML + 'myfolder/file3.yml' => <<~YAML, + my_deploy: + script: echo Hello World + YAML 'nested_configs.yml' => <<~YAML include: - local: myfolder/file1.yml @@ -58,16 +62,63 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper::Verifier, feature_category: let(:files) do [ Gitlab::Ci::Config::External::File::Local.new({ local: 'myfolder/file1.yml' }, context), - Gitlab::Ci::Config::External::File::Local.new({ local: 'myfolder/file2.yml' }, context) + Gitlab::Ci::Config::External::File::Local.new({ local: 'myfolder/file2.yml' }, context), + Gitlab::Ci::Config::External::File::Local.new({ local: 'myfolder/file3.yml' }, context) ] end it 'returns an 
array of file objects' do - expect(process.map(&:location)).to contain_exactly('myfolder/file1.yml', 'myfolder/file2.yml') + expect(process.map(&:location)).to contain_exactly( + 'myfolder/file1.yml', 'myfolder/file2.yml', 'myfolder/file3.yml' + ) + end + + it 'adds files to the expandset' do + expect { process }.to change { context.expandset.count }.by(3) + end + + it 'calls Gitaly only once for all files', :request_store do + # 1 for project.commit.id, 1 for the files + expect { process }.to change { Gitlab::GitalyClient.get_request_count }.by(2) + end + end + + context 'when files are project files' do + let_it_be(:included_project) { create(:project, :repository, namespace: project.namespace, creator: user) } + + let(:files) do + [ + Gitlab::Ci::Config::External::File::Project.new( + { file: 'myfolder/file1.yml', project: included_project.full_path }, context + ), + Gitlab::Ci::Config::External::File::Project.new( + { file: 'myfolder/file2.yml', project: included_project.full_path }, context + ), + Gitlab::Ci::Config::External::File::Project.new( + { file: 'myfolder/file3.yml', project: included_project.full_path }, context + ) + ] + end + + around(:all) do |example| + create_and_delete_files(included_project, project_files) do + example.run + end + end + + it 'returns an array of file objects' do + expect(process.map(&:location)).to contain_exactly( + 'myfolder/file1.yml', 'myfolder/file2.yml', 'myfolder/file3.yml' + ) end it 'adds files to the expandset' do - expect { process }.to change { context.expandset.count }.by(2) + expect { process }.to change { context.expandset.count }.by(3) + end + + it 'calls Gitaly only once for all files', :request_store do + # 1 for project.commit.id, 3 for the sha check, 1 for the files + expect { process }.to change { Gitlab::GitalyClient.get_request_count }.by(5) end end @@ -99,7 +150,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper::Verifier, feature_category: end end - context 'when max_includes is exceeded' do + 
context 'when total file count exceeds max_includes' do context 'when files are nested' do let(:files) do [ @@ -107,11 +158,8 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper::Verifier, feature_category: ] end - before do - allow(context).to receive(:max_includes).and_return(1) - end - it 'raises Processor::IncludeError' do + allow(context).to receive(:max_includes).and_return(1) expect { process }.to raise_error(Gitlab::Ci::Config::External::Processor::IncludeError) end end @@ -124,13 +172,36 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper::Verifier, feature_category: ] end - before do + it 'raises Mapper::TooManyIncludesError' do allow(context).to receive(:max_includes).and_return(1) + expect { process }.to raise_error(Gitlab::Ci::Config::External::Mapper::TooManyIncludesError) end + end - it 'raises Mapper::TooManyIncludesError' do + context 'when files are duplicates' do + let(:files) do + [ + Gitlab::Ci::Config::External::File::Local.new({ local: 'myfolder/file1.yml' }, context), + Gitlab::Ci::Config::External::File::Local.new({ local: 'myfolder/file1.yml' }, context), + Gitlab::Ci::Config::External::File::Local.new({ local: 'myfolder/file1.yml' }, context) + ] + end + + it 'raises error' do + allow(context).to receive(:max_includes).and_return(2) expect { process }.to raise_error(Gitlab::Ci::Config::External::Mapper::TooManyIncludesError) end + + context 'when FF ci_includes_count_duplicates is disabled' do + before do + stub_feature_flags(ci_includes_count_duplicates: false) + end + + it 'does not raise error' do + allow(context).to receive(:max_includes).and_return(2) + expect { process }.not_to raise_error + end + end end end end diff --git a/spec/lib/gitlab/ci/config/external/mapper_spec.rb b/spec/lib/gitlab/ci/config/external/mapper_spec.rb index 9d0e57d4292..b3115617084 100644 --- a/spec/lib/gitlab/ci/config/external/mapper_spec.rb +++ b/spec/lib/gitlab/ci/config/external/mapper_spec.rb @@ -2,9 +2,7 @@ require 'spec_helper' -# This will be 
use with the FF ci_refactoring_external_mapper_verifier in the next MR. -# It can be removed when the FF is removed. -RSpec.shared_context 'gitlab_ci_config_external_mapper' do +RSpec.describe Gitlab::Ci::Config::External::Mapper, feature_category: :pipeline_authoring do include StubRequests include RepoHelpers @@ -124,7 +122,7 @@ RSpec.shared_context 'gitlab_ci_config_external_mapper' do end it 'returns ambigious specification error' do - expect { subject }.to raise_error(described_class::AmbigiousSpecificationError, '`{"invalid":"secret-file.yml"}` does not have a valid subkey for include. Valid subkeys are: `local`, `project`, `remote`, `template`, `artifact`') + expect { subject }.to raise_error(described_class::AmbigiousSpecificationError, /`{"invalid":"secret-file.yml"}` does not have a valid subkey for include. Valid subkeys are:/) end end @@ -138,7 +136,7 @@ RSpec.shared_context 'gitlab_ci_config_external_mapper' do end it 'returns ambigious specification error' do - expect { subject }.to raise_error(described_class::AmbigiousSpecificationError, 'Each include must use only one of: `local`, `project`, `remote`, `template`, `artifact`') + expect { subject }.to raise_error(described_class::AmbigiousSpecificationError, /Each include must use only one of/) end end @@ -168,7 +166,7 @@ RSpec.shared_context 'gitlab_ci_config_external_mapper' do an_instance_of(Gitlab::Ci::Config::External::File::Project)) end - it_behaves_like 'logging config file fetch', 'config_file_fetch_project_content_duration_s', 2 + it_behaves_like 'logging config file fetch', 'config_file_fetch_project_content_duration_s', 1 end end @@ -232,9 +230,20 @@ RSpec.shared_context 'gitlab_ci_config_external_mapper' do expect { process }.not_to raise_error end - it 'has expanset with one' do + it 'has expanset with two' do process - expect(context.expandset.size).to eq(1) + expect(context.expandset.size).to eq(2) + end + + context 'when FF ci_includes_count_duplicates is disabled' do + before do + 
stub_feature_flags(ci_includes_count_duplicates: false) + end + + it 'has expanset with one' do + process + expect(context.expandset.size).to eq(1) + end end end @@ -464,7 +473,3 @@ RSpec.shared_context 'gitlab_ci_config_external_mapper' do end end end - -RSpec.describe Gitlab::Ci::Config::External::Mapper, feature_category: :pipeline_authoring do - it_behaves_like 'gitlab_ci_config_external_mapper' -end diff --git a/spec/lib/gitlab/ci/config/external/processor_spec.rb b/spec/lib/gitlab/ci/config/external/processor_spec.rb index c9efaf2e1af..bb65c2ef10c 100644 --- a/spec/lib/gitlab/ci/config/external/processor_spec.rb +++ b/spec/lib/gitlab/ci/config/external/processor_spec.rb @@ -52,7 +52,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor, feature_category: :pipel it 'raises an error' do expect { processor.perform }.to raise_error( described_class::IncludeError, - "Local file `/lib/gitlab/ci/templates/non-existent-file.yml` does not exist!" + "Local file `lib/gitlab/ci/templates/non-existent-file.yml` does not exist!" ) end end @@ -221,7 +221,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor, feature_category: :pipel it 'raises an error' do expect { processor.perform }.to raise_error( described_class::IncludeError, - "Included file `/lib/gitlab/ci/templates/template.yml` does not have valid YAML syntax!" + "Included file `lib/gitlab/ci/templates/template.yml` does not have valid YAML syntax!" 
) end end @@ -313,7 +313,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor, feature_category: :pipel expect(context.includes).to contain_exactly( { type: :local, - location: '/local/file.yml', + location: 'local/file.yml', blob: "http://localhost/#{project.full_path}/-/blob/#{sha}/local/file.yml", raw: "http://localhost/#{project.full_path}/-/raw/#{sha}/local/file.yml", extra: {}, @@ -334,14 +334,14 @@ RSpec.describe Gitlab::Ci::Config::External::Processor, feature_category: :pipel context_project: project.full_path, context_sha: sha }, { type: :file, - location: '/templates/my-workflow.yml', + location: 'templates/my-workflow.yml', blob: "http://localhost/#{another_project.full_path}/-/blob/#{another_project.commit.sha}/templates/my-workflow.yml", raw: "http://localhost/#{another_project.full_path}/-/raw/#{another_project.commit.sha}/templates/my-workflow.yml", extra: { project: another_project.full_path, ref: 'HEAD' }, context_project: project.full_path, context_sha: sha }, { type: :local, - location: '/templates/my-build.yml', + location: 'templates/my-build.yml', blob: "http://localhost/#{another_project.full_path}/-/blob/#{another_project.commit.sha}/templates/my-build.yml", raw: "http://localhost/#{another_project.full_path}/-/raw/#{another_project.commit.sha}/templates/my-build.yml", extra: {}, @@ -400,6 +400,44 @@ RSpec.describe Gitlab::Ci::Config::External::Processor, feature_category: :pipel end end + describe 'include:component' do + let(:values) do + { + include: { component: "#{Gitlab.config.gitlab.host}/#{another_project.full_path}/component-x@master" }, + image: 'image:1.0' + } + end + + let(:other_project_files) do + { + '/component-x/template.yml' => <<~YAML + component_x_job: + script: echo Component X + YAML + } + end + + before do + another_project.add_developer(user) + end + + it 'appends the file to the values' do + output = processor.perform + expect(output.keys).to match_array([:image, :component_x_job]) + end + + context 'when 
feature flag ci_include_components is disabled' do + before do + stub_feature_flags(ci_include_components: false) + end + + it 'returns an error' do + expect { processor.perform } + .to raise_error(described_class::IncludeError, /does not have a valid subkey for include./) + end + end + end + context 'when a valid project file is defined' do let(:values) do { @@ -465,7 +503,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor, feature_category: :pipel expect(context.includes).to contain_exactly( { type: :file, - location: '/templates/my-build.yml', + location: 'templates/my-build.yml', blob: "http://localhost/#{another_project.full_path}/-/blob/#{another_project.commit.sha}/templates/my-build.yml", raw: "http://localhost/#{another_project.full_path}/-/raw/#{another_project.commit.sha}/templates/my-build.yml", extra: { project: another_project.full_path, ref: 'HEAD' }, @@ -474,7 +512,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor, feature_category: :pipel { type: :file, blob: "http://localhost/#{another_project.full_path}/-/blob/#{another_project.commit.sha}/templates/my-test.yml", raw: "http://localhost/#{another_project.full_path}/-/raw/#{another_project.commit.sha}/templates/my-test.yml", - location: '/templates/my-test.yml', + location: 'templates/my-test.yml', extra: { project: another_project.full_path, ref: 'HEAD' }, context_project: project.full_path, context_sha: sha } diff --git a/spec/lib/gitlab/ci/config/external/rules_spec.rb b/spec/lib/gitlab/ci/config/external/rules_spec.rb index e2bb55f3854..227b62d8ce8 100644 --- a/spec/lib/gitlab/ci/config/external/rules_spec.rb +++ b/spec/lib/gitlab/ci/config/external/rules_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Ci::Config::External::Rules do +RSpec.describe Gitlab::Ci::Config::External::Rules, feature_category: :pipeline_authoring do let(:rule_hashes) {} subject(:rules) { described_class.new(rule_hashes) } diff --git a/spec/lib/gitlab/ci/config/yaml_spec.rb 
b/spec/lib/gitlab/ci/config/yaml_spec.rb new file mode 100644 index 00000000000..4b34553f55e --- /dev/null +++ b/spec/lib/gitlab/ci/config/yaml_spec.rb @@ -0,0 +1,105 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Ci::Config::Yaml, feature_category: :pipeline_authoring do + describe '.load!' do + it 'loads a single-doc YAML file' do + yaml = <<~YAML + image: 'image:1.0' + texts: + nested_key: 'value1' + more_text: + more_nested_key: 'value2' + YAML + + config = described_class.load!(yaml) + + expect(config).to eq({ + image: 'image:1.0', + texts: { + nested_key: 'value1', + more_text: { + more_nested_key: 'value2' + } + } + }) + end + + it 'loads the first document from a multi-doc YAML file' do + yaml = <<~YAML + spec: + inputs: + test_input: + --- + image: 'image:1.0' + texts: + nested_key: 'value1' + more_text: + more_nested_key: 'value2' + YAML + + config = described_class.load!(yaml) + + expect(config).to eq({ + spec: { + inputs: { + test_input: nil + } + } + }) + end + + context 'when ci_multi_doc_yaml is disabled' do + before do + stub_feature_flags(ci_multi_doc_yaml: false) + end + + it 'loads a single-doc YAML file' do + yaml = <<~YAML + image: 'image:1.0' + texts: + nested_key: 'value1' + more_text: + more_nested_key: 'value2' + YAML + + config = described_class.load!(yaml) + + expect(config).to eq({ + image: 'image:1.0', + texts: { + nested_key: 'value1', + more_text: { + more_nested_key: 'value2' + } + } + }) + end + + it 'loads the first document from a multi-doc YAML file' do + yaml = <<~YAML + spec: + inputs: + test_input: + --- + image: 'image:1.0' + texts: + nested_key: 'value1' + more_text: + more_nested_key: 'value2' + YAML + + config = described_class.load!(yaml) + + expect(config).to eq({ + spec: { + inputs: { + test_input: nil + } + } + }) + end + end + end +end diff --git a/spec/lib/gitlab/ci/cron_parser_spec.rb b/spec/lib/gitlab/ci/cron_parser_spec.rb index 4b750cf3bcf..2c07e4d2224 100644 --- 
a/spec/lib/gitlab/ci/cron_parser_spec.rb +++ b/spec/lib/gitlab/ci/cron_parser_spec.rb @@ -37,7 +37,7 @@ RSpec.describe Gitlab::Ci::CronParser do end end - context 'when slash used' do + context 'when */ used' do let(:cron) { '*/10 */6 */10 */10 *' } let(:cron_timezone) { 'UTC' } @@ -63,7 +63,7 @@ RSpec.describe Gitlab::Ci::CronParser do end end - context 'when range and slash used' do + context 'when range and / are used' do let(:cron) { '3-59/10 * * * *' } let(:cron_timezone) { 'UTC' } @@ -74,6 +74,17 @@ RSpec.describe Gitlab::Ci::CronParser do end end + context 'when / is used' do + let(:cron) { '3/10 * * * *' } + let(:cron_timezone) { 'UTC' } + + it_behaves_like returns_time_for_epoch + + it 'returns specific time' do + expect(subject.min).to be_in([3, 13, 23, 33, 43, 53]) + end + end + context 'when cron_timezone is TZInfo format' do before do allow(Time).to receive(:zone) diff --git a/spec/lib/gitlab/ci/interpolation/access_spec.rb b/spec/lib/gitlab/ci/interpolation/access_spec.rb new file mode 100644 index 00000000000..9f6108a328d --- /dev/null +++ b/spec/lib/gitlab/ci/interpolation/access_spec.rb @@ -0,0 +1,49 @@ +# frozen_string_literal: true + +require 'fast_spec_helper' + +RSpec.describe Gitlab::Ci::Interpolation::Access, feature_category: :pipeline_authoring do + subject { described_class.new(access, ctx) } + + let(:access) do + 'inputs.data' + end + + let(:ctx) do + { inputs: { data: 'abcd' }, env: { 'ENV' => 'dev' } } + end + + it 'properly evaluates the access pattern' do + expect(subject.value).to eq 'abcd' + end + + context 'when there are too many objects in the access path' do + let(:access) { 'a.b.c.d.e.f.g.h' } + + it 'only support MAX_ACCESS_OBJECTS steps' do + expect(subject.objects.count).to eq 5 + end + end + + context 'when access expression size is too large' do + before do + stub_const("#{described_class}::MAX_ACCESS_BYTESIZE", 10) + end + + it 'returns an error' do + expect(subject).not_to be_valid + expect(subject.errors.first) + .to eq 
'maximum interpolation expression size exceeded' + end + end + + context 'when there are not enough objects in the access path' do + let(:access) { 'abc[123]' } + + it 'returns an error when there are no objects found' do + expect(subject).not_to be_valid + expect(subject.errors.first) + .to eq 'invalid interpolation access pattern' + end + end +end diff --git a/spec/lib/gitlab/ci/interpolation/block_spec.rb b/spec/lib/gitlab/ci/interpolation/block_spec.rb new file mode 100644 index 00000000000..7f2be505d17 --- /dev/null +++ b/spec/lib/gitlab/ci/interpolation/block_spec.rb @@ -0,0 +1,39 @@ +# frozen_string_literal: true + +require 'fast_spec_helper' + +RSpec.describe Gitlab::Ci::Interpolation::Block, feature_category: :pipeline_authoring do + subject { described_class.new(block, data, ctx) } + + let(:data) do + 'inputs.data' + end + + let(:block) do + "$[[ #{data} ]]" + end + + let(:ctx) do + { inputs: { data: 'abc' }, env: { 'ENV' => 'dev' } } + end + + it 'knows its content' do + expect(subject.content).to eq 'inputs.data' + end + + it 'properly evaluates the access pattern' do + expect(subject.value).to eq 'abc' + end + + describe '.match' do + it 'matches each block in a string' do + expect { |b| described_class.match('$[[ access1 ]] $[[ access2 ]]', &b) } + .to yield_successive_args(['$[[ access1 ]]', 'access1'], ['$[[ access2 ]]', 'access2']) + end + + it 'matches an empty block' do + expect { |b| described_class.match('$[[]]', &b) } + .to yield_with_args('$[[]]', '') + end + end +end diff --git a/spec/lib/gitlab/ci/interpolation/config_spec.rb b/spec/lib/gitlab/ci/interpolation/config_spec.rb new file mode 100644 index 00000000000..e5987776e00 --- /dev/null +++ b/spec/lib/gitlab/ci/interpolation/config_spec.rb @@ -0,0 +1,49 @@ +# frozen_string_literal: true + +require 'fast_spec_helper' + +RSpec.describe Gitlab::Ci::Interpolation::Config, feature_category: :pipeline_authoring do + subject { described_class.new(YAML.safe_load(config)) } + + let(:config) do + 
<<~CFG + test: + spec: + env: $[[ inputs.env ]] + + $[[ inputs.key ]]: + name: $[[ inputs.key ]] + script: my-value + CFG + end + + describe '#replace!' do + it 'replaces each od the nodes with a block return value' do + result = subject.replace! { |node| "abc#{node}cde" } + + expect(result).to eq({ + 'abctestcde' => { 'abcspeccde' => { 'abcenvcde' => 'abc$[[ inputs.env ]]cde' } }, + 'abc$[[ inputs.key ]]cde' => { + 'abcnamecde' => 'abc$[[ inputs.key ]]cde', + 'abcscriptcde' => 'abcmy-valuecde' + } + }) + end + end + + context 'when config size is exceeded' do + before do + stub_const("#{described_class}::MAX_NODES", 7) + end + + it 'returns a config size error' do + replaced = 0 + + subject.replace! { replaced += 1 } + + expect(replaced).to eq 4 + expect(subject.errors.size).to eq 1 + expect(subject.errors.first).to eq 'config too large' + end + end +end diff --git a/spec/lib/gitlab/ci/interpolation/context_spec.rb b/spec/lib/gitlab/ci/interpolation/context_spec.rb new file mode 100644 index 00000000000..ada896f4980 --- /dev/null +++ b/spec/lib/gitlab/ci/interpolation/context_spec.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +require 'fast_spec_helper' + +RSpec.describe Gitlab::Ci::Interpolation::Context, feature_category: :pipeline_authoring do + subject { described_class.new(ctx) } + + let(:ctx) do + { inputs: { key: 'abc' } } + end + + describe '#depth' do + it 'returns a max depth of the hash' do + expect(subject.depth).to eq 2 + end + end + + context 'when interpolation context is too complex' do + let(:ctx) do + { inputs: { key: { aaa: { bbb: 'ccc' } } } } + end + + it 'raises an exception' do + expect { described_class.new(ctx) } + .to raise_error(described_class::ContextTooComplexError) + end + end +end diff --git a/spec/lib/gitlab/ci/interpolation/template_spec.rb b/spec/lib/gitlab/ci/interpolation/template_spec.rb new file mode 100644 index 00000000000..8a243b4db05 --- /dev/null +++ b/spec/lib/gitlab/ci/interpolation/template_spec.rb @@ -0,0 
+1,102 @@ +# frozen_string_literal: true + +require 'fast_spec_helper' + +RSpec.describe Gitlab::Ci::Interpolation::Template, feature_category: :pipeline_authoring do + subject { described_class.new(YAML.safe_load(config), ctx) } + + let(:config) do + <<~CFG + test: + spec: + env: $[[ inputs.env ]] + + $[[ inputs.key ]]: + name: $[[ inputs.key ]] + script: my-value + CFG + end + + let(:ctx) do + { inputs: { env: 'dev', key: 'abc' } } + end + + it 'collects interpolation blocks' do + expect(subject.size).to eq 2 + end + + it 'interpolates the values properly' do + expect(subject.interpolated).to eq YAML.safe_load <<~RESULT + test: + spec: + env: dev + + abc: + name: abc + script: my-value + RESULT + end + + context 'when interpolation can not be performed' do + let(:config) { '$[[ xxx.yyy ]]: abc' } + + it 'does not interpolate the config' do + expect(subject).not_to be_valid + expect(subject.interpolated).to be_nil + end + end + + context 'when template consists of nested arrays with hashes and values' do + let(:config) do + <<~CFG + test: + - a-$[[ inputs.key ]]-b + - c-$[[ inputs.key ]]-d: + d-$[[ inputs.key ]]-e + val: 1 + CFG + end + + it 'performs a valid interpolation' do + result = { 'test' => ['a-abc-b', { 'c-abc-d' => 'd-abc-e', 'val' => 1 }] } + + expect(subject).to be_valid + expect(subject.interpolated).to eq result + end + end + + context 'when template contains symbols that need interpolation' do + subject do + described_class.new({ '$[[ inputs.key ]]'.to_sym => 'cde' }, ctx) + end + + it 'performs a valid interpolation' do + expect(subject).to be_valid + expect(subject.interpolated).to eq({ 'abc' => 'cde' }) + end + end + + context 'when template is too large' do + before do + stub_const('Gitlab::Ci::Interpolation::Config::MAX_NODES', 1) + end + + it 'returns an error' do + expect(subject.interpolated).to be_nil + expect(subject.errors.count).to eq 1 + expect(subject.errors.first).to eq 'config too large' + end + end + + context 'when there are too 
many interpolation blocks' do + before do + stub_const("#{described_class}::MAX_BLOCKS", 1) + end + + it 'returns an error' do + expect(subject.interpolated).to be_nil + expect(subject.errors.count).to eq 1 + expect(subject.errors.first).to eq 'too many interpolation blocks' + end + end +end diff --git a/spec/lib/gitlab/ci/parsers/instrumentation_spec.rb b/spec/lib/gitlab/ci/parsers/instrumentation_spec.rb index 30bcce21be2..6772c62ab93 100644 --- a/spec/lib/gitlab/ci/parsers/instrumentation_spec.rb +++ b/spec/lib/gitlab/ci/parsers/instrumentation_spec.rb @@ -8,14 +8,14 @@ RSpec.describe Gitlab::Ci::Parsers::Instrumentation do Class.new do prepend Gitlab::Ci::Parsers::Instrumentation - def parse!(arg1, arg2) + def parse!(arg1, arg2:) "parse #{arg1} #{arg2}" end end end it 'sets metrics for duration of parsing' do - result = parser_class.new.parse!('hello', 'world') + result = parser_class.new.parse!('hello', arg2: 'world') expect(result).to eq('parse hello world') diff --git a/spec/lib/gitlab/ci/parsers/security/common_spec.rb b/spec/lib/gitlab/ci/parsers/security/common_spec.rb index 03cab021c17..5d2d22c04fc 100644 --- a/spec/lib/gitlab/ci/parsers/security/common_spec.rb +++ b/spec/lib/gitlab/ci/parsers/security/common_spec.rb @@ -203,24 +203,35 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common do end context 'and name is not provided' do - context 'when CVE identifier exists' do - it 'combines identifier with location to create name' do + context 'when location does not exist' do + let(:location) { nil } + + it 'returns only identifier name' do finding = report.findings.find { |x| x.compare_key == 'CVE-2017-11429' } - expect(finding.name).to eq("CVE-2017-11429 in yarn.lock") + expect(finding.name).to eq("CVE-2017-11429") end end - context 'when CWE identifier exists' do - it 'combines identifier with location to create name' do - finding = report.findings.find { |x| x.compare_key == 'CWE-2017-11429' } - expect(finding.name).to eq("CWE-2017-11429 in 
yarn.lock") + context 'when location exists' do + context 'when CVE identifier exists' do + it 'combines identifier with location to create name' do + finding = report.findings.find { |x| x.compare_key == 'CVE-2017-11429' } + expect(finding.name).to eq("CVE-2017-11429 in yarn.lock") + end + end + + context 'when CWE identifier exists' do + it 'combines identifier with location to create name' do + finding = report.findings.find { |x| x.compare_key == 'CWE-2017-11429' } + expect(finding.name).to eq("CWE-2017-11429 in yarn.lock") + end end - end - context 'when neither CVE nor CWE identifier exist' do - it 'combines identifier with location to create name' do - finding = report.findings.find { |x| x.compare_key == 'OTHER-2017-11429' } - expect(finding.name).to eq("other-2017-11429 in yarn.lock") + context 'when neither CVE nor CWE identifier exist' do + it 'combines identifier with location to create name' do + finding = report.findings.find { |x| x.compare_key == 'OTHER-2017-11429' } + expect(finding.name).to eq("other-2017-11429 in yarn.lock") + end end end end diff --git a/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb index 16deeb6916f..31bffcbeb2a 100644 --- a/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb @@ -2,208 +2,20 @@ require 'spec_helper' -RSpec.describe Gitlab::Ci::Pipeline::Chain::CancelPendingPipelines do +RSpec.describe Gitlab::Ci::Pipeline::Chain::CancelPendingPipelines, feature_category: :continuous_integration do let_it_be(:project) { create(:project) } let_it_be(:user) { create(:user) } - - let(:prev_pipeline) { create(:ci_pipeline, project: project) } - let(:new_commit) { create(:commit, project: project) } - let(:pipeline) { create(:ci_pipeline, project: project, sha: new_commit.sha) } - - let(:command) do - Gitlab::Ci::Pipeline::Chain::Command.new(project: project, 
current_user: user) - end - - let(:step) { described_class.new(pipeline, command) } - - before do - create(:ci_build, :interruptible, :running, pipeline: prev_pipeline) - create(:ci_build, :interruptible, :success, pipeline: prev_pipeline) - create(:ci_build, :created, pipeline: prev_pipeline) - - create(:ci_build, :interruptible, pipeline: pipeline) - end + let_it_be(:pipeline) { create(:ci_pipeline, project: project) } + let_it_be(:command) { Gitlab::Ci::Pipeline::Chain::Command.new(project: project, current_user: user) } + let_it_be(:step) { described_class.new(pipeline, command) } describe '#perform!' do subject(:perform) { step.perform! } - before do - expect(build_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created') - expect(build_statuses(pipeline)).to contain_exactly('pending') - end - - context 'when auto-cancel is enabled' do - before do - project.update!(auto_cancel_pending_pipelines: 'enabled') - end - - it 'cancels only previous interruptible builds' do - perform - - expect(build_statuses(prev_pipeline)).to contain_exactly('canceled', 'success', 'canceled') - expect(build_statuses(pipeline)).to contain_exactly('pending') - end - - it 'logs canceled pipelines' do - allow(Gitlab::AppLogger).to receive(:info) - - perform - - expect(Gitlab::AppLogger).to have_received(:info).with( - class: described_class.name, - message: "Pipeline #{pipeline.id} auto-canceling pipeline #{prev_pipeline.id}", - canceled_pipeline_id: prev_pipeline.id, - canceled_by_pipeline_id: pipeline.id, - canceled_by_pipeline_source: pipeline.source - ) - end - - it 'cancels the builds with 2 queries to avoid query timeout' do - second_query_regex = /WHERE "ci_pipelines"\."id" = \d+ AND \(NOT EXISTS/ - recorder = ActiveRecord::QueryRecorder.new { perform } - second_query = recorder.occurrences.keys.filter { |occ| occ =~ second_query_regex } - - expect(second_query).to be_one - end - - context 'when the previous pipeline has a child pipeline' do - 
let(:child_pipeline) { create(:ci_pipeline, child_of: prev_pipeline) } - - context 'when the child pipeline has interruptible running jobs' do - before do - create(:ci_build, :interruptible, :running, pipeline: child_pipeline) - create(:ci_build, :interruptible, :running, pipeline: child_pipeline) - end - - it 'cancels all child pipeline builds' do - expect(build_statuses(child_pipeline)).to contain_exactly('running', 'running') - - perform - - expect(build_statuses(child_pipeline)).to contain_exactly('canceled', 'canceled') - end - - context 'when the child pipeline includes completed interruptible jobs' do - before do - create(:ci_build, :interruptible, :failed, pipeline: child_pipeline) - create(:ci_build, :interruptible, :success, pipeline: child_pipeline) - end - - it 'cancels all child pipeline builds with a cancelable_status' do - expect(build_statuses(child_pipeline)).to contain_exactly('running', 'running', 'failed', 'success') - - perform - - expect(build_statuses(child_pipeline)).to contain_exactly('canceled', 'canceled', 'failed', 'success') - end - end - end - - context 'when the child pipeline has started non-interruptible job' do - before do - create(:ci_build, :interruptible, :running, pipeline: child_pipeline) - # non-interruptible started - create(:ci_build, :success, pipeline: child_pipeline) - end + it 'enqueues CancelRedundantPipelinesWorker' do + expect(Ci::CancelRedundantPipelinesWorker).to receive(:perform_async).with(pipeline.id) - it 'does not cancel any child pipeline builds' do - expect(build_statuses(child_pipeline)).to contain_exactly('running', 'success') - - perform - - expect(build_statuses(child_pipeline)).to contain_exactly('running', 'success') - end - end - - context 'when the child pipeline has non-interruptible non-started job' do - before do - create(:ci_build, :interruptible, :running, pipeline: child_pipeline) - end - - not_started_statuses = Ci::HasStatus::AVAILABLE_STATUSES - Ci::HasStatus::STARTED_STATUSES - context 
'when the jobs are cancelable' do - cancelable_not_started_statuses = Set.new(not_started_statuses).intersection(Ci::HasStatus::CANCELABLE_STATUSES) - cancelable_not_started_statuses.each do |status| - it "cancels all child pipeline builds when build status #{status} included" do - # non-interruptible but non-started - create(:ci_build, status.to_sym, pipeline: child_pipeline) - - expect(build_statuses(child_pipeline)).to contain_exactly('running', status) - - perform - - expect(build_statuses(child_pipeline)).to contain_exactly('canceled', 'canceled') - end - end - end - - context 'when the jobs are not cancelable' do - not_cancelable_not_started_statuses = not_started_statuses - Ci::HasStatus::CANCELABLE_STATUSES - not_cancelable_not_started_statuses.each do |status| - it "does not cancel child pipeline builds when build status #{status} included" do - # non-interruptible but non-started - create(:ci_build, status.to_sym, pipeline: child_pipeline) - - expect(build_statuses(child_pipeline)).to contain_exactly('running', status) - - perform - - expect(build_statuses(child_pipeline)).to contain_exactly('canceled', status) - end - end - end - end - end - - context 'when the pipeline is a child pipeline' do - let!(:parent_pipeline) { create(:ci_pipeline, project: project, sha: new_commit.sha) } - let(:pipeline) { create(:ci_pipeline, child_of: parent_pipeline) } - - before do - create(:ci_build, :interruptible, :running, pipeline: parent_pipeline) - create(:ci_build, :interruptible, :running, pipeline: parent_pipeline) - end - - it 'does not cancel any builds' do - expect(build_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created') - expect(build_statuses(parent_pipeline)).to contain_exactly('running', 'running') - - perform - - expect(build_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created') - expect(build_statuses(parent_pipeline)).to contain_exactly('running', 'running') - end - end - - context 'when the previous 
pipeline source is webide' do - let(:prev_pipeline) { create(:ci_pipeline, :webide, project: project) } - - it 'does not cancel builds of the previous pipeline' do - perform - - expect(build_statuses(prev_pipeline)).to contain_exactly('created', 'running', 'success') - expect(build_statuses(pipeline)).to contain_exactly('pending') - end - end + subject end - - context 'when auto-cancel is disabled' do - before do - project.update!(auto_cancel_pending_pipelines: 'disabled') - end - - it 'does not cancel any build' do - subject - - expect(build_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created') - expect(build_statuses(pipeline)).to contain_exactly('pending') - end - end - end - - private - - def build_statuses(pipeline) - pipeline.builds.pluck(:status) end end diff --git a/spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb deleted file mode 100644 index bec80a43a76..00000000000 --- a/spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb +++ /dev/null @@ -1,72 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Ci::Pipeline::Chain::CreateDeployments, feature_category: :continuous_integration do - let_it_be(:project) { create(:project, :repository) } - let_it_be(:user) { create(:user) } - - let(:stage) { build(:ci_stage, project: project, statuses: [job]) } - let(:pipeline) { create(:ci_pipeline, project: project, stages: [stage]) } - - let(:command) do - Gitlab::Ci::Pipeline::Chain::Command.new(project: project, current_user: user) - end - - let(:step) { described_class.new(pipeline, command) } - - describe '#perform!' do - subject { step.perform! 
} - - before do - stub_feature_flags(move_create_deployments_to_worker: false) - job.pipeline = pipeline - end - - context 'when a pipeline contains a deployment job' do - let!(:job) { build(:ci_build, :start_review_app, project: project) } - let!(:environment) { create(:environment, project: project, name: job.expanded_environment_name) } - - it 'creates a deployment record' do - expect { subject }.to change { Deployment.count }.by(1) - - job.reset - expect(job.deployment.project).to eq(job.project) - expect(job.deployment.ref).to eq(job.ref) - expect(job.deployment.sha).to eq(job.sha) - expect(job.deployment.deployable).to eq(job) - expect(job.deployment.deployable_type).to eq('CommitStatus') - expect(job.deployment.environment).to eq(job.persisted_environment) - end - - context 'when the corresponding environment does not exist' do - let!(:environment) {} - - it 'does not create a deployment record' do - expect { subject }.not_to change { Deployment.count } - - expect(job.deployment).to be_nil - end - end - end - - context 'when a pipeline contains a teardown job' do - let!(:job) { build(:ci_build, :stop_review_app, project: project) } - let!(:environment) { create(:environment, name: job.expanded_environment_name) } - - it 'does not create a deployment record' do - expect { subject }.not_to change { Deployment.count } - - expect(job.deployment).to be_nil - end - end - - context 'when a pipeline does not contain a deployment job' do - let!(:job) { build(:ci_build, project: project) } - - it 'does not create any deployments' do - expect { subject }.not_to change { Deployment.count } - end - end - end -end diff --git a/spec/lib/gitlab/ci/pipeline/chain/metrics_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/metrics_spec.rb new file mode 100644 index 00000000000..b955d0e7cee --- /dev/null +++ b/spec/lib/gitlab/ci/pipeline/chain/metrics_spec.rb @@ -0,0 +1,55 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 
Gitlab::Ci::Pipeline::Chain::Metrics, feature_category: :continuous_integration do + let_it_be(:project) { create(:project) } + let_it_be(:user) { create(:user) } + + let_it_be(:pipeline) do + create(:ci_pipeline, project: project, ref: 'master', user: user, name: 'Build pipeline') + end + + let(:command) do + Gitlab::Ci::Pipeline::Chain::Command.new( + project: project, + current_user: user, + origin_ref: 'master') + end + + let(:step) { described_class.new(pipeline, command) } + + subject(:run_chain) { step.perform! } + + it 'does not break the chain' do + run_chain + + expect(step.break?).to be false + end + + context 'with pipeline name' do + it 'creates snowplow event' do + run_chain + + expect_snowplow_event( + category: described_class.to_s, + action: 'create_pipeline_with_name', + project: pipeline.project, + user: pipeline.user, + namespace: pipeline.project.namespace + ) + end + end + + context 'without pipeline name' do + let_it_be(:pipeline) do + create(:ci_pipeline, project: project, ref: 'master', user: user) + end + + it 'does not create snowplow event' do + run_chain + + expect_no_snowplow_event + end + end +end diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/abilities_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/abilities_spec.rb index 9373888aada..df18e1e4f48 100644 --- a/spec/lib/gitlab/ci/pipeline/chain/validate/abilities_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/chain/validate/abilities_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::Abilities, feature_category: :pipeline_execution do +RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::Abilities, feature_category: :continuous_integration do let(:project) { create(:project, :test_repo) } let_it_be(:user) { create(:user) } diff --git a/spec/lib/gitlab/ci/pipeline/expression/lexeme/matches_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/lexeme/matches_spec.rb index 47f172922a5..1a622000c1b 100644 --- 
a/spec/lib/gitlab/ci/pipeline/expression/lexeme/matches_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/expression/lexeme/matches_spec.rb @@ -1,11 +1,8 @@ # frozen_string_literal: true -require 'fast_spec_helper' -require 'support/helpers/stubbed_feature' -require 'support/helpers/stub_feature_flags' -require_dependency 're2' +require 'spec_helper' -RSpec.describe Gitlab::Ci::Pipeline::Expression::Lexeme::Matches do +RSpec.describe Gitlab::Ci::Pipeline::Expression::Lexeme::Matches, feature_category: :continuous_integration do include StubFeatureFlags let(:left) { double('left') } diff --git a/spec/lib/gitlab/ci/pipeline/expression/lexeme/not_matches_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/lexeme/not_matches_spec.rb index 9e7ea3e4ea4..a60b00457fb 100644 --- a/spec/lib/gitlab/ci/pipeline/expression/lexeme/not_matches_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/expression/lexeme/not_matches_spec.rb @@ -1,11 +1,8 @@ # frozen_string_literal: true -require 'fast_spec_helper' -require 'support/helpers/stubbed_feature' -require 'support/helpers/stub_feature_flags' -require_dependency 're2' +require 'spec_helper' -RSpec.describe Gitlab::Ci::Pipeline::Expression::Lexeme::NotMatches do +RSpec.describe Gitlab::Ci::Pipeline::Expression::Lexeme::NotMatches, feature_category: :continuous_integration do include StubFeatureFlags let(:left) { double('left') } diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb index 1f7f800e238..3043d7f5381 100644 --- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb @@ -12,953 +12,860 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build, feature_category: :pipeline_au let(:attributes) { { name: 'rspec', ref: 'master', scheduling_type: :stage, when: 'on_success' } } let(:previous_stages) { [] } let(:current_stage) { instance_double(Gitlab::Ci::Pipeline::Seed::Stage, seeds_names: [attributes[:name]]) } - let(:current_ci_stage) { 
build(:ci_stage, pipeline: pipeline) } - let(:seed_build) { described_class.new(seed_context, attributes, previous_stages + [current_stage], current_ci_stage) } + let(:seed_build) { described_class.new(seed_context, attributes, previous_stages + [current_stage]) } - shared_examples 'build seed' do - describe '#attributes' do - subject { seed_build.attributes } + describe '#attributes' do + subject { seed_build.attributes } - it { is_expected.to be_a(Hash) } - it { is_expected.to include(:name, :project, :ref) } + it { is_expected.to be_a(Hash) } + it { is_expected.to include(:name, :project, :ref) } - context 'with job:when' do - let(:attributes) { { name: 'rspec', ref: 'master', when: 'on_failure' } } + context 'with job:when' do + let(:attributes) { { name: 'rspec', ref: 'master', when: 'on_failure' } } - it { is_expected.to include(when: 'on_failure') } + it { is_expected.to include(when: 'on_failure') } + end + + context 'with job:when:delayed' do + let(:attributes) { { name: 'rspec', ref: 'master', when: 'delayed', options: { start_in: '3 hours' } } } + + it { is_expected.to include(when: 'delayed', options: { start_in: '3 hours' }) } + end + + context 'with job:rules:[when:]' do + context 'is matched' do + let(:attributes) { { name: 'rspec', ref: 'master', rules: [{ if: '$VAR == null', when: 'always' }] } } + + it { is_expected.to include(when: 'always') } end - context 'with job:when:delayed' do - let(:attributes) { { name: 'rspec', ref: 'master', when: 'delayed', options: { start_in: '3 hours' } } } + context 'is not matched' do + let(:attributes) { { name: 'rspec', ref: 'master', rules: [{ if: '$VAR != null', when: 'always' }] } } + + it { is_expected.to include(when: 'never') } + end + end + + context 'with job:rules:[when:delayed]' do + context 'is matched' do + let(:attributes) { { name: 'rspec', ref: 'master', rules: [{ if: '$VAR == null', when: 'delayed', start_in: '3 hours' }] } } it { is_expected.to include(when: 'delayed', options: { start_in: '3 
hours' }) } end - context 'with job:rules:[when:]' do - context 'is matched' do - let(:attributes) { { name: 'rspec', ref: 'master', rules: [{ if: '$VAR == null', when: 'always' }] } } + context 'is not matched' do + let(:attributes) { { name: 'rspec', ref: 'master', rules: [{ if: '$VAR != null', when: 'delayed', start_in: '3 hours' }] } } + + it { is_expected.to include(when: 'never') } + end + end + + context 'with job: rules but no explicit when:' do + let(:base_attributes) { { name: 'rspec', ref: 'master' } } - it { is_expected.to include(when: 'always') } + context 'with a manual job' do + context 'with a matched rule' do + let(:attributes) { base_attributes.merge(when: 'manual', rules: [{ if: '$VAR == null' }]) } + + it { is_expected.to include(when: 'manual') } end context 'is not matched' do - let(:attributes) { { name: 'rspec', ref: 'master', rules: [{ if: '$VAR != null', when: 'always' }] } } + let(:attributes) { base_attributes.merge(when: 'manual', rules: [{ if: '$VAR != null' }]) } it { is_expected.to include(when: 'never') } end end - context 'with job:rules:[when:delayed]' do + context 'with an automatic job' do context 'is matched' do - let(:attributes) { { name: 'rspec', ref: 'master', rules: [{ if: '$VAR == null', when: 'delayed', start_in: '3 hours' }] } } + let(:attributes) { base_attributes.merge(when: 'on_success', rules: [{ if: '$VAR == null' }]) } - it { is_expected.to include(when: 'delayed', options: { start_in: '3 hours' }) } + it { is_expected.to include(when: 'on_success') } end context 'is not matched' do - let(:attributes) { { name: 'rspec', ref: 'master', rules: [{ if: '$VAR != null', when: 'delayed', start_in: '3 hours' }] } } + let(:attributes) { base_attributes.merge(when: 'on_success', rules: [{ if: '$VAR != null' }]) } it { is_expected.to include(when: 'never') } end end + end - context 'with job: rules but no explicit when:' do - let(:base_attributes) { { name: 'rspec', ref: 'master' } } - - context 'with a manual job' do - 
context 'with a matched rule' do - let(:attributes) { base_attributes.merge(when: 'manual', rules: [{ if: '$VAR == null' }]) } - - it { is_expected.to include(when: 'manual') } - end - - context 'is not matched' do - let(:attributes) { base_attributes.merge(when: 'manual', rules: [{ if: '$VAR != null' }]) } - - it { is_expected.to include(when: 'never') } - end - end + context 'with job:rules:[variables:]' do + let(:attributes) do + { name: 'rspec', + ref: 'master', + job_variables: [{ key: 'VAR1', value: 'var 1' }, + { key: 'VAR2', value: 'var 2' }], + rules: [{ if: '$VAR == null', variables: { VAR1: 'new var 1', VAR3: 'var 3' } }] } + end - context 'with an automatic job' do - context 'is matched' do - let(:attributes) { base_attributes.merge(when: 'on_success', rules: [{ if: '$VAR == null' }]) } + it do + is_expected.to include(yaml_variables: [{ key: 'VAR1', value: 'new var 1' }, + { key: 'VAR3', value: 'var 3' }, + { key: 'VAR2', value: 'var 2' }]) + end + end - it { is_expected.to include(when: 'on_success') } - end + context 'with job:tags' do + let(:attributes) do + { + name: 'rspec', + ref: 'master', + job_variables: [{ key: 'VARIABLE', value: 'value' }], + tag_list: ['static-tag', '$VARIABLE', '$NO_VARIABLE'] + } + end - context 'is not matched' do - let(:attributes) { base_attributes.merge(when: 'on_success', rules: [{ if: '$VAR != null' }]) } + it { is_expected.to include(tag_list: ['static-tag', 'value', '$NO_VARIABLE']) } + it { is_expected.to include(yaml_variables: [{ key: 'VARIABLE', value: 'value' }]) } + end - it { is_expected.to include(when: 'never') } - end - end + context 'with cache:key' do + let(:attributes) do + { + name: 'rspec', + ref: 'master', + cache: [{ + key: 'a-value' + }] + } end - context 'with job:rules:[variables:]' do + it { is_expected.to include(options: { cache: [a_hash_including(key: 'a-value')] }) } + + context 'with cache:key:files' do let(:attributes) do - { name: 'rspec', + { + name: 'rspec', ref: 'master', - 
job_variables: [{ key: 'VAR1', value: 'var 1' }, - { key: 'VAR2', value: 'var 2' }], - rules: [{ if: '$VAR == null', variables: { VAR1: 'new var 1', VAR3: 'var 3' } }] } + cache: [{ + key: { + files: ['VERSION'] + } + }] + } end - it do - is_expected.to include(yaml_variables: [{ key: 'VAR1', value: 'new var 1' }, - { key: 'VAR3', value: 'var 3' }, - { key: 'VAR2', value: 'var 2' }]) - end + it 'includes cache options' do + cache_options = { + options: { + cache: [a_hash_including(key: '0-f155568ad0933d8358f66b846133614f76dd0ca4')] + } + } - it 'expects the same results on to_resource' do - expect(seed_build.to_resource.yaml_variables).to include({ key: 'VAR1', value: 'new var 1' }, - { key: 'VAR3', value: 'var 3' }, - { key: 'VAR2', value: 'var 2' }) + is_expected.to include(cache_options) end end - context 'with job:tags' do + context 'with cache:key:prefix' do let(:attributes) do { name: 'rspec', ref: 'master', - job_variables: [{ key: 'VARIABLE', value: 'value' }], - tag_list: ['static-tag', '$VARIABLE', '$NO_VARIABLE'] + cache: [{ + key: { + prefix: 'something' + } + }] } end - it { is_expected.to include(tag_list: ['static-tag', 'value', '$NO_VARIABLE']) } - it { is_expected.to include(yaml_variables: [{ key: 'VARIABLE', value: 'value' }]) } + it { is_expected.to include(options: { cache: [a_hash_including( key: 'something-default' )] }) } end - context 'with cache:key' do + context 'with cache:key:files and prefix' do let(:attributes) do { name: 'rspec', ref: 'master', cache: [{ - key: 'a-value' + key: { + files: ['VERSION'], + prefix: 'something' + } }] } end - it { is_expected.to include(options: { cache: [a_hash_including(key: 'a-value')] }) } - - context 'with cache:key:files' do - let(:attributes) do - { - name: 'rspec', - ref: 'master', - cache: [{ - key: { - files: ['VERSION'] - } - }] - } - end - - it 'includes cache options' do - cache_options = { - options: { - cache: [a_hash_including(key: '0-f155568ad0933d8358f66b846133614f76dd0ca4')] - } + it 
'includes cache options' do + cache_options = { + options: { + cache: [a_hash_including(key: 'something-f155568ad0933d8358f66b846133614f76dd0ca4')] } + } - is_expected.to include(cache_options) - end - end - - context 'with cache:key:prefix' do - let(:attributes) do - { - name: 'rspec', - ref: 'master', - cache: [{ - key: { - prefix: 'something' - } - }] - } - end - - it { is_expected.to include(options: { cache: [a_hash_including( key: 'something-default' )] }) } + is_expected.to include(cache_options) end + end + end - context 'with cache:key:files and prefix' do - let(:attributes) do - { - name: 'rspec', - ref: 'master', - cache: [{ - key: { - files: ['VERSION'], - prefix: 'something' - } - }] - } - end + context 'with empty cache' do + let(:attributes) do + { + name: 'rspec', + ref: 'master', + cache: {} + } + end - it 'includes cache options' do - cache_options = { - options: { - cache: [a_hash_including(key: 'something-f155568ad0933d8358f66b846133614f76dd0ca4')] - } - } + it { is_expected.to include({}) } + end - is_expected.to include(cache_options) - end - end + context 'with allow_failure' do + let(:options) do + { allow_failure_criteria: { exit_codes: [42] } } end - context 'with empty cache' do - let(:attributes) do - { - name: 'rspec', - ref: 'master', - cache: {} - } - end + let(:rules) do + [{ if: '$VAR == null', when: 'always' }] + end - it { is_expected.to include({}) } + let(:attributes) do + { + name: 'rspec', + ref: 'master', + options: options, + rules: rules + } end - context 'with allow_failure' do - let(:options) do - { allow_failure_criteria: { exit_codes: [42] } } - end + context 'when rules does not override allow_failure' do + it { is_expected.to match a_hash_including(options: options) } + end + context 'when rules set allow_failure to true' do let(:rules) do - [{ if: '$VAR == null', when: 'always' }] + [{ if: '$VAR == null', when: 'always', allow_failure: true }] end - let(:attributes) do - { - name: 'rspec', - ref: 'master', - options: 
options, - rules: rules - } - end - - context 'when rules does not override allow_failure' do - it { is_expected.to match a_hash_including(options: options) } - end - - context 'when rules set allow_failure to true' do - let(:rules) do - [{ if: '$VAR == null', when: 'always', allow_failure: true }] - end - - it { is_expected.to match a_hash_including(options: { allow_failure_criteria: nil }) } - - context 'when options contain other static values' do - let(:options) do - { image: 'busybox', allow_failure_criteria: { exit_codes: [42] } } - end - - it { is_expected.to match a_hash_including(options: { image: 'busybox', allow_failure_criteria: nil }) } + it { is_expected.to match a_hash_including(options: { allow_failure_criteria: nil }) } + end - it 'deep merges options when exporting to_resource' do - expect(seed_build.to_resource.options).to match a_hash_including( - image: 'busybox', allow_failure_criteria: nil - ) - end - end + context 'when rules set allow_failure to false' do + let(:rules) do + [{ if: '$VAR == null', when: 'always', allow_failure: false }] end - context 'when rules set allow_failure to false' do - let(:rules) do - [{ if: '$VAR == null', when: 'always', allow_failure: false }] - end - - it { is_expected.to match a_hash_including(options: { allow_failure_criteria: nil }) } - end + it { is_expected.to match a_hash_including(options: { allow_failure_criteria: nil }) } end + end - context 'with workflow:rules:[variables:]' do - let(:attributes) do - { name: 'rspec', - ref: 'master', - yaml_variables: [{ key: 'VAR2', value: 'var 2' }, - { key: 'VAR3', value: 'var 3' }], - job_variables: [{ key: 'VAR2', value: 'var 2' }, + context 'with workflow:rules:[variables:]' do + let(:attributes) do + { name: 'rspec', + ref: 'master', + yaml_variables: [{ key: 'VAR2', value: 'var 2' }, { key: 'VAR3', value: 'var 3' }], - root_variables_inheritance: root_variables_inheritance } - end - - context 'when the pipeline has variables' do - let(:root_variables) do - [{ 
key: 'VAR1', value: 'var overridden pipeline 1' }, - { key: 'VAR2', value: 'var pipeline 2' }, - { key: 'VAR3', value: 'var pipeline 3' }, - { key: 'VAR4', value: 'new var pipeline 4' }] - end - - context 'when root_variables_inheritance is true' do - let(:root_variables_inheritance) { true } + job_variables: [{ key: 'VAR2', value: 'var 2' }, + { key: 'VAR3', value: 'var 3' }], + root_variables_inheritance: root_variables_inheritance } + end - it 'returns calculated yaml variables' do - expect(subject[:yaml_variables]).to match_array( - [{ key: 'VAR1', value: 'var overridden pipeline 1' }, - { key: 'VAR2', value: 'var 2' }, - { key: 'VAR3', value: 'var 3' }, - { key: 'VAR4', value: 'new var pipeline 4' }] - ) - end - end + context 'when the pipeline has variables' do + let(:root_variables) do + [{ key: 'VAR1', value: 'var overridden pipeline 1' }, + { key: 'VAR2', value: 'var pipeline 2' }, + { key: 'VAR3', value: 'var pipeline 3' }, + { key: 'VAR4', value: 'new var pipeline 4' }] + end - context 'when root_variables_inheritance is false' do - let(:root_variables_inheritance) { false } + context 'when root_variables_inheritance is true' do + let(:root_variables_inheritance) { true } - it 'returns job variables' do - expect(subject[:yaml_variables]).to match_array( - [{ key: 'VAR2', value: 'var 2' }, - { key: 'VAR3', value: 'var 3' }] - ) - end + it 'returns calculated yaml variables' do + expect(subject[:yaml_variables]).to match_array( + [{ key: 'VAR1', value: 'var overridden pipeline 1' }, + { key: 'VAR2', value: 'var 2' }, + { key: 'VAR3', value: 'var 3' }, + { key: 'VAR4', value: 'new var pipeline 4' }] + ) end + end - context 'when root_variables_inheritance is an array' do - let(:root_variables_inheritance) { %w(VAR1 VAR2 VAR3) } + context 'when root_variables_inheritance is false' do + let(:root_variables_inheritance) { false } - it 'returns calculated yaml variables' do - expect(subject[:yaml_variables]).to match_array( - [{ key: 'VAR1', value: 'var 
overridden pipeline 1' }, - { key: 'VAR2', value: 'var 2' }, - { key: 'VAR3', value: 'var 3' }] - ) - end + it 'returns job variables' do + expect(subject[:yaml_variables]).to match_array( + [{ key: 'VAR2', value: 'var 2' }, + { key: 'VAR3', value: 'var 3' }] + ) end end - context 'when the pipeline has not a variable' do - let(:root_variables_inheritance) { true } + context 'when root_variables_inheritance is an array' do + let(:root_variables_inheritance) { %w(VAR1 VAR2 VAR3) } - it 'returns seed yaml variables' do + it 'returns calculated yaml variables' do expect(subject[:yaml_variables]).to match_array( - [{ key: 'VAR2', value: 'var 2' }, - { key: 'VAR3', value: 'var 3' }]) + [{ key: 'VAR1', value: 'var overridden pipeline 1' }, + { key: 'VAR2', value: 'var 2' }, + { key: 'VAR3', value: 'var 3' }] + ) end end end - context 'when the job rule depends on variables' do - let(:attributes) do - { name: 'rspec', - ref: 'master', - yaml_variables: [{ key: 'VAR1', value: 'var 1' }], - job_variables: [{ key: 'VAR1', value: 'var 1' }], - root_variables_inheritance: root_variables_inheritance, - rules: rules } + context 'when the pipeline has not a variable' do + let(:root_variables_inheritance) { true } + + it 'returns seed yaml variables' do + expect(subject[:yaml_variables]).to match_array( + [{ key: 'VAR2', value: 'var 2' }, + { key: 'VAR3', value: 'var 3' }]) end + end + end - let(:root_variables_inheritance) { true } + context 'when the job rule depends on variables' do + let(:attributes) do + { name: 'rspec', + ref: 'master', + yaml_variables: [{ key: 'VAR1', value: 'var 1' }], + job_variables: [{ key: 'VAR1', value: 'var 1' }], + root_variables_inheritance: root_variables_inheritance, + rules: rules } + end - context 'when the rules use job variables' do - let(:rules) do - [{ if: '$VAR1 == "var 1"', variables: { VAR1: 'overridden var 1', VAR2: 'new var 2' } }] - end + let(:root_variables_inheritance) { true } - it 'recalculates the variables' do - 
expect(subject[:yaml_variables]).to contain_exactly({ key: 'VAR1', value: 'overridden var 1' }, - { key: 'VAR2', value: 'new var 2' }) - end + context 'when the rules use job variables' do + let(:rules) do + [{ if: '$VAR1 == "var 1"', variables: { VAR1: 'overridden var 1', VAR2: 'new var 2' } }] end - context 'when the rules use root variables' do - let(:root_variables) do - [{ key: 'VAR2', value: 'var pipeline 2' }] - end + it 'recalculates the variables' do + expect(subject[:yaml_variables]).to contain_exactly({ key: 'VAR1', value: 'overridden var 1' }, + { key: 'VAR2', value: 'new var 2' }) + end + end - let(:rules) do - [{ if: '$VAR2 == "var pipeline 2"', variables: { VAR1: 'overridden var 1', VAR2: 'overridden var 2' } }] - end + context 'when the rules use root variables' do + let(:root_variables) do + [{ key: 'VAR2', value: 'var pipeline 2' }] + end - it 'recalculates the variables' do - expect(subject[:yaml_variables]).to contain_exactly({ key: 'VAR1', value: 'overridden var 1' }, - { key: 'VAR2', value: 'overridden var 2' }) - end + let(:rules) do + [{ if: '$VAR2 == "var pipeline 2"', variables: { VAR1: 'overridden var 1', VAR2: 'overridden var 2' } }] + end - context 'when the root_variables_inheritance is false' do - let(:root_variables_inheritance) { false } + it 'recalculates the variables' do + expect(subject[:yaml_variables]).to contain_exactly({ key: 'VAR1', value: 'overridden var 1' }, + { key: 'VAR2', value: 'overridden var 2' }) + end - it 'does not recalculate the variables' do - expect(subject[:yaml_variables]).to contain_exactly({ key: 'VAR1', value: 'var 1' }) - end + context 'when the root_variables_inheritance is false' do + let(:root_variables_inheritance) { false } + + it 'does not recalculate the variables' do + expect(subject[:yaml_variables]).to contain_exactly({ key: 'VAR1', value: 'var 1' }) end end end end + end + + describe '#bridge?' do + subject { seed_build.bridge? 
} + + context 'when job is a downstream bridge' do + let(:attributes) do + { name: 'rspec', ref: 'master', options: { trigger: 'my/project' } } + end - describe '#bridge?' do - subject { seed_build.bridge? } + it { is_expected.to be_truthy } - context 'when job is a downstream bridge' do + context 'when trigger definition is empty' do let(:attributes) do - { name: 'rspec', ref: 'master', options: { trigger: 'my/project' } } + { name: 'rspec', ref: 'master', options: { trigger: '' } } end - it { is_expected.to be_truthy } - - context 'when trigger definition is empty' do - let(:attributes) do - { name: 'rspec', ref: 'master', options: { trigger: '' } } - end + it { is_expected.to be_falsey } + end + end - it { is_expected.to be_falsey } - end + context 'when job is an upstream bridge' do + let(:attributes) do + { name: 'rspec', ref: 'master', options: { bridge_needs: { pipeline: 'my/project' } } } end - context 'when job is an upstream bridge' do + it { is_expected.to be_truthy } + + context 'when upstream definition is empty' do let(:attributes) do - { name: 'rspec', ref: 'master', options: { bridge_needs: { pipeline: 'my/project' } } } + { name: 'rspec', ref: 'master', options: { bridge_needs: { pipeline: '' } } } end - it { is_expected.to be_truthy } + it { is_expected.to be_falsey } + end + end - context 'when upstream definition is empty' do - let(:attributes) do - { name: 'rspec', ref: 'master', options: { bridge_needs: { pipeline: '' } } } - end + context 'when job is not a bridge' do + it { is_expected.to be_falsey } + end + end - it { is_expected.to be_falsey } - end - end + describe '#to_resource' do + subject { seed_build.to_resource } - context 'when job is not a bridge' do - it { is_expected.to be_falsey } - end + it 'memoizes a resource object' do + expect(subject.object_id).to eq seed_build.to_resource.object_id end - describe '#to_resource' do - subject { seed_build.to_resource } + it 'can not be persisted without explicit assignment' do + 
pipeline.save! - it 'memoizes a resource object' do - expect(subject.object_id).to eq seed_build.to_resource.object_id - end + expect(subject).not_to be_persisted + end + end - it 'can not be persisted without explicit assignment' do - pipeline.save! + describe 'applying job inclusion policies' do + subject { seed_build } - expect(subject).not_to be_persisted + context 'when no branch policy is specified' do + let(:attributes) do + { name: 'rspec' } end - end - describe 'applying job inclusion policies' do - subject { seed_build } + it { is_expected.to be_included } + end - context 'when no branch policy is specified' do + context 'when branch policy does not match' do + context 'when using only' do let(:attributes) do - { name: 'rspec' } + { name: 'rspec', only: { refs: ['deploy'] } } end - it { is_expected.to be_included } + it { is_expected.not_to be_included } end - context 'when branch policy does not match' do - context 'when using only' do - let(:attributes) do - { name: 'rspec', only: { refs: ['deploy'] } } - end - - it { is_expected.not_to be_included } + context 'when using except' do + let(:attributes) do + { name: 'rspec', except: { refs: ['deploy'] } } end - context 'when using except' do - let(:attributes) do - { name: 'rspec', except: { refs: ['deploy'] } } - end + it { is_expected.to be_included } + end - it { is_expected.to be_included } + context 'with both only and except policies' do + let(:attributes) do + { + name: 'rspec', + only: { refs: %w[deploy] }, + except: { refs: %w[deploy] } + } end - context 'with both only and except policies' do - let(:attributes) do - { - name: 'rspec', - only: { refs: %w[deploy] }, - except: { refs: %w[deploy] } - } - end - - it { is_expected.not_to be_included } - end + it { is_expected.not_to be_included } end + end - context 'when branch regexp policy does not match' do - context 'when using only' do - let(:attributes) do - { name: 'rspec', only: { refs: %w[/^deploy$/] } } - end - - it { is_expected.not_to 
be_included } + context 'when branch regexp policy does not match' do + context 'when using only' do + let(:attributes) do + { name: 'rspec', only: { refs: %w[/^deploy$/] } } end - context 'when using except' do - let(:attributes) do - { name: 'rspec', except: { refs: %w[/^deploy$/] } } - end + it { is_expected.not_to be_included } + end - it { is_expected.to be_included } + context 'when using except' do + let(:attributes) do + { name: 'rspec', except: { refs: %w[/^deploy$/] } } end - context 'with both only and except policies' do - let(:attributes) do - { - name: 'rspec', - only: { refs: %w[/^deploy$/] }, - except: { refs: %w[/^deploy$/] } - } - end + it { is_expected.to be_included } + end - it { is_expected.not_to be_included } + context 'with both only and except policies' do + let(:attributes) do + { + name: 'rspec', + only: { refs: %w[/^deploy$/] }, + except: { refs: %w[/^deploy$/] } + } end - end - context 'when branch policy matches' do - context 'when using only' do - let(:attributes) do - { name: 'rspec', only: { refs: %w[deploy master] } } - end + it { is_expected.not_to be_included } + end + end - it { is_expected.to be_included } + context 'when branch policy matches' do + context 'when using only' do + let(:attributes) do + { name: 'rspec', only: { refs: %w[deploy master] } } end - context 'when using except' do - let(:attributes) do - { name: 'rspec', except: { refs: %w[deploy master] } } - end + it { is_expected.to be_included } + end - it { is_expected.not_to be_included } + context 'when using except' do + let(:attributes) do + { name: 'rspec', except: { refs: %w[deploy master] } } end - context 'when using both only and except policies' do - let(:attributes) do - { - name: 'rspec', - only: { refs: %w[deploy master] }, - except: { refs: %w[deploy master] } - } - end + it { is_expected.not_to be_included } + end - it { is_expected.not_to be_included } + context 'when using both only and except policies' do + let(:attributes) do + { + name: 
'rspec', + only: { refs: %w[deploy master] }, + except: { refs: %w[deploy master] } + } end - end - context 'when keyword policy matches' do - context 'when using only' do - let(:attributes) do - { name: 'rspec', only: { refs: %w[branches] } } - end + it { is_expected.not_to be_included } + end + end - it { is_expected.to be_included } + context 'when keyword policy matches' do + context 'when using only' do + let(:attributes) do + { name: 'rspec', only: { refs: %w[branches] } } end - context 'when using except' do - let(:attributes) do - { name: 'rspec', except: { refs: %w[branches] } } - end + it { is_expected.to be_included } + end - it { is_expected.not_to be_included } + context 'when using except' do + let(:attributes) do + { name: 'rspec', except: { refs: %w[branches] } } end - context 'when using both only and except policies' do - let(:attributes) do - { - name: 'rspec', - only: { refs: %w[branches] }, - except: { refs: %w[branches] } - } - end + it { is_expected.not_to be_included } + end - it { is_expected.not_to be_included } + context 'when using both only and except policies' do + let(:attributes) do + { + name: 'rspec', + only: { refs: %w[branches] }, + except: { refs: %w[branches] } + } end - end - context 'when keyword policy does not match' do - context 'when using only' do - let(:attributes) do - { name: 'rspec', only: { refs: %w[tags] } } - end + it { is_expected.not_to be_included } + end + end - it { is_expected.not_to be_included } + context 'when keyword policy does not match' do + context 'when using only' do + let(:attributes) do + { name: 'rspec', only: { refs: %w[tags] } } end - context 'when using except' do - let(:attributes) do - { name: 'rspec', except: { refs: %w[tags] } } - end + it { is_expected.not_to be_included } + end - it { is_expected.to be_included } + context 'when using except' do + let(:attributes) do + { name: 'rspec', except: { refs: %w[tags] } } end - context 'when using both only and except policies' do - 
let(:attributes) do - { - name: 'rspec', - only: { refs: %w[tags] }, - except: { refs: %w[tags] } - } - end + it { is_expected.to be_included } + end - it { is_expected.not_to be_included } + context 'when using both only and except policies' do + let(:attributes) do + { + name: 'rspec', + only: { refs: %w[tags] }, + except: { refs: %w[tags] } + } end - end - context 'with source-keyword policy' do - using RSpec::Parameterized + it { is_expected.not_to be_included } + end + end - let(:pipeline) do - build(:ci_empty_pipeline, ref: 'deploy', tag: false, source: source, project: project) - end + context 'with source-keyword policy' do + using RSpec::Parameterized - context 'matches' do - where(:keyword, :source) do - [ - %w[pushes push], - %w[web web], - %w[triggers trigger], - %w[schedules schedule], - %w[api api], - %w[external external] - ] - end + let(:pipeline) do + build(:ci_empty_pipeline, ref: 'deploy', tag: false, source: source, project: project) + end - with_them do - context 'using an only policy' do - let(:attributes) do - { name: 'rspec', only: { refs: [keyword] } } - end + context 'matches' do + where(:keyword, :source) do + [ + %w[pushes push], + %w[web web], + %w[triggers trigger], + %w[schedules schedule], + %w[api api], + %w[external external] + ] + end - it { is_expected.to be_included } + with_them do + context 'using an only policy' do + let(:attributes) do + { name: 'rspec', only: { refs: [keyword] } } end - context 'using an except policy' do - let(:attributes) do - { name: 'rspec', except: { refs: [keyword] } } - end + it { is_expected.to be_included } + end - it { is_expected.not_to be_included } + context 'using an except policy' do + let(:attributes) do + { name: 'rspec', except: { refs: [keyword] } } end - context 'using both only and except policies' do - let(:attributes) do - { - name: 'rspec', - only: { refs: [keyword] }, - except: { refs: [keyword] } - } - end + it { is_expected.not_to be_included } + end - it { is_expected.not_to 
be_included } + context 'using both only and except policies' do + let(:attributes) do + { + name: 'rspec', + only: { refs: [keyword] }, + except: { refs: [keyword] } + } end - end - end - context 'non-matches' do - where(:keyword, :source) do - %w[web trigger schedule api external].map { |source| ['pushes', source] } + - %w[push trigger schedule api external].map { |source| ['web', source] } + - %w[push web schedule api external].map { |source| ['triggers', source] } + - %w[push web trigger api external].map { |source| ['schedules', source] } + - %w[push web trigger schedule external].map { |source| ['api', source] } + - %w[push web trigger schedule api].map { |source| ['external', source] } + it { is_expected.not_to be_included } end + end + end - with_them do - context 'using an only policy' do - let(:attributes) do - { name: 'rspec', only: { refs: [keyword] } } - end + context 'non-matches' do + where(:keyword, :source) do + %w[web trigger schedule api external].map { |source| ['pushes', source] } + + %w[push trigger schedule api external].map { |source| ['web', source] } + + %w[push web schedule api external].map { |source| ['triggers', source] } + + %w[push web trigger api external].map { |source| ['schedules', source] } + + %w[push web trigger schedule external].map { |source| ['api', source] } + + %w[push web trigger schedule api].map { |source| ['external', source] } + end - it { is_expected.not_to be_included } + with_them do + context 'using an only policy' do + let(:attributes) do + { name: 'rspec', only: { refs: [keyword] } } end - context 'using an except policy' do - let(:attributes) do - { name: 'rspec', except: { refs: [keyword] } } - end + it { is_expected.not_to be_included } + end - it { is_expected.to be_included } + context 'using an except policy' do + let(:attributes) do + { name: 'rspec', except: { refs: [keyword] } } end - context 'using both only and except policies' do - let(:attributes) do - { - name: 'rspec', - only: { refs: [keyword] 
}, - except: { refs: [keyword] } - } - end + it { is_expected.to be_included } + end - it { is_expected.not_to be_included } + context 'using both only and except policies' do + let(:attributes) do + { + name: 'rspec', + only: { refs: [keyword] }, + except: { refs: [keyword] } + } end + + it { is_expected.not_to be_included } end end end + end - context 'when repository path matches' do - context 'when using only' do - let(:attributes) do - { name: 'rspec', only: { refs: ["branches@#{pipeline.project_full_path}"] } } - end - - it { is_expected.to be_included } + context 'when repository path matches' do + context 'when using only' do + let(:attributes) do + { name: 'rspec', only: { refs: ["branches@#{pipeline.project_full_path}"] } } end - context 'when using except' do - let(:attributes) do - { name: 'rspec', except: { refs: ["branches@#{pipeline.project_full_path}"] } } - end + it { is_expected.to be_included } + end - it { is_expected.not_to be_included } + context 'when using except' do + let(:attributes) do + { name: 'rspec', except: { refs: ["branches@#{pipeline.project_full_path}"] } } end - context 'when using both only and except policies' do - let(:attributes) do - { - name: 'rspec', - only: { refs: ["branches@#{pipeline.project_full_path}"] }, - except: { refs: ["branches@#{pipeline.project_full_path}"] } - } - end + it { is_expected.not_to be_included } + end - it { is_expected.not_to be_included } + context 'when using both only and except policies' do + let(:attributes) do + { + name: 'rspec', + only: { refs: ["branches@#{pipeline.project_full_path}"] }, + except: { refs: ["branches@#{pipeline.project_full_path}"] } + } end - context 'when using both only and except policies' do - let(:attributes) do - { - name: 'rspec', - only: { - refs: ["branches@#{pipeline.project_full_path}"] - }, - except: { - refs: ["branches@#{pipeline.project_full_path}"] - } - } - end - - it { is_expected.not_to be_included } - end + it { is_expected.not_to be_included } end 
- context 'when repository path does not match' do - context 'when using only' do - let(:attributes) do - { name: 'rspec', only: { refs: %w[branches@fork] } } - end - - it { is_expected.not_to be_included } + context 'when using both only and except policies' do + let(:attributes) do + { + name: 'rspec', + only: { + refs: ["branches@#{pipeline.project_full_path}"] + }, + except: { + refs: ["branches@#{pipeline.project_full_path}"] + } + } end - context 'when using except' do - let(:attributes) do - { name: 'rspec', except: { refs: %w[branches@fork] } } - end + it { is_expected.not_to be_included } + end + end - it { is_expected.to be_included } + context 'when repository path does not match' do + context 'when using only' do + let(:attributes) do + { name: 'rspec', only: { refs: %w[branches@fork] } } end - context 'when using both only and except policies' do - let(:attributes) do - { - name: 'rspec', - only: { refs: %w[branches@fork] }, - except: { refs: %w[branches@fork] } - } - end + it { is_expected.not_to be_included } + end - it { is_expected.not_to be_included } + context 'when using except' do + let(:attributes) do + { name: 'rspec', except: { refs: %w[branches@fork] } } end + + it { is_expected.to be_included } end - context 'using rules:' do - using RSpec::Parameterized + context 'when using both only and except policies' do + let(:attributes) do + { + name: 'rspec', + only: { refs: %w[branches@fork] }, + except: { refs: %w[branches@fork] } + } + end - let(:attributes) { { name: 'rspec', rules: rule_set, when: 'on_success' } } + it { is_expected.not_to be_included } + end + end - context 'with a matching if: rule' do - context 'with an explicit `when: never`' do - where(:rule_set) do - [ - [[{ if: '$VARIABLE == null', when: 'never' }]], - [[{ if: '$VARIABLE == null', when: 'never' }, { if: '$VARIABLE == null', when: 'always' }]], - [[{ if: '$VARIABLE != "the wrong value"', when: 'never' }, { if: '$VARIABLE == null', when: 'always' }]] - ] - end + context 
'using rules:' do + using RSpec::Parameterized - with_them do - it { is_expected.not_to be_included } + let(:attributes) { { name: 'rspec', rules: rule_set, when: 'on_success' } } - it 'still correctly populates when:' do - expect(seed_build.attributes).to include(when: 'never') - end - end + context 'with a matching if: rule' do + context 'with an explicit `when: never`' do + where(:rule_set) do + [ + [[{ if: '$VARIABLE == null', when: 'never' }]], + [[{ if: '$VARIABLE == null', when: 'never' }, { if: '$VARIABLE == null', when: 'always' }]], + [[{ if: '$VARIABLE != "the wrong value"', when: 'never' }, { if: '$VARIABLE == null', when: 'always' }]] + ] end - context 'with an explicit `when: always`' do - where(:rule_set) do - [ - [[{ if: '$VARIABLE == null', when: 'always' }]], - [[{ if: '$VARIABLE == null', when: 'always' }, { if: '$VARIABLE == null', when: 'never' }]], - [[{ if: '$VARIABLE != "the wrong value"', when: 'always' }, { if: '$VARIABLE == null', when: 'never' }]] - ] - end - - with_them do - it { is_expected.to be_included } + with_them do + it { is_expected.not_to be_included } - it 'correctly populates when:' do - expect(seed_build.attributes).to include(when: 'always') - end + it 'still correctly populates when:' do + expect(seed_build.attributes).to include(when: 'never') end end + end - context 'with an explicit `when: on_failure`' do - where(:rule_set) do - [ - [[{ if: '$CI_JOB_NAME == "rspec" && $VAR == null', when: 'on_failure' }]], - [[{ if: '$VARIABLE != null', when: 'delayed', start_in: '1 day' }, { if: '$CI_JOB_NAME == "rspec"', when: 'on_failure' }]], - [[{ if: '$VARIABLE == "the wrong value"', when: 'delayed', start_in: '1 day' }, { if: '$CI_BUILD_NAME == "rspec"', when: 'on_failure' }]] - ] - end - - with_them do - it { is_expected.to be_included } - - it 'correctly populates when:' do - expect(seed_build.attributes).to include(when: 'on_failure') - end - end + context 'with an explicit `when: always`' do + where(:rule_set) do + [ + [[{ 
if: '$VARIABLE == null', when: 'always' }]], + [[{ if: '$VARIABLE == null', when: 'always' }, { if: '$VARIABLE == null', when: 'never' }]], + [[{ if: '$VARIABLE != "the wrong value"', when: 'always' }, { if: '$VARIABLE == null', when: 'never' }]] + ] end - context 'with an explicit `when: delayed`' do - where(:rule_set) do - [ - [[{ if: '$VARIABLE == null', when: 'delayed', start_in: '1 day' }]], - [[{ if: '$VARIABLE == null', when: 'delayed', start_in: '1 day' }, { if: '$VARIABLE == null', when: 'never' }]], - [[{ if: '$VARIABLE != "the wrong value"', when: 'delayed', start_in: '1 day' }, { if: '$VARIABLE == null', when: 'never' }]] - ] - end - - with_them do - it { is_expected.to be_included } + with_them do + it { is_expected.to be_included } - it 'correctly populates when:' do - expect(seed_build.attributes).to include(when: 'delayed', options: { start_in: '1 day' }) - end + it 'correctly populates when:' do + expect(seed_build.attributes).to include(when: 'always') end end + end - context 'without an explicit when: value' do - where(:rule_set) do - [ - [[{ if: '$VARIABLE == null' }]], - [[{ if: '$VARIABLE == null' }, { if: '$VARIABLE == null' }]], - [[{ if: '$VARIABLE != "the wrong value"' }, { if: '$VARIABLE == null' }]] - ] - end + context 'with an explicit `when: on_failure`' do + where(:rule_set) do + [ + [[{ if: '$CI_JOB_NAME == "rspec" && $VAR == null', when: 'on_failure' }]], + [[{ if: '$VARIABLE != null', when: 'delayed', start_in: '1 day' }, { if: '$CI_JOB_NAME == "rspec"', when: 'on_failure' }]], + [[{ if: '$VARIABLE == "the wrong value"', when: 'delayed', start_in: '1 day' }, { if: '$CI_BUILD_NAME == "rspec"', when: 'on_failure' }]] + ] + end - with_them do - it { is_expected.to be_included } + with_them do + it { is_expected.to be_included } - it 'correctly populates when:' do - expect(seed_build.attributes).to include(when: 'on_success') - end + it 'correctly populates when:' do + expect(seed_build.attributes).to include(when: 'on_failure') end 
end end - context 'with a matching changes: rule' do - let(:pipeline) do - build(:ci_pipeline, project: project).tap do |pipeline| - stub_pipeline_modified_paths(pipeline, %w[app/models/ci/pipeline.rb spec/models/ci/pipeline_spec.rb .gitlab-ci.yml]) - end + context 'with an explicit `when: delayed`' do + where(:rule_set) do + [ + [[{ if: '$VARIABLE == null', when: 'delayed', start_in: '1 day' }]], + [[{ if: '$VARIABLE == null', when: 'delayed', start_in: '1 day' }, { if: '$VARIABLE == null', when: 'never' }]], + [[{ if: '$VARIABLE != "the wrong value"', when: 'delayed', start_in: '1 day' }, { if: '$VARIABLE == null', when: 'never' }]] + ] end - context 'with an explicit `when: never`' do - where(:rule_set) do - [ - [[{ changes: { paths: %w[*/**/*.rb] }, when: 'never' }, { changes: { paths: %w[*/**/*.rb] }, when: 'always' }]], - [[{ changes: { paths: %w[app/models/ci/pipeline.rb] }, when: 'never' }, { changes: { paths: %w[app/models/ci/pipeline.rb] }, when: 'always' }]], - [[{ changes: { paths: %w[spec/**/*.rb] }, when: 'never' }, { changes: { paths: %w[spec/**/*.rb] }, when: 'always' }]], - [[{ changes: { paths: %w[*.yml] }, when: 'never' }, { changes: { paths: %w[*.yml] }, when: 'always' }]], - [[{ changes: { paths: %w[.*.yml] }, when: 'never' }, { changes: { paths: %w[.*.yml] }, when: 'always' }]], - [[{ changes: { paths: %w[**/*] }, when: 'never' }, { changes: { paths: %w[**/*] }, when: 'always' }]], - [[{ changes: { paths: %w[*/**/*.rb *.yml] }, when: 'never' }, { changes: { paths: %w[*/**/*.rb *.yml] }, when: 'always' }]], - [[{ changes: { paths: %w[.*.yml **/*] }, when: 'never' }, { changes: { paths: %w[.*.yml **/*] }, when: 'always' }]] - ] - end - - with_them do - it { is_expected.not_to be_included } + with_them do + it { is_expected.to be_included } - it 'correctly populates when:' do - expect(seed_build.attributes).to include(when: 'never') - end + it 'correctly populates when:' do + expect(seed_build.attributes).to include(when: 'delayed', options: { 
start_in: '1 day' }) end end + end - context 'with an explicit `when: always`' do - where(:rule_set) do - [ - [[{ changes: { paths: %w[*/**/*.rb] }, when: 'always' }, { changes: { paths: %w[*/**/*.rb] }, when: 'never' }]], - [[{ changes: { paths: %w[app/models/ci/pipeline.rb] }, when: 'always' }, { changes: { paths: %w[app/models/ci/pipeline.rb] }, when: 'never' }]], - [[{ changes: { paths: %w[spec/**/*.rb] }, when: 'always' }, { changes: { paths: %w[spec/**/*.rb] }, when: 'never' }]], - [[{ changes: { paths: %w[*.yml] }, when: 'always' }, { changes: { paths: %w[*.yml] }, when: 'never' }]], - [[{ changes: { paths: %w[.*.yml] }, when: 'always' }, { changes: { paths: %w[.*.yml] }, when: 'never' }]], - [[{ changes: { paths: %w[**/*] }, when: 'always' }, { changes: { paths: %w[**/*] }, when: 'never' }]], - [[{ changes: { paths: %w[*/**/*.rb *.yml] }, when: 'always' }, { changes: { paths: %w[*/**/*.rb *.yml] }, when: 'never' }]], - [[{ changes: { paths: %w[.*.yml **/*] }, when: 'always' }, { changes: { paths: %w[.*.yml **/*] }, when: 'never' }]] - ] - end + context 'without an explicit when: value' do + where(:rule_set) do + [ + [[{ if: '$VARIABLE == null' }]], + [[{ if: '$VARIABLE == null' }, { if: '$VARIABLE == null' }]], + [[{ if: '$VARIABLE != "the wrong value"' }, { if: '$VARIABLE == null' }]] + ] + end - with_them do - it { is_expected.to be_included } + with_them do + it { is_expected.to be_included } - it 'correctly populates when:' do - expect(seed_build.attributes).to include(when: 'always') - end + it 'correctly populates when:' do + expect(seed_build.attributes).to include(when: 'on_success') end end + end + end - context 'without an explicit when: value' do - where(:rule_set) do - [ - [[{ changes: { paths: %w[*/**/*.rb] } }]], - [[{ changes: { paths: %w[app/models/ci/pipeline.rb] } }]], - [[{ changes: { paths: %w[spec/**/*.rb] } }]], - [[{ changes: { paths: %w[*.yml] } }]], - [[{ changes: { paths: %w[.*.yml] } }]], - [[{ changes: { paths: %w[**/*] } }]], - 
[[{ changes: { paths: %w[*/**/*.rb *.yml] } }]], - [[{ changes: { paths: %w[.*.yml **/*] } }]] - ] - end - - with_them do - it { is_expected.to be_included } - - it 'correctly populates when:' do - expect(seed_build.attributes).to include(when: 'on_success') - end - end + context 'with a matching changes: rule' do + let(:pipeline) do + build(:ci_pipeline, project: project).tap do |pipeline| + stub_pipeline_modified_paths(pipeline, %w[app/models/ci/pipeline.rb spec/models/ci/pipeline_spec.rb .gitlab-ci.yml]) end end - context 'with no matching rule' do + context 'with an explicit `when: never`' do where(:rule_set) do [ - [[{ if: '$VARIABLE != null', when: 'never' }]], - [[{ if: '$VARIABLE != null', when: 'never' }, { if: '$VARIABLE != null', when: 'always' }]], - [[{ if: '$VARIABLE == "the wrong value"', when: 'never' }, { if: '$VARIABLE != null', when: 'always' }]], - [[{ if: '$VARIABLE != null', when: 'always' }]], - [[{ if: '$VARIABLE != null', when: 'always' }, { if: '$VARIABLE != null', when: 'never' }]], - [[{ if: '$VARIABLE == "the wrong value"', when: 'always' }, { if: '$VARIABLE != null', when: 'never' }]], - [[{ if: '$VARIABLE != null' }]], - [[{ if: '$VARIABLE != null' }, { if: '$VARIABLE != null' }]], - [[{ if: '$VARIABLE == "the wrong value"' }, { if: '$VARIABLE != null' }]] + [[{ changes: { paths: %w[*/**/*.rb] }, when: 'never' }, { changes: { paths: %w[*/**/*.rb] }, when: 'always' }]], + [[{ changes: { paths: %w[app/models/ci/pipeline.rb] }, when: 'never' }, { changes: { paths: %w[app/models/ci/pipeline.rb] }, when: 'always' }]], + [[{ changes: { paths: %w[spec/**/*.rb] }, when: 'never' }, { changes: { paths: %w[spec/**/*.rb] }, when: 'always' }]], + [[{ changes: { paths: %w[*.yml] }, when: 'never' }, { changes: { paths: %w[*.yml] }, when: 'always' }]], + [[{ changes: { paths: %w[.*.yml] }, when: 'never' }, { changes: { paths: %w[.*.yml] }, when: 'always' }]], + [[{ changes: { paths: %w[**/*] }, when: 'never' }, { changes: { paths: %w[**/*] }, when: 
'always' }]], + [[{ changes: { paths: %w[*/**/*.rb *.yml] }, when: 'never' }, { changes: { paths: %w[*/**/*.rb *.yml] }, when: 'always' }]], + [[{ changes: { paths: %w[.*.yml **/*] }, when: 'never' }, { changes: { paths: %w[.*.yml **/*] }, when: 'always' }]] ] end @@ -971,249 +878,291 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build, feature_category: :pipeline_au end end - context 'with a rule using CI_ENVIRONMENT_NAME variable' do - let(:rule_set) do - [{ if: '$CI_ENVIRONMENT_NAME == "test"' }] + context 'with an explicit `when: always`' do + where(:rule_set) do + [ + [[{ changes: { paths: %w[*/**/*.rb] }, when: 'always' }, { changes: { paths: %w[*/**/*.rb] }, when: 'never' }]], + [[{ changes: { paths: %w[app/models/ci/pipeline.rb] }, when: 'always' }, { changes: { paths: %w[app/models/ci/pipeline.rb] }, when: 'never' }]], + [[{ changes: { paths: %w[spec/**/*.rb] }, when: 'always' }, { changes: { paths: %w[spec/**/*.rb] }, when: 'never' }]], + [[{ changes: { paths: %w[*.yml] }, when: 'always' }, { changes: { paths: %w[*.yml] }, when: 'never' }]], + [[{ changes: { paths: %w[.*.yml] }, when: 'always' }, { changes: { paths: %w[.*.yml] }, when: 'never' }]], + [[{ changes: { paths: %w[**/*] }, when: 'always' }, { changes: { paths: %w[**/*] }, when: 'never' }]], + [[{ changes: { paths: %w[*/**/*.rb *.yml] }, when: 'always' }, { changes: { paths: %w[*/**/*.rb *.yml] }, when: 'never' }]], + [[{ changes: { paths: %w[.*.yml **/*] }, when: 'always' }, { changes: { paths: %w[.*.yml **/*] }, when: 'never' }]] + ] end - context 'when environment:name satisfies the rule' do - let(:attributes) { { name: 'rspec', rules: rule_set, environment: 'test', when: 'on_success' } } - + with_them do it { is_expected.to be_included } it 'correctly populates when:' do - expect(seed_build.attributes).to include(when: 'on_success') + expect(seed_build.attributes).to include(when: 'always') end end + end - context 'when environment:name does not satisfy rule' do - let(:attributes) { { name: 
'rspec', rules: rule_set, environment: 'dev', when: 'on_success' } } - - it { is_expected.not_to be_included } - - it 'correctly populates when:' do - expect(seed_build.attributes).to include(when: 'never') - end + context 'without an explicit when: value' do + where(:rule_set) do + [ + [[{ changes: { paths: %w[*/**/*.rb] } }]], + [[{ changes: { paths: %w[app/models/ci/pipeline.rb] } }]], + [[{ changes: { paths: %w[spec/**/*.rb] } }]], + [[{ changes: { paths: %w[*.yml] } }]], + [[{ changes: { paths: %w[.*.yml] } }]], + [[{ changes: { paths: %w[**/*] } }]], + [[{ changes: { paths: %w[*/**/*.rb *.yml] } }]], + [[{ changes: { paths: %w[.*.yml **/*] } }]] + ] end - context 'when environment:name is not set' do - it { is_expected.not_to be_included } + with_them do + it { is_expected.to be_included } it 'correctly populates when:' do - expect(seed_build.attributes).to include(when: 'never') + expect(seed_build.attributes).to include(when: 'on_success') end end end + end - context 'with no rules' do - let(:rule_set) { [] } + context 'with no matching rule' do + where(:rule_set) do + [ + [[{ if: '$VARIABLE != null', when: 'never' }]], + [[{ if: '$VARIABLE != null', when: 'never' }, { if: '$VARIABLE != null', when: 'always' }]], + [[{ if: '$VARIABLE == "the wrong value"', when: 'never' }, { if: '$VARIABLE != null', when: 'always' }]], + [[{ if: '$VARIABLE != null', when: 'always' }]], + [[{ if: '$VARIABLE != null', when: 'always' }, { if: '$VARIABLE != null', when: 'never' }]], + [[{ if: '$VARIABLE == "the wrong value"', when: 'always' }, { if: '$VARIABLE != null', when: 'never' }]], + [[{ if: '$VARIABLE != null' }]], + [[{ if: '$VARIABLE != null' }, { if: '$VARIABLE != null' }]], + [[{ if: '$VARIABLE == "the wrong value"' }, { if: '$VARIABLE != null' }]] + ] + end + with_them do it { is_expected.not_to be_included } it 'correctly populates when:' do expect(seed_build.attributes).to include(when: 'never') end end + end + + context 'with a rule using CI_ENVIRONMENT_NAME 
variable' do + let(:rule_set) do + [{ if: '$CI_ENVIRONMENT_NAME == "test"' }] + end - context 'with invalid rules raising error' do - let(:rule_set) do - [ - { changes: { paths: ['README.md'], compare_to: 'invalid-ref' }, when: 'never' } - ] + context 'when environment:name satisfies the rule' do + let(:attributes) { { name: 'rspec', rules: rule_set, environment: 'test', when: 'on_success' } } + + it { is_expected.to be_included } + + it 'correctly populates when:' do + expect(seed_build.attributes).to include(when: 'on_success') end + end + + context 'when environment:name does not satisfy rule' do + let(:attributes) { { name: 'rspec', rules: rule_set, environment: 'dev', when: 'on_success' } } it { is_expected.not_to be_included } it 'correctly populates when:' do expect(seed_build.attributes).to include(when: 'never') end + end - it 'returns an error' do - expect(seed_build.errors).to contain_exactly( - 'Failed to parse rule for rspec: rules:changes:compare_to is not a valid ref' - ) + context 'when environment:name is not set' do + it { is_expected.not_to be_included } + + it 'correctly populates when:' do + expect(seed_build.attributes).to include(when: 'never') end end end - end - - describe 'applying needs: dependency' do - subject { seed_build } - let(:needs_count) { 1 } + context 'with no rules' do + let(:rule_set) { [] } - let(:needs_attributes) do - Array.new(needs_count, name: 'build') - end + it { is_expected.not_to be_included } - let(:attributes) do - { - name: 'rspec', - needs_attributes: needs_attributes - } + it 'correctly populates when:' do + expect(seed_build.attributes).to include(when: 'never') + end end - context 'when build job is not present in prior stages' do - it "is included" do - is_expected.to be_included + context 'with invalid rules raising error' do + let(:rule_set) do + [ + { changes: { paths: ['README.md'], compare_to: 'invalid-ref' }, when: 'never' } + ] end - it "returns an error" do - expect(subject.errors).to 
contain_exactly( - "'rspec' job needs 'build' job, but 'build' is not in any previous stage") - end + it { is_expected.not_to be_included } - context 'when the needed job is optional' do - let(:needs_attributes) { [{ name: 'build', optional: true }] } + it 'correctly populates when:' do + expect(seed_build.attributes).to include(when: 'never') + end - it "does not return an error" do - expect(subject.errors).to be_empty - end + it 'returns an error' do + expect(seed_build.errors).to contain_exactly( + 'Failed to parse rule for rspec: rules:changes:compare_to is not a valid ref' + ) end end + end + end - context 'when build job is part of prior stages' do - let(:stage_attributes) do - { - name: 'build', - index: 0, - builds: [{ name: 'build' }] - } - end + describe 'applying needs: dependency' do + subject { seed_build } - let(:stage_seed) do - Gitlab::Ci::Pipeline::Seed::Stage.new(seed_context, stage_attributes, []) - end + let(:needs_count) { 1 } - let(:previous_stages) { [stage_seed] } + let(:needs_attributes) do + Array.new(needs_count, name: 'build') + end - it "is included" do - is_expected.to be_included - end + let(:attributes) do + { + name: 'rspec', + needs_attributes: needs_attributes + } + end - it "does not have errors" do - expect(subject.errors).to be_empty - end + context 'when build job is not present in prior stages' do + it "is included" do + is_expected.to be_included end - context 'when build job is part of the same stage' do - let(:current_stage) { double(seeds_names: [attributes[:name], 'build']) } + it "returns an error" do + expect(subject.errors).to contain_exactly( + "'rspec' job needs 'build' job, but 'build' is not in any previous stage") + end - it 'is included' do - is_expected.to be_included - end + context 'when the needed job is optional' do + let(:needs_attributes) { [{ name: 'build', optional: true }] } - it 'does not have errors' do + it "does not return an error" do expect(subject.errors).to be_empty end end + end - context 
'when using 101 needs' do - let(:needs_count) { 101 } - - it "returns an error" do - expect(subject.errors).to contain_exactly( - "rspec: one job can only need 50 others, but you have listed 101. See needs keyword documentation for more details") - end + context 'when build job is part of prior stages' do + let(:stage_attributes) do + { + name: 'build', + index: 0, + builds: [{ name: 'build' }] + } + end - context 'when ci_needs_size_limit is set to 100' do - before do - project.actual_limits.update!(ci_needs_size_limit: 100) - end + let(:stage_seed) do + Gitlab::Ci::Pipeline::Seed::Stage.new(seed_context, stage_attributes, []) + end - it "returns an error" do - expect(subject.errors).to contain_exactly( - "rspec: one job can only need 100 others, but you have listed 101. See needs keyword documentation for more details") - end - end + let(:previous_stages) { [stage_seed] } - context 'when ci_needs_size_limit is set to 0' do - before do - project.actual_limits.update!(ci_needs_size_limit: 0) - end + it "is included" do + is_expected.to be_included + end - it "returns an error" do - expect(subject.errors).to contain_exactly( - "rspec: one job can only need 0 others, but you have listed 101. 
See needs keyword documentation for more details") - end - end + it "does not have errors" do + expect(subject.errors).to be_empty end end - describe 'applying pipeline variables' do - subject { seed_build } + context 'when build job is part of the same stage' do + let(:current_stage) { double(seeds_names: [attributes[:name], 'build']) } - let(:pipeline_variables) { [] } - let(:pipeline) do - build(:ci_empty_pipeline, project: project, sha: head_sha, variables: pipeline_variables) + it 'is included' do + is_expected.to be_included end - context 'containing variable references' do - let(:pipeline_variables) do - [ - build(:ci_pipeline_variable, key: 'A', value: '$B'), - build(:ci_pipeline_variable, key: 'B', value: '$C') - ] - end + it 'does not have errors' do + expect(subject.errors).to be_empty + end + end - it "does not have errors" do - expect(subject.errors).to be_empty - end + context 'when using 101 needs' do + let(:needs_count) { 101 } + + it "returns an error" do + expect(subject.errors).to contain_exactly( + "rspec: one job can only need 50 others, but you have listed 101. See needs keyword documentation for more details") end - context 'containing cyclic reference' do - let(:pipeline_variables) do - [ - build(:ci_pipeline_variable, key: 'A', value: '$B'), - build(:ci_pipeline_variable, key: 'B', value: '$C'), - build(:ci_pipeline_variable, key: 'C', value: '$A') - ] + context 'when ci_needs_size_limit is set to 100' do + before do + project.actual_limits.update!(ci_needs_size_limit: 100) end it "returns an error" do expect(subject.errors).to contain_exactly( - 'rspec: circular variable reference detected: ["A", "B", "C"]') + "rspec: one job can only need 100 others, but you have listed 101. See needs keyword documentation for more details") end + end - context 'with job:rules:[if:]' do - let(:attributes) { { name: 'rspec', ref: 'master', rules: [{ if: '$C != null', when: 'always' }] } } - - it "included? does not raise" do - expect { subject.included? 
}.not_to raise_error - end + context 'when ci_needs_size_limit is set to 0' do + before do + project.actual_limits.update!(ci_needs_size_limit: 0) + end - it "included? returns true" do - expect(subject.included?).to eq(true) - end + it "returns an error" do + expect(subject.errors).to contain_exactly( + "rspec: one job can only need 0 others, but you have listed 101. See needs keyword documentation for more details") end end end end - describe 'feature flag ci_reuse_build_in_seed_context' do - let(:attributes) do - { name: 'rspec', rules: [{ if: '$VARIABLE == null' }], when: 'on_success' } + describe 'applying pipeline variables' do + subject { seed_build } + + let(:pipeline_variables) { [] } + let(:pipeline) do + build(:ci_empty_pipeline, project: project, sha: head_sha, variables: pipeline_variables) end - context 'when enabled' do - it_behaves_like 'build seed' + context 'containing variable references' do + let(:pipeline_variables) do + [ + build(:ci_pipeline_variable, key: 'A', value: '$B'), + build(:ci_pipeline_variable, key: 'B', value: '$C') + ] + end - it 'initializes the build once' do - expect(Ci::Build).to receive(:new).once.and_call_original - seed_build.to_resource + it "does not have errors" do + expect(subject.errors).to be_empty end end - context 'when disabled' do - before do - stub_feature_flags(ci_reuse_build_in_seed_context: false) + context 'containing cyclic reference' do + let(:pipeline_variables) do + [ + build(:ci_pipeline_variable, key: 'A', value: '$B'), + build(:ci_pipeline_variable, key: 'B', value: '$C'), + build(:ci_pipeline_variable, key: 'C', value: '$A') + ] + end + + it "returns an error" do + expect(subject.errors).to contain_exactly( + 'rspec: circular variable reference detected: ["A", "B", "C"]') end - it_behaves_like 'build seed' + context 'with job:rules:[if:]' do + let(:attributes) { { name: 'rspec', ref: 'master', rules: [{ if: '$C != null', when: 'always' }] } } + + it "included? 
does not raise" do + expect { subject.included? }.not_to raise_error + end - it 'initializes the build twice' do - expect(Ci::Build).to receive(:new).twice.and_call_original - seed_build.to_resource + it "included? returns true" do + expect(subject.included?).to eq(true) + end end end end diff --git a/spec/lib/gitlab/ci/reports/codequality_reports_spec.rb b/spec/lib/gitlab/ci/reports/codequality_reports_spec.rb index 68e70525c55..93644aa1497 100644 --- a/spec/lib/gitlab/ci/reports/codequality_reports_spec.rb +++ b/spec/lib/gitlab/ci/reports/codequality_reports_spec.rb @@ -4,19 +4,25 @@ require 'spec_helper' RSpec.describe Gitlab::Ci::Reports::CodequalityReports do let(:codequality_report) { described_class.new } - let(:degradation_1) { build(:codequality_degradation_1) } - let(:degradation_2) { build(:codequality_degradation_2) } + let(:degradation_major) { build(:codequality_degradation, :major) } + let(:degradation_minor) { build(:codequality_degradation, :minor) } + let(:degradation_blocker) { build(:codequality_degradation, :blocker) } + let(:degradation_info) { build(:codequality_degradation, :info) } + let(:degradation_major_2) { build(:codequality_degradation, :major) } + let(:degradation_critical) { build(:codequality_degradation, :critical) } + let(:degradation_uppercase_major) { build(:codequality_degradation, severity: 'MAJOR') } + let(:degradation_unknown) { build(:codequality_degradation, severity: 'unknown') } it { expect(codequality_report.degradations).to eq({}) } describe '#add_degradation' do context 'when there is a degradation' do before do - codequality_report.add_degradation(degradation_1) + codequality_report.add_degradation(degradation_major) end it 'adds degradation to codequality report' do - expect(codequality_report.degradations.keys).to eq([degradation_1[:fingerprint]]) + expect(codequality_report.degradations.keys).to match_array([degradation_major[:fingerprint]]) expect(codequality_report.degradations.values.size).to eq(1) end end @@ 
-53,8 +59,8 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReports do context 'when there are many degradations' do before do - codequality_report.add_degradation(degradation_1) - codequality_report.add_degradation(degradation_2) + codequality_report.add_degradation(degradation_major) + codequality_report.add_degradation(degradation_minor) end it 'returns the number of degradations' do @@ -68,36 +74,25 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReports do context 'when there are many degradations' do before do - codequality_report.add_degradation(degradation_1) - codequality_report.add_degradation(degradation_2) + codequality_report.add_degradation(degradation_major) + codequality_report.add_degradation(degradation_minor) end it 'returns all degradations' do - expect(all_degradations).to contain_exactly(degradation_1, degradation_2) + expect(all_degradations).to contain_exactly(degradation_major, degradation_minor) end end end describe '#sort_degradations!' do - let(:major) { build(:codequality_degradation, :major) } - let(:minor) { build(:codequality_degradation, :minor) } - let(:blocker) { build(:codequality_degradation, :blocker) } - let(:info) { build(:codequality_degradation, :info) } - let(:major_2) { build(:codequality_degradation, :major) } - let(:critical) { build(:codequality_degradation, :critical) } - let(:uppercase_major) { build(:codequality_degradation, severity: 'MAJOR') } - let(:unknown) { build(:codequality_degradation, severity: 'unknown') } - - let(:codequality_report) { described_class.new } - before do - codequality_report.add_degradation(major) - codequality_report.add_degradation(minor) - codequality_report.add_degradation(blocker) - codequality_report.add_degradation(major_2) - codequality_report.add_degradation(info) - codequality_report.add_degradation(critical) - codequality_report.add_degradation(unknown) + codequality_report.add_degradation(degradation_major) + codequality_report.add_degradation(degradation_minor) + 
codequality_report.add_degradation(degradation_blocker) + codequality_report.add_degradation(degradation_major_2) + codequality_report.add_degradation(degradation_info) + codequality_report.add_degradation(degradation_critical) + codequality_report.add_degradation(degradation_unknown) codequality_report.sort_degradations! end @@ -105,36 +100,70 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReports do it 'sorts degradations based on severity' do expect(codequality_report.degradations.values).to eq( [ - blocker, - critical, - major, - major_2, - minor, - info, - unknown + degradation_blocker, + degradation_critical, + degradation_major, + degradation_major_2, + degradation_minor, + degradation_info, + degradation_unknown ]) end context 'with non-existence and uppercase severities' do let(:other_report) { described_class.new } - let(:non_existent) { build(:codequality_degradation, severity: 'non-existent') } + let(:degradation_non_existent) { build(:codequality_degradation, severity: 'non-existent') } before do - other_report.add_degradation(blocker) - other_report.add_degradation(uppercase_major) - other_report.add_degradation(minor) - other_report.add_degradation(non_existent) + other_report.add_degradation(degradation_blocker) + other_report.add_degradation(degradation_uppercase_major) + other_report.add_degradation(degradation_minor) + other_report.add_degradation(degradation_non_existent) end it 'sorts unknown last' do expect(other_report.degradations.values).to eq( [ - blocker, - uppercase_major, - minor, - non_existent + degradation_blocker, + degradation_uppercase_major, + degradation_minor, + degradation_non_existent ]) end end end + + describe '#code_quality_report_summary' do + context "when there is no degradation" do + it 'return nil' do + expect(codequality_report.code_quality_report_summary).to eq(nil) + end + end + + context "when there are degradations" do + before do + codequality_report.add_degradation(degradation_major) + 
codequality_report.add_degradation(degradation_major_2) + codequality_report.add_degradation(degradation_minor) + codequality_report.add_degradation(degradation_blocker) + codequality_report.add_degradation(degradation_info) + codequality_report.add_degradation(degradation_critical) + codequality_report.add_degradation(degradation_unknown) + end + + it 'returns the summary of the code quality report' do + expect(codequality_report.code_quality_report_summary).to eq( + { + 'major' => 2, + 'minor' => 1, + 'blocker' => 1, + 'info' => 1, + 'critical' => 1, + 'unknown' => 1, + 'count' => 7 + } + ) + end + end + end end diff --git a/spec/lib/gitlab/ci/runner_instructions_spec.rb b/spec/lib/gitlab/ci/runner_instructions_spec.rb index 56f69720b87..31c53d4a030 100644 --- a/spec/lib/gitlab/ci/runner_instructions_spec.rb +++ b/spec/lib/gitlab/ci/runner_instructions_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Ci::RunnerInstructions do +RSpec.describe Gitlab::Ci::RunnerInstructions, feature_category: :runner_fleet do using RSpec::Parameterized::TableSyntax let(:params) { {} } @@ -29,7 +29,6 @@ RSpec.describe Gitlab::Ci::RunnerInstructions do context name do it 'has the required fields' do expect(subject).to have_key(:human_readable_name) - expect(subject).to have_key(:installation_instructions_url) end end end diff --git a/spec/lib/gitlab/ci/runner_releases_spec.rb b/spec/lib/gitlab/ci/runner_releases_spec.rb index ad1e9b12b8a..14f3c95ec79 100644 --- a/spec/lib/gitlab/ci/runner_releases_spec.rb +++ b/spec/lib/gitlab/ci/runner_releases_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Ci::RunnerReleases do +RSpec.describe Gitlab::Ci::RunnerReleases, feature_category: :runner_fleet do subject { described_class.instance } let(:runner_releases_url) { 'http://testurl.com/runner_public_releases' } diff --git a/spec/lib/gitlab/ci/runner_upgrade_check_spec.rb b/spec/lib/gitlab/ci/runner_upgrade_check_spec.rb index 55c3834bfa7..526d6cba657 
100644 --- a/spec/lib/gitlab/ci/runner_upgrade_check_spec.rb +++ b/spec/lib/gitlab/ci/runner_upgrade_check_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Ci::RunnerUpgradeCheck do +RSpec.describe Gitlab::Ci::RunnerUpgradeCheck, feature_category: :runner_fleet do using RSpec::Parameterized::TableSyntax subject(:instance) { described_class.new(gitlab_version, runner_releases) } @@ -51,8 +51,8 @@ RSpec.describe Gitlab::Ci::RunnerUpgradeCheck do context 'with runner_version from last minor release' do let(:runner_version) { 'v14.0.1' } - it 'returns :not_available' do - is_expected.to eq([parsed_runner_version, :not_available]) + it 'returns :unavailable' do + is_expected.to eq([parsed_runner_version, :unavailable]) end end end @@ -85,8 +85,8 @@ RSpec.describe Gitlab::Ci::RunnerUpgradeCheck do context 'with a runner_version that is too recent' do let(:runner_version) { 'v14.2.0' } - it 'returns :not_available' do - is_expected.to eq([parsed_runner_version, :not_available]) + it 'returns :unavailable' do + is_expected.to eq([parsed_runner_version, :unavailable]) end end end @@ -96,14 +96,14 @@ RSpec.describe Gitlab::Ci::RunnerUpgradeCheck do context 'with valid params' do where(:runner_version, :expected_status, :expected_suggested_version) do - 'v15.0.0' | :not_available | '15.0.0' # not available since the GitLab instance is still on 14.x, a major version might be incompatible, and a patch upgrade is not available + 'v15.0.0' | :unavailable | '15.0.0' # not available since the GitLab instance is still on 14.x, a major version might be incompatible, and a patch upgrade is not available 'v14.1.0-rc3' | :recommended | '14.1.1' # recommended since even though the GitLab instance is still on 14.0.x, there is a patch release (14.1.1) available which might contain security fixes 'v14.1.0~beta.1574.gf6ea9389' | :recommended | '14.1.1' # suffixes are correctly handled 'v14.1.0/1.1.0' | :recommended | '14.1.1' # suffixes are correctly handled 'v14.1.0' | 
:recommended | '14.1.1' # recommended since even though the GitLab instance is still on 14.0.x, there is a patch release (14.1.1) available which might contain security fixes 'v14.0.1' | :recommended | '14.0.2' # recommended upgrade since 14.0.2 is available 'v14.0.2-rc1' | :recommended | '14.0.2' # recommended upgrade since 14.0.2 is available and we'll move out of a release candidate - 'v14.0.2' | :not_available | '14.0.2' # not available since 14.0.2 is the latest 14.0.x release available within the instance's major.minor version + 'v14.0.2' | :unavailable | '14.0.2' # not available since 14.0.2 is the latest 14.0.x release available within the instance's major.minor version 'v13.10.1' | :available | '14.0.2' # available upgrade: 14.0.2 'v13.10.1~beta.1574.gf6ea9389' | :recommended | '13.10.1' # suffixes are correctly handled, official 13.10.1 is available 'v13.10.1/1.1.0' | :recommended | '13.10.1' # suffixes are correctly handled, official 13.10.1 is available @@ -125,13 +125,13 @@ RSpec.describe Gitlab::Ci::RunnerUpgradeCheck do context 'with valid params' do where(:runner_version, :expected_status, :expected_suggested_version) do - 'v14.0.0' | :recommended | '14.0.2' # recommended upgrade since 14.0.2 is available, even though the GitLab instance is still on 13.x and a major version might be incompatible - 'v13.10.1' | :not_available | '13.10.1' # not available since 13.10.1 is already ahead of GitLab instance version and is the latest patch update for 13.10.x - 'v13.10.0' | :recommended | '13.10.1' # recommended upgrade since 13.10.1 is available - 'v13.9.2' | :not_available | '13.9.2' # not_available even though backports are no longer released for this version because the runner is already on the same version as the GitLab version - 'v13.9.0' | :recommended | '13.9.2' # recommended upgrade since backports are no longer released for this version - 'v13.8.1' | :recommended | '13.9.2' # recommended upgrade since build is too old (missing in records) - 
'v11.4.1' | :recommended | '13.9.2' # recommended upgrade since build is too old (missing in records) + 'v14.0.0' | :recommended | '14.0.2' # recommended upgrade since 14.0.2 is available, even though the GitLab instance is still on 13.x and a major version might be incompatible + 'v13.10.1' | :unavailable | '13.10.1' # not available since 13.10.1 is already ahead of GitLab instance version and is the latest patch update for 13.10.x + 'v13.10.0' | :recommended | '13.10.1' # recommended upgrade since 13.10.1 is available + 'v13.9.2' | :unavailable | '13.9.2' # not available even though backports are no longer released for this version because the runner is already on the same version as the GitLab version + 'v13.9.0' | :recommended | '13.9.2' # recommended upgrade since backports are no longer released for this version + 'v13.8.1' | :recommended | '13.9.2' # recommended upgrade since build is too old (missing in records) + 'v11.4.1' | :recommended | '13.9.2' # recommended upgrade since build is too old (missing in records) end with_them do diff --git a/spec/lib/gitlab/ci/status/bridge/common_spec.rb b/spec/lib/gitlab/ci/status/bridge/common_spec.rb index 37524afc83d..fef97c73a91 100644 --- a/spec/lib/gitlab/ci/status/bridge/common_spec.rb +++ b/spec/lib/gitlab/ci/status/bridge/common_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Ci::Status::Bridge::Common do +RSpec.describe Gitlab::Ci::Status::Bridge::Common, feature_category: :continuous_integration do let_it_be(:user) { create(:user) } let_it_be(:bridge) { create(:ci_bridge) } let_it_be(:downstream_pipeline) { create(:ci_pipeline) } @@ -37,4 +37,35 @@ RSpec.describe Gitlab::Ci::Status::Bridge::Common do it { expect(subject.details_path).to be_nil } end end + + describe '#label' do + let(:description) { 'my description' } + let(:bridge) { create(:ci_bridge, description: description) } + + subject do + Gitlab::Ci::Status::Created + .new(bridge, user) + .extend(described_class) + end + + it 
'returns description' do + expect(subject.label).to eq description + end + + context 'when description is nil' do + let(:description) { nil } + + it 'returns core status label' do + expect(subject.label).to eq('created') + end + end + + context 'when description is empty string' do + let(:description) { '' } + + it 'returns core status label' do + expect(subject.label).to eq('created') + end + end + end end diff --git a/spec/lib/gitlab/ci/status/bridge/factory_spec.rb b/spec/lib/gitlab/ci/status/bridge/factory_spec.rb index c13901a4776..040c3ec7f6e 100644 --- a/spec/lib/gitlab/ci/status/bridge/factory_spec.rb +++ b/spec/lib/gitlab/ci/status/bridge/factory_spec.rb @@ -25,7 +25,7 @@ RSpec.describe Gitlab::Ci::Status::Bridge::Factory, feature_category: :continuou expect(status.text).to eq s_('CiStatusText|created') expect(status.icon).to eq 'status_created' expect(status.favicon).to eq 'favicon_status_created' - expect(status.label).to be_nil + expect(status.label).to eq 'created' expect(status).not_to have_details expect(status).not_to have_action end @@ -40,7 +40,8 @@ RSpec.describe Gitlab::Ci::Status::Bridge::Factory, feature_category: :continuou it 'matches correct extended statuses' do expect(factory.extended_statuses) - .to eq [Gitlab::Ci::Status::Bridge::Failed] + .to eq [Gitlab::Ci::Status::Bridge::Retryable, + Gitlab::Ci::Status::Bridge::Failed] end it 'fabricates a failed bridge status' do @@ -51,10 +52,10 @@ RSpec.describe Gitlab::Ci::Status::Bridge::Factory, feature_category: :continuou expect(status.text).to eq s_('CiStatusText|failed') expect(status.icon).to eq 'status_failed' expect(status.favicon).to eq 'favicon_status_failed' - expect(status.label).to be_nil + expect(status.label).to eq 'failed' expect(status.status_tooltip).to eq "#{s_('CiStatusText|failed')} - (unknown failure)" expect(status).not_to have_details - expect(status).not_to have_action + expect(status).to have_action end context 'failed with downstream_pipeline_creation_failed' do @@ 
-130,12 +131,36 @@ RSpec.describe Gitlab::Ci::Status::Bridge::Factory, feature_category: :continuou expect(status.text).to eq 'waiting' expect(status.group).to eq 'waiting-for-resource' expect(status.icon).to eq 'status_pending' - expect(status.favicon).to eq 'favicon_pending' + expect(status.favicon).to eq 'favicon_status_pending' expect(status.illustration).to include(:image, :size, :title) expect(status).not_to have_details end end + context 'when the bridge is successful and therefore retryable' do + let(:bridge) { create(:ci_bridge, :success) } + + it 'matches correct core status' do + expect(factory.core_status).to be_a Gitlab::Ci::Status::Success + end + + it 'matches correct extended statuses' do + expect(factory.extended_statuses) + .to eq [Gitlab::Ci::Status::Bridge::Retryable] + end + + it 'fabricates a retryable build status' do + expect(status).to be_a Gitlab::Ci::Status::Bridge::Retryable + end + + it 'fabricates status with correct details' do + expect(status.text).to eq s_('CiStatusText|passed') + expect(status.icon).to eq 'status_success' + expect(status.favicon).to eq 'favicon_status_success' + expect(status).to have_action + end + end + private def create_bridge(*traits) diff --git a/spec/lib/gitlab/ci/status/build/play_spec.rb b/spec/lib/gitlab/ci/status/build/play_spec.rb index ade07a54877..2c93f842a30 100644 --- a/spec/lib/gitlab/ci/status/build/play_spec.rb +++ b/spec/lib/gitlab/ci/status/build/play_spec.rb @@ -75,7 +75,7 @@ RSpec.describe Gitlab::Ci::Status::Build::Play do end describe '#action_button_title' do - it { expect(subject.action_button_title).to eq 'Trigger this manual action' } + it { expect(subject.action_button_title).to eq 'Run job' } end describe '.matches?' 
do diff --git a/spec/lib/gitlab/ci/status/waiting_for_resource_spec.rb b/spec/lib/gitlab/ci/status/waiting_for_resource_spec.rb index bb6139accaf..6f5ab77a358 100644 --- a/spec/lib/gitlab/ci/status/waiting_for_resource_spec.rb +++ b/spec/lib/gitlab/ci/status/waiting_for_resource_spec.rb @@ -20,7 +20,7 @@ RSpec.describe Gitlab::Ci::Status::WaitingForResource do end describe '#favicon' do - it { expect(subject.favicon).to eq 'favicon_pending' } + it { expect(subject.favicon).to eq 'favicon_status_pending' } end describe '#group' do diff --git a/spec/lib/gitlab/ci/templates/5_minute_production_app_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/5_minute_production_app_ci_yaml_spec.rb index 43deb465025..e4cee379f40 100644 --- a/spec/lib/gitlab/ci/templates/5_minute_production_app_ci_yaml_spec.rb +++ b/spec/lib/gitlab/ci/templates/5_minute_production_app_ci_yaml_spec.rb @@ -2,16 +2,16 @@ require 'spec_helper' -RSpec.describe '5-Minute-Production-App.gitlab-ci.yml' do +RSpec.describe '5-Minute-Production-App.gitlab-ci.yml', feature_category: :five_minute_production_app do subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('5-Minute-Production-App') } describe 'the created pipeline' do - let_it_be(:project) { create(:project, :auto_devops, :custom_repo, files: { 'README.md' => '' }) } + let_it_be_with_refind(:project) { create(:project, :auto_devops, :custom_repo, files: { 'README.md' => '' }) } let(:user) { project.first_owner } let(:default_branch) { 'master' } let(:pipeline_branch) { default_branch } - let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) } + let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch) } let(:pipeline) { service.execute(:push).payload } let(:build_names) { pipeline.builds.pluck(:name) } @@ -24,24 +24,27 @@ RSpec.describe '5-Minute-Production-App.gitlab-ci.yml' do end context 'when AWS variables are set' do + def create_ci_variable(key, value) + create(:ci_variable, 
project: project, key: key, value: value) + end + before do - create(:ci_variable, project: project, key: 'AWS_ACCESS_KEY_ID', value: 'AKIAIOSFODNN7EXAMPLE') - create(:ci_variable, project: project, key: 'AWS_SECRET_ACCESS_KEY', value: 'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY') - create(:ci_variable, project: project, key: 'AWS_DEFAULT_REGION', value: 'us-west-2') + create_ci_variable('AWS_ACCESS_KEY_ID', 'AKIAIOSFODNN7EXAMPLE') + create_ci_variable('AWS_SECRET_ACCESS_KEY', 'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY') + create_ci_variable('AWS_DEFAULT_REGION', 'us-west-2') end it 'creates all jobs' do - expect(build_names).to match_array(%w(build terraform_apply deploy terraform_destroy)) + expect(build_names).to match_array(%w[build terraform_apply deploy terraform_destroy]) end - context 'pipeline branch is protected' do + context 'when pipeline branch is protected' do before do create(:protected_branch, project: project, name: pipeline_branch) - project.reload end it 'does not create a destroy job' do - expect(build_names).to match_array(%w(build terraform_apply deploy)) + expect(build_names).to match_array(%w[build terraform_apply deploy]) end end end diff --git a/spec/lib/gitlab/ci/templates/Terraform/module_base_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Terraform/module_base_gitlab_ci_yaml_spec.rb new file mode 100644 index 00000000000..9f4f6b02b0b --- /dev/null +++ b/spec/lib/gitlab/ci/templates/Terraform/module_base_gitlab_ci_yaml_spec.rb @@ -0,0 +1,26 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'Terraform/Module-Base.gitlab-ci.yml', feature_category: :continuous_integration do + subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('Terraform/Module-Base') } + + describe 'the created pipeline' do + let(:default_branch) { 'main' } + let(:pipeline_branch) { default_branch } + let_it_be(:project) { create(:project, :custom_repo, files: { 'README.md' => '' }) } + let(:user) { project.first_owner } + 
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch) } + let(:pipeline) { service.execute(:push).payload } + let(:build_names) { pipeline.builds.pluck(:name) } + + before do + stub_ci_pipeline_yaml_file(template.content) + allow(project).to receive(:default_branch).and_return(default_branch) + end + + it 'does not create any jobs' do + expect(build_names).to be_empty + end + end +end diff --git a/spec/lib/gitlab/ci/templates/terraform_module_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/terraform_module_gitlab_ci_yaml_spec.rb new file mode 100644 index 00000000000..7c3c1776111 --- /dev/null +++ b/spec/lib/gitlab/ci/templates/terraform_module_gitlab_ci_yaml_spec.rb @@ -0,0 +1,62 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'Terraform-Module.gitlab-ci.yml', feature_category: :continuous_integration do + before do + allow(Gitlab::Template::GitlabCiYmlTemplate).to receive(:excluded_patterns).and_return([]) + end + + subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('Terraform-Module') } + + shared_examples 'on any branch' do + it 'creates fmt and kics job', :aggregate_failures do + expect(pipeline.errors).to be_empty + expect(build_names).to include('fmt', 'kics-iac-sast') + end + + it 'does not create a deploy job', :aggregate_failures do + expect(pipeline.errors).to be_empty + expect(build_names).not_to include('deploy') + end + end + + let_it_be(:project) { create(:project, :repository, create_branch: 'patch-1', create_tag: '1.0.0') } + let_it_be(:user) { project.first_owner } + + describe 'the created pipeline' do + let(:default_branch) { project.default_branch_or_main } + let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_ref) } + let(:pipeline) { service.execute(:push).payload } + let(:build_names) { pipeline.builds.pluck(:name) } + + before do + stub_ci_pipeline_yaml_file(template.content) + allow_next_instance_of(Ci::BuildScheduleWorker) do |instance| + 
allow(instance).to receive(:perform).and_return(true) + end + allow(project).to receive(:default_branch).and_return(default_branch) + end + + context 'when on default branch' do + let(:pipeline_ref) { default_branch } + + it_behaves_like 'on any branch' + end + + context 'when outside the default branch' do + let(:pipeline_ref) { 'patch-1' } + + it_behaves_like 'on any branch' + end + + context 'when on tag' do + let(:pipeline_ref) { '1.0.0' } + + it 'creates fmt and deploy job', :aggregate_failures do + expect(pipeline.errors).to be_empty + expect(build_names).to include('fmt', 'deploy') + end + end + end +end diff --git a/spec/lib/gitlab/ci/trace/archive_spec.rb b/spec/lib/gitlab/ci/trace/archive_spec.rb index 582c4ad343f..cce6477b91e 100644 --- a/spec/lib/gitlab/ci/trace/archive_spec.rb +++ b/spec/lib/gitlab/ci/trace/archive_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Ci::Trace::Archive do +RSpec.describe Gitlab::Ci::Trace::Archive, feature_category: :scalability do context 'with transactional fixtures' do let_it_be_with_reload(:job) { create(:ci_build, :success, :trace_live) } let_it_be_with_reload(:trace_metadata) { create(:ci_build_trace_metadata, build: job) } diff --git a/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb b/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb new file mode 100644 index 00000000000..a5365ae53b8 --- /dev/null +++ b/spec/lib/gitlab/ci/variables/builder/pipeline_spec.rb @@ -0,0 +1,336 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Ci::Variables::Builder::Pipeline, feature_category: :pipeline_authoring do + let_it_be(:project) { create_default(:project, :repository, create_tag: 'test').freeze } + let_it_be(:user) { create(:user) } + + let(:pipeline) { build(:ci_empty_pipeline, :created, project: project) } + + describe '#predefined_variables' do + subject { described_class.new(pipeline).predefined_variables } + + it 'includes all predefined variables in a valid 
order' do + keys = subject.pluck(:key) + + expect(keys).to contain_exactly(*%w[ + CI_PIPELINE_IID + CI_PIPELINE_SOURCE + CI_PIPELINE_CREATED_AT + CI_COMMIT_SHA + CI_COMMIT_SHORT_SHA + CI_COMMIT_BEFORE_SHA + CI_COMMIT_REF_NAME + CI_COMMIT_REF_SLUG + CI_COMMIT_BRANCH + CI_COMMIT_MESSAGE + CI_COMMIT_TITLE + CI_COMMIT_DESCRIPTION + CI_COMMIT_REF_PROTECTED + CI_COMMIT_TIMESTAMP + CI_COMMIT_AUTHOR + CI_BUILD_REF + CI_BUILD_BEFORE_SHA + CI_BUILD_REF_NAME + CI_BUILD_REF_SLUG + ]) + end + + context 'when the pipeline is running for a tag' do + let(:pipeline) { build(:ci_empty_pipeline, :created, project: project, ref: 'test', tag: true) } + + it 'includes all predefined variables in a valid order' do + keys = subject.pluck(:key) + + expect(keys).to contain_exactly(*%w[ + CI_PIPELINE_IID + CI_PIPELINE_SOURCE + CI_PIPELINE_CREATED_AT + CI_COMMIT_SHA + CI_COMMIT_SHORT_SHA + CI_COMMIT_BEFORE_SHA + CI_COMMIT_REF_NAME + CI_COMMIT_REF_SLUG + CI_COMMIT_MESSAGE + CI_COMMIT_TITLE + CI_COMMIT_DESCRIPTION + CI_COMMIT_REF_PROTECTED + CI_COMMIT_TIMESTAMP + CI_COMMIT_AUTHOR + CI_BUILD_REF + CI_BUILD_BEFORE_SHA + CI_BUILD_REF_NAME + CI_BUILD_REF_SLUG + CI_COMMIT_TAG + CI_COMMIT_TAG_MESSAGE + CI_BUILD_TAG + ]) + end + end + + context 'when merge request is present' do + let_it_be(:assignees) { create_list(:user, 2) } + let_it_be(:milestone) { create(:milestone, project: project) } + let_it_be(:labels) { create_list(:label, 2) } + + let(:merge_request) do + create(:merge_request, :simple, + source_project: project, + target_project: project, + assignees: assignees, + milestone: milestone, + labels: labels) + end + + context 'when pipeline for merge request is created' do + let(:pipeline) do + create(:ci_pipeline, :detached_merge_request_pipeline, + ci_ref_presence: false, + user: user, + merge_request: merge_request) + end + + before do + project.add_developer(user) + end + + it 'exposes merge request pipeline variables' do + expect(subject.to_hash) + .to include( + 'CI_MERGE_REQUEST_ID' => 
merge_request.id.to_s, + 'CI_MERGE_REQUEST_IID' => merge_request.iid.to_s, + 'CI_MERGE_REQUEST_REF_PATH' => merge_request.ref_path.to_s, + 'CI_MERGE_REQUEST_PROJECT_ID' => merge_request.project.id.to_s, + 'CI_MERGE_REQUEST_PROJECT_PATH' => merge_request.project.full_path, + 'CI_MERGE_REQUEST_PROJECT_URL' => merge_request.project.web_url, + 'CI_MERGE_REQUEST_TARGET_BRANCH_NAME' => merge_request.target_branch.to_s, + 'CI_MERGE_REQUEST_TARGET_BRANCH_PROTECTED' => ProtectedBranch.protected?( + merge_request.target_project, + merge_request.target_branch + ).to_s, + 'CI_MERGE_REQUEST_TARGET_BRANCH_SHA' => '', + 'CI_MERGE_REQUEST_SOURCE_PROJECT_ID' => merge_request.source_project.id.to_s, + 'CI_MERGE_REQUEST_SOURCE_PROJECT_PATH' => merge_request.source_project.full_path, + 'CI_MERGE_REQUEST_SOURCE_PROJECT_URL' => merge_request.source_project.web_url, + 'CI_MERGE_REQUEST_SOURCE_BRANCH_NAME' => merge_request.source_branch.to_s, + 'CI_MERGE_REQUEST_SOURCE_BRANCH_SHA' => '', + 'CI_MERGE_REQUEST_TITLE' => merge_request.title, + 'CI_MERGE_REQUEST_ASSIGNEES' => merge_request.assignee_username_list, + 'CI_MERGE_REQUEST_MILESTONE' => milestone.title, + 'CI_MERGE_REQUEST_LABELS' => labels.map(&:title).sort.join(','), + 'CI_MERGE_REQUEST_EVENT_TYPE' => 'detached', + 'CI_OPEN_MERGE_REQUESTS' => merge_request.to_reference(full: true)) + end + + it 'exposes diff variables' do + expect(subject.to_hash) + .to include( + 'CI_MERGE_REQUEST_DIFF_ID' => merge_request.merge_request_diff.id.to_s, + 'CI_MERGE_REQUEST_DIFF_BASE_SHA' => merge_request.merge_request_diff.base_commit_sha) + end + + context 'without assignee' do + let(:assignees) { [] } + + it 'does not expose assignee variable' do + expect(subject.to_hash.keys).not_to include('CI_MERGE_REQUEST_ASSIGNEES') + end + end + + context 'without milestone' do + let(:milestone) { nil } + + it 'does not expose milestone variable' do + expect(subject.to_hash.keys).not_to include('CI_MERGE_REQUEST_MILESTONE') + end + end + + context 'without 
labels' do + let(:labels) { [] } + + it 'does not expose labels variable' do + expect(subject.to_hash.keys).not_to include('CI_MERGE_REQUEST_LABELS') + end + end + end + + context 'when pipeline on branch is created' do + let(:pipeline) do + create(:ci_pipeline, project: project, user: user, ref: 'feature') + end + + context 'when a merge request is created' do + before do + merge_request + end + + context 'when user has access to project' do + before do + project.add_developer(user) + end + + it 'merge request references are returned matching the pipeline' do + expect(subject.to_hash).to include( + 'CI_OPEN_MERGE_REQUESTS' => merge_request.to_reference(full: true)) + end + end + + context 'when user does not have access to project' do + it 'CI_OPEN_MERGE_REQUESTS is not returned' do + expect(subject.to_hash).not_to have_key('CI_OPEN_MERGE_REQUESTS') + end + end + end + + context 'when no a merge request is created' do + it 'CI_OPEN_MERGE_REQUESTS is not returned' do + expect(subject.to_hash).not_to have_key('CI_OPEN_MERGE_REQUESTS') + end + end + end + + context 'with merged results' do + let(:pipeline) do + create(:ci_pipeline, :merged_result_pipeline, merge_request: merge_request) + end + + it 'exposes merge request pipeline variables' do + expect(subject.to_hash) + .to include( + 'CI_MERGE_REQUEST_ID' => merge_request.id.to_s, + 'CI_MERGE_REQUEST_IID' => merge_request.iid.to_s, + 'CI_MERGE_REQUEST_REF_PATH' => merge_request.ref_path.to_s, + 'CI_MERGE_REQUEST_PROJECT_ID' => merge_request.project.id.to_s, + 'CI_MERGE_REQUEST_PROJECT_PATH' => merge_request.project.full_path, + 'CI_MERGE_REQUEST_PROJECT_URL' => merge_request.project.web_url, + 'CI_MERGE_REQUEST_TARGET_BRANCH_NAME' => merge_request.target_branch.to_s, + 'CI_MERGE_REQUEST_TARGET_BRANCH_PROTECTED' => ProtectedBranch.protected?( + merge_request.target_project, + merge_request.target_branch + ).to_s, + 'CI_MERGE_REQUEST_TARGET_BRANCH_SHA' => merge_request.target_branch_sha, + 
'CI_MERGE_REQUEST_SOURCE_PROJECT_ID' => merge_request.source_project.id.to_s, + 'CI_MERGE_REQUEST_SOURCE_PROJECT_PATH' => merge_request.source_project.full_path, + 'CI_MERGE_REQUEST_SOURCE_PROJECT_URL' => merge_request.source_project.web_url, + 'CI_MERGE_REQUEST_SOURCE_BRANCH_NAME' => merge_request.source_branch.to_s, + 'CI_MERGE_REQUEST_SOURCE_BRANCH_SHA' => merge_request.source_branch_sha, + 'CI_MERGE_REQUEST_TITLE' => merge_request.title, + 'CI_MERGE_REQUEST_ASSIGNEES' => merge_request.assignee_username_list, + 'CI_MERGE_REQUEST_MILESTONE' => milestone.title, + 'CI_MERGE_REQUEST_LABELS' => labels.map(&:title).sort.join(','), + 'CI_MERGE_REQUEST_EVENT_TYPE' => 'merged_result') + end + + it 'exposes diff variables' do + expect(subject.to_hash) + .to include( + 'CI_MERGE_REQUEST_DIFF_ID' => merge_request.merge_request_diff.id.to_s, + 'CI_MERGE_REQUEST_DIFF_BASE_SHA' => merge_request.merge_request_diff.base_commit_sha) + end + end + end + + context 'when source is external pull request' do + let(:pipeline) do + create(:ci_pipeline, source: :external_pull_request_event, external_pull_request: pull_request) + end + + let(:pull_request) { create(:external_pull_request, project: project) } + + it 'exposes external pull request pipeline variables' do + expect(subject.to_hash) + .to include( + 'CI_EXTERNAL_PULL_REQUEST_IID' => pull_request.pull_request_iid.to_s, + 'CI_EXTERNAL_PULL_REQUEST_SOURCE_REPOSITORY' => pull_request.source_repository, + 'CI_EXTERNAL_PULL_REQUEST_TARGET_REPOSITORY' => pull_request.target_repository, + 'CI_EXTERNAL_PULL_REQUEST_SOURCE_BRANCH_SHA' => pull_request.source_sha, + 'CI_EXTERNAL_PULL_REQUEST_TARGET_BRANCH_SHA' => pull_request.target_sha, + 'CI_EXTERNAL_PULL_REQUEST_SOURCE_BRANCH_NAME' => pull_request.source_branch, + 'CI_EXTERNAL_PULL_REQUEST_TARGET_BRANCH_NAME' => pull_request.target_branch + ) + end + end + + describe 'variable CI_KUBERNETES_ACTIVE' do + context 'when pipeline.has_kubernetes_active? 
is true' do + before do + allow(pipeline).to receive(:has_kubernetes_active?).and_return(true) + end + + it "is included with value 'true'" do + expect(subject.to_hash).to include('CI_KUBERNETES_ACTIVE' => 'true') + end + end + + context 'when pipeline.has_kubernetes_active? is false' do + before do + allow(pipeline).to receive(:has_kubernetes_active?).and_return(false) + end + + it 'is not included' do + expect(subject.to_hash).not_to have_key('CI_KUBERNETES_ACTIVE') + end + end + end + + describe 'variable CI_GITLAB_FIPS_MODE' do + context 'when FIPS flag is enabled' do + before do + allow(Gitlab::FIPS).to receive(:enabled?).and_return(true) + end + + it "is included with value 'true'" do + expect(subject.to_hash).to include('CI_GITLAB_FIPS_MODE' => 'true') + end + end + + context 'when FIPS flag is disabled' do + before do + allow(Gitlab::FIPS).to receive(:enabled?).and_return(false) + end + + it 'is not included' do + expect(subject.to_hash).not_to have_key('CI_GITLAB_FIPS_MODE') + end + end + end + + context 'when tag is not found' do + let(:pipeline) do + create(:ci_pipeline, project: project, ref: 'not_found_tag', tag: true) + end + + it 'does not expose tag variables' do + expect(subject.to_hash.keys) + .not_to include( + 'CI_COMMIT_TAG', + 'CI_COMMIT_TAG_MESSAGE', + 'CI_BUILD_TAG' + ) + end + end + + context 'without a commit' do + let(:pipeline) { build(:ci_empty_pipeline, :created, sha: nil) } + + it 'does not expose commit variables' do + expect(subject.to_hash.keys) + .not_to include( + 'CI_COMMIT_SHA', + 'CI_COMMIT_SHORT_SHA', + 'CI_COMMIT_BEFORE_SHA', + 'CI_COMMIT_REF_NAME', + 'CI_COMMIT_REF_SLUG', + 'CI_COMMIT_BRANCH', + 'CI_COMMIT_TAG', + 'CI_COMMIT_MESSAGE', + 'CI_COMMIT_TITLE', + 'CI_COMMIT_DESCRIPTION', + 'CI_COMMIT_REF_PROTECTED', + 'CI_COMMIT_TIMESTAMP', + 'CI_COMMIT_AUTHOR') + end + end + end +end diff --git a/spec/lib/gitlab/ci/variables/builder_spec.rb b/spec/lib/gitlab/ci/variables/builder_spec.rb index 5aa752ee429..bbd3dc54e6a 100644 --- 
a/spec/lib/gitlab/ci/variables/builder_spec.rb +++ b/spec/lib/gitlab/ci/variables/builder_spec.rb @@ -166,8 +166,14 @@ RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache, featur end before do + pipeline_variables_builder = double( + ::Gitlab::Ci::Variables::Builder::Pipeline, + predefined_variables: [var('C', 3), var('D', 3)] + ) + allow(builder).to receive(:predefined_variables) { [var('A', 1), var('B', 1)] } allow(pipeline.project).to receive(:predefined_variables) { [var('B', 2), var('C', 2)] } + allow(builder).to receive(:pipeline_variables_builder) { pipeline_variables_builder } allow(pipeline).to receive(:predefined_variables) { [var('C', 3), var('D', 3)] } allow(job).to receive(:runner) { double(predefined_variables: [var('D', 4), var('E', 4)]) } allow(builder).to receive(:kubernetes_variables) { [var('E', 5), var('F', 5)] } @@ -635,8 +641,13 @@ RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache, featur end before do + pipeline_variables_builder = double( + ::Gitlab::Ci::Variables::Builder::Pipeline, + predefined_variables: [var('B', 2), var('C', 2)] + ) + allow(pipeline.project).to receive(:predefined_variables) { [var('A', 1), var('B', 1)] } - allow(pipeline).to receive(:predefined_variables) { [var('B', 2), var('C', 2)] } + allow(builder).to receive(:pipeline_variables_builder) { pipeline_variables_builder } allow(builder).to receive(:secret_instance_variables) { [var('C', 3), var('D', 3)] } allow(builder).to receive(:secret_group_variables) { [var('D', 4), var('E', 4)] } allow(builder).to receive(:secret_project_variables) { [var('E', 5), var('F', 5)] } diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb index b9f65ff749d..360686ce65c 100644 --- a/spec/lib/gitlab/ci/yaml_processor_spec.rb +++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb @@ -1503,7 +1503,7 @@ module Gitlab end context "when the included internal file is not present" do - it_behaves_like 'returns 
errors', "Local file `/local.gitlab-ci.yml` does not exist!" + it_behaves_like 'returns errors', "Local file `local.gitlab-ci.yml` does not exist!" end end end diff --git a/spec/lib/gitlab/config/loader/multi_doc_yaml_spec.rb b/spec/lib/gitlab/config/loader/multi_doc_yaml_spec.rb new file mode 100644 index 00000000000..bae98f9bc35 --- /dev/null +++ b/spec/lib/gitlab/config/loader/multi_doc_yaml_spec.rb @@ -0,0 +1,99 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Config::Loader::MultiDocYaml, feature_category: :pipeline_authoring do + let(:loader) { described_class.new(yml, max_documents: 2) } + + describe '#load!' do + let(:yml) do + <<~YAML + spec: + inputs: + test_input: + --- + test_job: + script: echo "$[[ inputs.test_input ]]" + YAML + end + + it 'returns the loaded YAML with all keys as symbols' do + expect(loader.load!).to eq([ + { spec: { inputs: { test_input: nil } } }, + { test_job: { script: 'echo "$[[ inputs.test_input ]]"' } } + ]) + end + + context 'when the YAML file is empty' do + let(:yml) { '' } + + it 'returns an empty array' do + expect(loader.load!).to be_empty + end + end + + context 'when the parsed YAML is too big' do + let(:yml) do + <<~YAML + a: &a ["lol","lol","lol","lol","lol","lol","lol","lol","lol"] + b: &b [*a,*a,*a,*a,*a,*a,*a,*a,*a] + c: &c [*b,*b,*b,*b,*b,*b,*b,*b,*b] + d: &d [*c,*c,*c,*c,*c,*c,*c,*c,*c] + e: &e [*d,*d,*d,*d,*d,*d,*d,*d,*d] + f: &f [*e,*e,*e,*e,*e,*e,*e,*e,*e] + g: &g [*f,*f,*f,*f,*f,*f,*f,*f,*f] + h: &h [*g,*g,*g,*g,*g,*g,*g,*g,*g] + i: &i [*h,*h,*h,*h,*h,*h,*h,*h,*h] + --- + a: &a ["lol","lol","lol","lol","lol","lol","lol","lol","lol"] + b: &b [*a,*a,*a,*a,*a,*a,*a,*a,*a] + c: &c [*b,*b,*b,*b,*b,*b,*b,*b,*b] + d: &d [*c,*c,*c,*c,*c,*c,*c,*c,*c] + e: &e [*d,*d,*d,*d,*d,*d,*d,*d,*d] + f: &f [*e,*e,*e,*e,*e,*e,*e,*e,*e] + g: &g [*f,*f,*f,*f,*f,*f,*f,*f,*f] + h: &h [*g,*g,*g,*g,*g,*g,*g,*g,*g] + i: &i [*h,*h,*h,*h,*h,*h,*h,*h,*h] + YAML + end + + it 'raises a DataTooLargeError' 
do + expect { loader.load! }.to raise_error(described_class::DataTooLargeError, 'The parsed YAML is too big') + end + end + + context 'when a document is not a hash' do + let(:yml) do + <<~YAML + not_a_hash + --- + test_job: + script: echo "$[[ inputs.test_input ]]" + YAML + end + + it 'raises a NotHashError' do + expect { loader.load! }.to raise_error(described_class::NotHashError, 'Invalid configuration format') + end + end + + context 'when there are too many documents' do + let(:yml) do + <<~YAML + a: b + --- + c: d + --- + e: f + YAML + end + + it 'raises a TooManyDocumentsError' do + expect { loader.load! }.to raise_error( + described_class::TooManyDocumentsError, + 'The parsed YAML has too many documents' + ) + end + end + end +end diff --git a/spec/lib/gitlab/config/loader/yaml_spec.rb b/spec/lib/gitlab/config/loader/yaml_spec.rb index c7f84cd583c..346424d1681 100644 --- a/spec/lib/gitlab/config/loader/yaml_spec.rb +++ b/spec/lib/gitlab/config/loader/yaml_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Config::Loader::Yaml do +RSpec.describe Gitlab::Config::Loader::Yaml, feature_category: :pipeline_authoring do let(:loader) { described_class.new(yml) } let(:yml) do diff --git a/spec/lib/gitlab/config_checker/external_database_checker_spec.rb b/spec/lib/gitlab/config_checker/external_database_checker_spec.rb index 963c9fe1576..c962b9ad393 100644 --- a/spec/lib/gitlab/config_checker/external_database_checker_spec.rb +++ b/spec/lib/gitlab/config_checker/external_database_checker_spec.rb @@ -43,7 +43,7 @@ RSpec.describe Gitlab::ConfigChecker::ExternalDatabaseChecker do context 'with a multiple database' do before do - skip_if_multiple_databases_not_setup + skip_if_multiple_databases_not_setup(:ci) end context 'when both databases meets minimum supported version' do diff --git a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb index 88bffd41947..f298890623f 100644 
--- a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb +++ b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb @@ -94,13 +94,31 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do it 'adds CDN host to CSP' do expect(directives['script_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.script_src + " https://cdn.example.com") - expect(directives['style_src']).to eq("'self' 'unsafe-inline' https://cdn.example.com") + expect(directives['style_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.style_src + " https://cdn.example.com") expect(directives['font_src']).to eq("'self' https://cdn.example.com") expect(directives['worker_src']).to eq('http://localhost/assets/ blob: data: https://cdn.example.com') expect(directives['frame_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.frame_src + " https://cdn.example.com http://localhost/admin/ http://localhost/assets/ http://localhost/-/speedscope/index.html http://localhost/-/sandbox/") end end + describe 'Zuora directives' do + context 'when is Gitlab.com?' do + before do + allow(::Gitlab).to receive(:com?).and_return(true) + end + + it 'adds Zuora host to CSP' do + expect(directives['frame_src']).to include('https://*.zuora.com/apps/PublicHostedPageLite.do') + end + end + + context 'when is not Gitlab.com?' 
do + it 'does not add Zuora host to CSP' do + expect(directives['frame_src']).not_to include('https://*.zuora.com/apps/PublicHostedPageLite.do') + end + end + end + context 'when sentry is configured' do let(:legacy_dsn) { 'dummy://abc@legacy-sentry.example.com/1' } let(:dsn) { 'dummy://def@sentry.example.com/2' } diff --git a/spec/lib/gitlab/contributions_calendar_spec.rb b/spec/lib/gitlab/contributions_calendar_spec.rb index 3736914669a..326e27fa716 100644 --- a/spec/lib/gitlab/contributions_calendar_spec.rb +++ b/spec/lib/gitlab/contributions_calendar_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::ContributionsCalendar, feature_category: :users do +RSpec.describe Gitlab::ContributionsCalendar, feature_category: :user_profile do let_it_be_with_reload(:contributor) { create(:user) } let_it_be_with_reload(:user) { create(:user) } let(:travel_time) { nil } diff --git a/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb b/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb index 92ffeee8509..f85fb1540d9 100644 --- a/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb +++ b/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::CycleAnalytics::StageSummary do +RSpec.describe Gitlab::CycleAnalytics::StageSummary, feature_category: :devops_reports do include CycleAnalyticsHelpers let_it_be(:project) { create(:project, :repository) } diff --git a/spec/lib/gitlab/database/indexing_exclusive_lease_guard_spec.rb b/spec/lib/gitlab/database/async_ddl_exclusive_lease_guard_spec.rb index ddc9cdee92f..60ccf5ec685 100644 --- a/spec/lib/gitlab/database/indexing_exclusive_lease_guard_spec.rb +++ b/spec/lib/gitlab/database/async_ddl_exclusive_lease_guard_spec.rb @@ -2,25 +2,25 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::IndexingExclusiveLeaseGuard, feature_category: :database do +RSpec.describe Gitlab::Database::AsyncDdlExclusiveLeaseGuard, feature_category: :database do 
let(:helper_class) do Class.new do - include Gitlab::Database::IndexingExclusiveLeaseGuard + include Gitlab::Database::AsyncDdlExclusiveLeaseGuard - attr_reader :connection + attr_reader :connection_db_config - def initialize(connection) - @connection = connection + def initialize(connection_db_config) + @connection_db_config = connection_db_config end end end describe '#lease_key' do - let(:helper) { helper_class.new(connection) } - let(:lease_key) { "gitlab/database/indexing/actions/#{database_name}" } + let(:helper) { helper_class.new(connection_db_config) } + let(:lease_key) { "gitlab/database/asyncddl/actions/#{database_name}" } context 'with CI database connection' do - let(:connection) { Ci::ApplicationRecord.connection } + let(:connection_db_config) { Ci::ApplicationRecord.connection_db_config } let(:database_name) { Gitlab::Database::CI_DATABASE_NAME } before do @@ -31,7 +31,7 @@ RSpec.describe Gitlab::Database::IndexingExclusiveLeaseGuard, feature_category: end context 'with MAIN database connection' do - let(:connection) { ApplicationRecord.connection } + let(:connection_db_config) { ApplicationRecord.connection_db_config } let(:database_name) { Gitlab::Database::MAIN_DATABASE_NAME } it { expect(helper.lease_key).to eq(lease_key) } diff --git a/spec/lib/gitlab/database/async_foreign_keys/foreign_key_validator_spec.rb b/spec/lib/gitlab/database/async_foreign_keys/foreign_key_validator_spec.rb new file mode 100644 index 00000000000..90137e259f5 --- /dev/null +++ b/spec/lib/gitlab/database/async_foreign_keys/foreign_key_validator_spec.rb @@ -0,0 +1,152 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::AsyncForeignKeys::ForeignKeyValidator, feature_category: :database do + include ExclusiveLeaseHelpers + + describe '#perform' do + let!(:lease) { stub_exclusive_lease(lease_key, :uuid, timeout: lease_timeout) } + let(:lease_key) { "gitlab/database/asyncddl/actions/#{Gitlab::Database::PRIMARY_DATABASE_NAME}" } + 
let(:lease_timeout) { described_class::TIMEOUT_PER_ACTION } + + let(:fk_model) { Gitlab::Database::AsyncForeignKeys::PostgresAsyncForeignKeyValidation } + let(:table_name) { '_test_async_fks' } + let(:fk_name) { 'fk_parent_id' } + let(:validation) { create(:postgres_async_foreign_key_validation, table_name: table_name, name: fk_name) } + let(:connection) { validation.connection } + + subject { described_class.new(validation) } + + before do + connection.create_table(table_name) do |t| + t.references :parent, foreign_key: { to_table: table_name, validate: false, name: fk_name } + end + end + + it 'validates the FK while controlling statement timeout' do + allow(connection).to receive(:execute).and_call_original + expect(connection).to receive(:execute) + .with("SET statement_timeout TO '43200s'").ordered.and_call_original + expect(connection).to receive(:execute) + .with('ALTER TABLE "_test_async_fks" VALIDATE CONSTRAINT "fk_parent_id";').ordered.and_call_original + expect(connection).to receive(:execute) + .with("RESET statement_timeout").ordered.and_call_original + + subject.perform + end + + context 'with fully qualified table names' do + let(:validation) do + create(:postgres_async_foreign_key_validation, + table_name: "public.#{table_name}", + name: fk_name + ) + end + + it 'validates the FK' do + allow(connection).to receive(:execute).and_call_original + + expect(connection).to receive(:execute) + .with('ALTER TABLE "public"."_test_async_fks" VALIDATE CONSTRAINT "fk_parent_id";').ordered.and_call_original + + subject.perform + end + end + + it 'removes the FK validation record from table' do + expect(validation).to receive(:destroy!).and_call_original + + expect { subject.perform }.to change { fk_model.count }.by(-1) + end + + it 'skips logic if not able to acquire exclusive lease' do + expect(lease).to receive(:try_obtain).ordered.and_return(false) + expect(connection).not_to receive(:execute).with(/ALTER TABLE/) + expect(validation).not_to receive(:destroy!) 
+ + expect { subject.perform }.not_to change { fk_model.count } + end + + it 'logs messages around execution' do + allow(Gitlab::AppLogger).to receive(:info).and_call_original + + subject.perform + + expect(Gitlab::AppLogger) + .to have_received(:info) + .with(a_hash_including(message: 'Starting to validate foreign key')) + + expect(Gitlab::AppLogger) + .to have_received(:info) + .with(a_hash_including(message: 'Finished validating foreign key')) + end + + context 'when the FK does not exist' do + before do + connection.create_table(table_name, force: true) + end + + it 'skips validation and removes the record' do + expect(connection).not_to receive(:execute).with(/ALTER TABLE/) + + expect { subject.perform }.to change { fk_model.count }.by(-1) + end + + it 'logs an appropriate message' do + expected_message = "Skipping #{fk_name} validation since it does not exist. The queuing entry will be deleted" + + allow(Gitlab::AppLogger).to receive(:info).and_call_original + + subject.perform + + expect(Gitlab::AppLogger) + .to have_received(:info) + .with(a_hash_including(message: expected_message)) + end + end + + context 'with error handling' do + before do + allow(connection).to receive(:execute).and_call_original + + allow(connection).to receive(:execute) + .with('ALTER TABLE "_test_async_fks" VALIDATE CONSTRAINT "fk_parent_id";') + .and_raise(ActiveRecord::StatementInvalid) + end + + context 'on production' do + before do + allow(Gitlab::ErrorTracking).to receive(:should_raise_for_dev?).and_return(false) + end + + it 'increases execution attempts' do + expect { subject.perform }.to change { validation.attempts }.by(1) + + expect(validation.last_error).to be_present + expect(validation).not_to be_destroyed + end + + it 'logs an error message including the fk_name' do + expect(Gitlab::AppLogger) + .to receive(:error) + .with(a_hash_including(:message, :fk_name)) + .and_call_original + + subject.perform + end + end + + context 'on development' do + it 'also raises 
errors' do + expect { subject.perform } + .to raise_error(ActiveRecord::StatementInvalid) + .and change { validation.attempts }.by(1) + + expect(validation.last_error).to be_present + expect(validation).not_to be_destroyed + end + end + end + end +end diff --git a/spec/lib/gitlab/database/async_foreign_keys/migration_helpers_spec.rb b/spec/lib/gitlab/database/async_foreign_keys/migration_helpers_spec.rb new file mode 100644 index 00000000000..0bd0e8045ff --- /dev/null +++ b/spec/lib/gitlab/database/async_foreign_keys/migration_helpers_spec.rb @@ -0,0 +1,167 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::AsyncForeignKeys::MigrationHelpers, feature_category: :database do + let(:migration) { Gitlab::Database::Migration[2.1].new } + let(:connection) { ApplicationRecord.connection } + let(:fk_model) { Gitlab::Database::AsyncForeignKeys::PostgresAsyncForeignKeyValidation } + let(:table_name) { '_test_async_fks' } + let(:column_name) { 'parent_id' } + let(:fk_name) { nil } + + context 'with regular tables' do + before do + allow(migration).to receive(:puts) + allow(migration.connection).to receive(:transaction_open?).and_return(false) + + connection.create_table(table_name) do |t| + t.integer column_name + end + + migration.add_concurrent_foreign_key( + table_name, table_name, + column: column_name, validate: false, name: fk_name) + end + + describe '#prepare_async_foreign_key_validation' do + it 'creates the record for the async FK validation' do + expect do + migration.prepare_async_foreign_key_validation(table_name, column_name) + end.to change { fk_model.where(table_name: table_name).count }.by(1) + + record = fk_model.find_by(table_name: table_name) + + expect(record.name).to start_with('fk_') + end + + context 'when an explicit name is given' do + let(:fk_name) { 'my_fk_name' } + + it 'creates the record with the given name' do + expect do + migration.prepare_async_foreign_key_validation(table_name, name: fk_name) + 
end.to change { fk_model.where(name: fk_name).count }.by(1) + + record = fk_model.find_by(name: fk_name) + + expect(record.table_name).to eq(table_name) + end + end + + context 'when the FK does not exist' do + it 'returns an error' do + expect do + migration.prepare_async_foreign_key_validation(table_name, name: 'no_fk') + end.to raise_error RuntimeError, /Could not find foreign key "no_fk" on table "_test_async_fks"/ + end + end + + context 'when the record already exists' do + let(:fk_name) { 'my_fk_name' } + + it 'does attempt to create the record' do + create(:postgres_async_foreign_key_validation, table_name: table_name, name: fk_name) + + expect do + migration.prepare_async_foreign_key_validation(table_name, name: fk_name) + end.not_to change { fk_model.where(name: fk_name).count } + end + end + + context 'when the async FK validation table does not exist' do + it 'does not raise an error' do + connection.drop_table(:postgres_async_foreign_key_validations) + + expect(fk_model).not_to receive(:safe_find_or_create_by!) 
+ + expect { migration.prepare_async_foreign_key_validation(table_name, column_name) }.not_to raise_error + end + end + end + + describe '#unprepare_async_foreign_key_validation' do + before do + migration.prepare_async_foreign_key_validation(table_name, column_name, name: fk_name) + end + + it 'destroys the record' do + expect do + migration.unprepare_async_foreign_key_validation(table_name, column_name) + end.to change { fk_model.where(table_name: table_name).count }.by(-1) + end + + context 'when an explicit name is given' do + let(:fk_name) { 'my_test_async_fk' } + + it 'destroys the record' do + expect do + migration.unprepare_async_foreign_key_validation(table_name, name: fk_name) + end.to change { fk_model.where(name: fk_name).count }.by(-1) + end + end + + context 'when the async fk validation table does not exist' do + it 'does not raise an error' do + connection.drop_table(:postgres_async_foreign_key_validations) + + expect(fk_model).not_to receive(:find_by) + + expect { migration.unprepare_async_foreign_key_validation(table_name, column_name) }.not_to raise_error + end + end + end + end + + context 'with partitioned tables' do + let(:partition_schema) { 'gitlab_partitions_dynamic' } + let(:partition1_name) { "#{partition_schema}.#{table_name}_202001" } + let(:partition2_name) { "#{partition_schema}.#{table_name}_202002" } + let(:fk_name) { 'my_partitioned_fk_name' } + + before do + connection.execute(<<~SQL) + CREATE TABLE #{table_name} ( + id serial NOT NULL, + #{column_name} int NOT NULL, + created_at timestamptz NOT NULL, + PRIMARY KEY (id, created_at) + ) PARTITION BY RANGE (created_at); + + CREATE TABLE #{partition1_name} PARTITION OF #{table_name} + FOR VALUES FROM ('2020-01-01') TO ('2020-02-01'); + + CREATE TABLE #{partition2_name} PARTITION OF #{table_name} + FOR VALUES FROM ('2020-02-01') TO ('2020-03-01'); + SQL + end + + describe '#prepare_partitioned_async_foreign_key_validation' do + it 'delegates to prepare_async_foreign_key_validation for 
each partition' do + expect(migration) + .to receive(:prepare_async_foreign_key_validation) + .with(partition1_name, column_name, name: fk_name) + + expect(migration) + .to receive(:prepare_async_foreign_key_validation) + .with(partition2_name, column_name, name: fk_name) + + migration.prepare_partitioned_async_foreign_key_validation(table_name, column_name, name: fk_name) + end + end + + describe '#unprepare_partitioned_async_foreign_key_validation' do + it 'delegates to unprepare_async_foreign_key_validation for each partition' do + expect(migration) + .to receive(:unprepare_async_foreign_key_validation) + .with(partition1_name, column_name, name: fk_name) + + expect(migration) + .to receive(:unprepare_async_foreign_key_validation) + .with(partition2_name, column_name, name: fk_name) + + migration.unprepare_partitioned_async_foreign_key_validation(table_name, column_name, name: fk_name) + end + end + end +end diff --git a/spec/lib/gitlab/database/async_foreign_keys/postgres_async_foreign_key_validation_spec.rb b/spec/lib/gitlab/database/async_foreign_keys/postgres_async_foreign_key_validation_spec.rb new file mode 100644 index 00000000000..ba201d93f52 --- /dev/null +++ b/spec/lib/gitlab/database/async_foreign_keys/postgres_async_foreign_key_validation_spec.rb @@ -0,0 +1,52 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::AsyncForeignKeys::PostgresAsyncForeignKeyValidation, type: :model, + feature_category: :database do + it { is_expected.to be_a Gitlab::Database::SharedModel } + + describe 'validations' do + let_it_be(:fk_validation) { create(:postgres_async_foreign_key_validation) } + let(:identifier_limit) { described_class::MAX_IDENTIFIER_LENGTH } + let(:last_error_limit) { described_class::MAX_LAST_ERROR_LENGTH } + + subject { fk_validation } + + it { is_expected.to validate_presence_of(:name) } + it { is_expected.to validate_uniqueness_of(:name) } + it { is_expected.to 
validate_length_of(:name).is_at_most(identifier_limit) } + it { is_expected.to validate_presence_of(:table_name) } + it { is_expected.to validate_length_of(:table_name).is_at_most(identifier_limit) } + it { is_expected.to validate_length_of(:last_error).is_at_most(last_error_limit) } + end + + describe 'scopes' do + let!(:failed_validation) { create(:postgres_async_foreign_key_validation, attempts: 1) } + let!(:new_validation) { create(:postgres_async_foreign_key_validation) } + + describe '.ordered' do + subject { described_class.ordered } + + it { is_expected.to eq([new_validation, failed_validation]) } + end + end + + describe '#handle_exception!' do + let_it_be_with_reload(:fk_validation) { create(:postgres_async_foreign_key_validation) } + + let(:error) { instance_double(StandardError, message: 'Oups', backtrace: %w[this that]) } + + subject { fk_validation.handle_exception!(error) } + + it 'increases the attempts number' do + expect { subject }.to change { fk_validation.reload.attempts }.by(1) + end + + it 'saves error details' do + subject + + expect(fk_validation.reload.last_error).to eq("Oups\nthis\nthat") + end + end +end diff --git a/spec/lib/gitlab/database/async_foreign_keys_spec.rb b/spec/lib/gitlab/database/async_foreign_keys_spec.rb new file mode 100644 index 00000000000..f15eb364929 --- /dev/null +++ b/spec/lib/gitlab/database/async_foreign_keys_spec.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::AsyncForeignKeys, feature_category: :database do + describe '.validate_pending_entries!' do + subject { described_class.validate_pending_entries! 
} + + before do + create_list(:postgres_async_foreign_key_validation, 3) + end + + it 'takes 2 pending FK validations and executes them' do + validations = described_class::PostgresAsyncForeignKeyValidation.ordered.limit(2).to_a + + expect_next_instances_of(described_class::ForeignKeyValidator, 2, validations) do |validator| + expect(validator).to receive(:perform) + end + + subject + end + end +end diff --git a/spec/lib/gitlab/database/async_indexes/index_base_spec.rb b/spec/lib/gitlab/database/async_indexes/index_base_spec.rb new file mode 100644 index 00000000000..d6070ff215e --- /dev/null +++ b/spec/lib/gitlab/database/async_indexes/index_base_spec.rb @@ -0,0 +1,81 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::AsyncIndexes::IndexBase, feature_category: :database do + describe '#perform' do + subject { described_class.new(async_index) } + + let(:async_index) { create(:postgres_async_index) } + let(:model) { Gitlab::Database.database_base_models[Gitlab::Database::PRIMARY_DATABASE_NAME] } + let(:connection) { model.connection } + + around do |example| + Gitlab::Database::SharedModel.using_connection(connection) do + example.run + end + end + + describe '#preconditions_met?' do + it 'raises errors if preconditions is not defined' do + expect { subject.perform }.to raise_error NotImplementedError, 'must implement preconditions_met?' 
+ end + end + + describe '#action_type' do + before do + allow(subject).to receive(:preconditions_met?).and_return(true) + end + + it 'raises errors if action_type is not defined' do + expect { subject.perform }.to raise_error NotImplementedError, 'must implement action_type' + end + end + + context 'with error handling' do + before do + allow(subject).to receive(:preconditions_met?).and_return(true) + allow(subject).to receive(:action_type).and_return('test') + allow(async_index.connection).to receive(:execute).and_call_original + + allow(async_index.connection) + .to receive(:execute) + .with(async_index.definition) + .and_raise(ActiveRecord::StatementInvalid) + end + + context 'on production' do + before do + allow(Gitlab::ErrorTracking).to receive(:should_raise_for_dev?).and_return(false) + end + + it 'increases execution attempts' do + expect { subject.perform }.to change { async_index.attempts }.by(1) + + expect(async_index.last_error).to be_present + expect(async_index).not_to be_destroyed + end + + it 'logs an error message including the index_name' do + expect(Gitlab::AppLogger) + .to receive(:error) + .with(a_hash_including(:message, :index_name)) + .and_call_original + + subject.perform + end + end + + context 'on development' do + it 'also raises errors' do + expect { subject.perform } + .to raise_error(ActiveRecord::StatementInvalid) + .and change { async_index.attempts }.by(1) + + expect(async_index.last_error).to be_present + expect(async_index).not_to be_destroyed + end + end + end + end +end diff --git a/spec/lib/gitlab/database/async_indexes/index_creator_spec.rb b/spec/lib/gitlab/database/async_indexes/index_creator_spec.rb index 207aedd1a38..51a09ba0b5e 100644 --- a/spec/lib/gitlab/database/async_indexes/index_creator_spec.rb +++ b/spec/lib/gitlab/database/async_indexes/index_creator_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::AsyncIndexes::IndexCreator do +RSpec.describe 
Gitlab::Database::AsyncIndexes::IndexCreator, feature_category: :database do include ExclusiveLeaseHelpers describe '#perform' do @@ -16,7 +16,7 @@ RSpec.describe Gitlab::Database::AsyncIndexes::IndexCreator do let(:connection) { model.connection } let!(:lease) { stub_exclusive_lease(lease_key, :uuid, timeout: lease_timeout) } - let(:lease_key) { "gitlab/database/indexing/actions/#{Gitlab::Database::PRIMARY_DATABASE_NAME}" } + let(:lease_key) { "gitlab/database/asyncddl/actions/#{Gitlab::Database::PRIMARY_DATABASE_NAME}" } let(:lease_timeout) { described_class::TIMEOUT_PER_ACTION } around do |example| @@ -35,6 +35,24 @@ RSpec.describe Gitlab::Database::AsyncIndexes::IndexCreator do subject.perform end + + it 'removes the index preparation record from postgres_async_indexes' do + expect(async_index).to receive(:destroy!).and_call_original + + expect { subject.perform }.to change { index_model.count }.by(-1) + end + + it 'logs an appropriate message' do + expected_message = 'Skipping index creation since preconditions are not met. 
The queuing entry will be deleted' + + allow(Gitlab::AppLogger).to receive(:info).and_call_original + + subject.perform + + expect(Gitlab::AppLogger) + .to have_received(:info) + .with(a_hash_including(message: expected_message)) + end end it 'creates the index while controlling statement timeout' do @@ -47,7 +65,7 @@ RSpec.describe Gitlab::Database::AsyncIndexes::IndexCreator do end it 'removes the index preparation record from postgres_async_indexes' do - expect(async_index).to receive(:destroy).and_call_original + expect(async_index).to receive(:destroy!).and_call_original expect { subject.perform }.to change { index_model.count }.by(-1) end @@ -55,9 +73,23 @@ RSpec.describe Gitlab::Database::AsyncIndexes::IndexCreator do it 'skips logic if not able to acquire exclusive lease' do expect(lease).to receive(:try_obtain).ordered.and_return(false) expect(connection).not_to receive(:execute).with(/CREATE INDEX/) - expect(async_index).not_to receive(:destroy) + expect(async_index).not_to receive(:destroy!) 
expect { subject.perform }.not_to change { index_model.count } end + + it 'logs messages around execution' do + allow(Gitlab::AppLogger).to receive(:info).and_call_original + + subject.perform + + expect(Gitlab::AppLogger) + .to have_received(:info) + .with(a_hash_including(message: 'Starting async index creation')) + + expect(Gitlab::AppLogger) + .to have_received(:info) + .with(a_hash_including(message: 'Finished async index creation')) + end end end diff --git a/spec/lib/gitlab/database/async_indexes/index_destructor_spec.rb b/spec/lib/gitlab/database/async_indexes/index_destructor_spec.rb index 11039ad4f7e..7f0febdcacd 100644 --- a/spec/lib/gitlab/database/async_indexes/index_destructor_spec.rb +++ b/spec/lib/gitlab/database/async_indexes/index_destructor_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::AsyncIndexes::IndexDestructor do +RSpec.describe Gitlab::Database::AsyncIndexes::IndexDestructor, feature_category: :database do include ExclusiveLeaseHelpers describe '#perform' do @@ -16,7 +16,7 @@ RSpec.describe Gitlab::Database::AsyncIndexes::IndexDestructor do let(:connection) { model.connection } let!(:lease) { stub_exclusive_lease(lease_key, :uuid, timeout: lease_timeout) } - let(:lease_key) { "gitlab/database/indexing/actions/#{Gitlab::Database::PRIMARY_DATABASE_NAME}" } + let(:lease_key) { "gitlab/database/asyncddl/actions/#{Gitlab::Database::PRIMARY_DATABASE_NAME}" } let(:lease_timeout) { described_class::TIMEOUT_PER_ACTION } before do @@ -39,6 +39,24 @@ RSpec.describe Gitlab::Database::AsyncIndexes::IndexDestructor do subject.perform end + + it 'removes the index preparation record from postgres_async_indexes' do + expect(async_index).to receive(:destroy!).and_call_original + + expect { subject.perform }.to change { index_model.count }.by(-1) + end + + it 'logs an appropriate message' do + expected_message = 'Skipping index removal since preconditions are not met. 
The queuing entry will be deleted' + + allow(Gitlab::AppLogger).to receive(:info).and_call_original + + subject.perform + + expect(Gitlab::AppLogger) + .to have_received(:info) + .with(a_hash_including(message: expected_message)) + end end it 'creates the index while controlling lock timeout' do @@ -53,7 +71,7 @@ RSpec.describe Gitlab::Database::AsyncIndexes::IndexDestructor do end it 'removes the index preparation record from postgres_async_indexes' do - expect(async_index).to receive(:destroy).and_call_original + expect(async_index).to receive(:destroy!).and_call_original expect { subject.perform }.to change { index_model.count }.by(-1) end @@ -61,9 +79,23 @@ RSpec.describe Gitlab::Database::AsyncIndexes::IndexDestructor do it 'skips logic if not able to acquire exclusive lease' do expect(lease).to receive(:try_obtain).ordered.and_return(false) expect(connection).not_to receive(:execute).with(/DROP INDEX/) - expect(async_index).not_to receive(:destroy) + expect(async_index).not_to receive(:destroy!) 
expect { subject.perform }.not_to change { index_model.count } end + + it 'logs messages around execution' do + allow(Gitlab::AppLogger).to receive(:info).and_call_original + + subject.perform + + expect(Gitlab::AppLogger) + .to have_received(:info) + .with(a_hash_including(message: 'Starting async index removal')) + + expect(Gitlab::AppLogger) + .to have_received(:info) + .with(a_hash_including(message: 'Finished async index removal')) + end end end diff --git a/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb b/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb index 52f5e37eff2..7c5c368fcb5 100644 --- a/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb +++ b/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::AsyncIndexes::MigrationHelpers do +RSpec.describe Gitlab::Database::AsyncIndexes::MigrationHelpers, feature_category: :database do let(:migration) { ActiveRecord::Migration.new.extend(described_class) } let(:index_model) { Gitlab::Database::AsyncIndexes::PostgresAsyncIndex } let(:connection) { ApplicationRecord.connection } diff --git a/spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb b/spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb index 806d57af4b3..5e9d4f78a4a 100644 --- a/spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb +++ b/spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb @@ -2,12 +2,13 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::AsyncIndexes::PostgresAsyncIndex, type: :model do +RSpec.describe Gitlab::Database::AsyncIndexes::PostgresAsyncIndex, type: :model, feature_category: :database do it { is_expected.to be_a Gitlab::Database::SharedModel } describe 'validations' do let(:identifier_limit) { described_class::MAX_IDENTIFIER_LENGTH } let(:definition_limit) { described_class::MAX_DEFINITION_LENGTH } + let(:last_error_limit) { 
described_class::MAX_LAST_ERROR_LENGTH } it { is_expected.to validate_presence_of(:name) } it { is_expected.to validate_length_of(:name).is_at_most(identifier_limit) } @@ -15,11 +16,12 @@ RSpec.describe Gitlab::Database::AsyncIndexes::PostgresAsyncIndex, type: :model it { is_expected.to validate_length_of(:table_name).is_at_most(identifier_limit) } it { is_expected.to validate_presence_of(:definition) } it { is_expected.to validate_length_of(:definition).is_at_most(definition_limit) } + it { is_expected.to validate_length_of(:last_error).is_at_most(last_error_limit) } end describe 'scopes' do - let!(:async_index_creation) { create(:postgres_async_index) } - let!(:async_index_destruction) { create(:postgres_async_index, :with_drop) } + let_it_be(:async_index_creation) { create(:postgres_async_index) } + let_it_be(:async_index_destruction) { create(:postgres_async_index, :with_drop) } describe '.to_create' do subject { described_class.to_create } @@ -32,5 +34,33 @@ RSpec.describe Gitlab::Database::AsyncIndexes::PostgresAsyncIndex, type: :model it { is_expected.to contain_exactly(async_index_destruction) } end + + describe '.ordered' do + before do + async_index_creation.update!(attempts: 3) + end + + subject { described_class.ordered.limit(1) } + + it { is_expected.to contain_exactly(async_index_destruction) } + end + end + + describe '#handle_exception!' 
do + let_it_be_with_reload(:async_index_creation) { create(:postgres_async_index) } + + let(:error) { instance_double(StandardError, message: 'Oups', backtrace: %w[this that]) } + + subject { async_index_creation.handle_exception!(error) } + + it 'increases the attempts number' do + expect { subject }.to change { async_index_creation.reload.attempts }.by(1) + end + + it 'saves error details' do + subject + + expect(async_index_creation.reload.last_error).to eq("Oups\nthis\nthat") + end end end diff --git a/spec/lib/gitlab/database/async_indexes_spec.rb b/spec/lib/gitlab/database/async_indexes_spec.rb index 8a5509f892f..c6991bf4e06 100644 --- a/spec/lib/gitlab/database/async_indexes_spec.rb +++ b/spec/lib/gitlab/database/async_indexes_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::AsyncIndexes do +RSpec.describe Gitlab::Database::AsyncIndexes, feature_category: :database do describe '.create_pending_indexes!' do subject { described_class.create_pending_indexes! 
} @@ -11,9 +11,9 @@ RSpec.describe Gitlab::Database::AsyncIndexes do end it 'takes 2 pending indexes and creates those' do - Gitlab::Database::AsyncIndexes::PostgresAsyncIndex.to_create.order(:id).limit(2).each do |index| - creator = double('index creator') - expect(Gitlab::Database::AsyncIndexes::IndexCreator).to receive(:new).with(index).and_return(creator) + indexes = described_class::PostgresAsyncIndex.to_create.order(:id).limit(2).to_a + + expect_next_instances_of(described_class::IndexCreator, 2, indexes) do |creator| expect(creator).to receive(:perform) end @@ -29,13 +29,56 @@ RSpec.describe Gitlab::Database::AsyncIndexes do end it 'takes 2 pending indexes and destroys those' do - Gitlab::Database::AsyncIndexes::PostgresAsyncIndex.to_drop.order(:id).limit(2).each do |index| - destructor = double('index destructor') - expect(Gitlab::Database::AsyncIndexes::IndexDestructor).to receive(:new).with(index).and_return(destructor) + indexes = described_class::PostgresAsyncIndex.to_drop.order(:id).limit(2).to_a + + expect_next_instances_of(described_class::IndexDestructor, 2, indexes) do |destructor| expect(destructor).to receive(:perform) end subject end end + + describe '.execute_pending_actions!' 
do + subject { described_class.execute_pending_actions!(how_many: how_many) } + + let_it_be(:failed_creation_entry) { create(:postgres_async_index, attempts: 5) } + let_it_be(:failed_removal_entry) { create(:postgres_async_index, :with_drop, attempts: 1) } + let_it_be(:creation_entry) { create(:postgres_async_index) } + let_it_be(:removal_entry) { create(:postgres_async_index, :with_drop) } + + context 'with one entry' do + let(:how_many) { 1 } + + it 'executes instructions ordered by attempts and ids' do + expect { subject } + .to change { queued_entries_exist?(creation_entry) }.to(false) + .and change { described_class::PostgresAsyncIndex.count }.by(-how_many) + end + end + + context 'with two entries' do + let(:how_many) { 2 } + + it 'executes instructions ordered by attempts' do + expect { subject } + .to change { queued_entries_exist?(creation_entry, removal_entry) }.to(false) + .and change { described_class::PostgresAsyncIndex.count }.by(-how_many) + end + end + + context 'when the budget allows more instructions' do + let(:how_many) { 3 } + + it 'retries failed attempts' do + expect { subject } + .to change { queued_entries_exist?(creation_entry, removal_entry, failed_removal_entry) }.to(false) + .and change { described_class::PostgresAsyncIndex.count }.by(-how_many) + end + end + + def queued_entries_exist?(*records) + described_class::PostgresAsyncIndex.where(id: records).exists? 
+ end + end end diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb index 31ae5e9b55d..d132559acea 100644 --- a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb +++ b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb @@ -897,7 +897,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m end it 'doesn not filter by gitlab schemas available for the connection if the column is nor present' do - skip_if_multiple_databases_not_setup + skip_if_multiple_databases_not_setup(:ci) expect(described_class).to receive(:gitlab_schema_column_exists?).and_return(false) diff --git a/spec/lib/gitlab/database/batch_count_spec.rb b/spec/lib/gitlab/database/batch_count_spec.rb index 852cc719d01..53f8fe3dcd2 100644 --- a/spec/lib/gitlab/database/batch_count_spec.rb +++ b/spec/lib/gitlab/database/batch_count_spec.rb @@ -58,10 +58,10 @@ RSpec.describe Gitlab::Database::BatchCount do it 'reduces batch size by half and retry fetch' do too_big_batch_relation_mock = instance_double(ActiveRecord::Relation) allow(model).to receive_message_chain(:select, public_send: relation) - allow(relation).to receive(:where).with("id" => 0..calculate_batch_size(batch_size)).and_return(too_big_batch_relation_mock) + allow(relation).to receive(:where).with({ "id" => 0..calculate_batch_size(batch_size) }).and_return(too_big_batch_relation_mock) allow(too_big_batch_relation_mock).to receive(:send).and_raise(ActiveRecord::QueryCanceled) - expect(relation).to receive(:where).with("id" => 0..calculate_batch_size(batch_size / 2)).and_return(double(send: 1)) + expect(relation).to receive(:where).with({ "id" => 0..calculate_batch_size(batch_size / 2) }).and_return(double(send: 1)) subject.call(model, column, batch_size: batch_size, start: 0) end @@ -146,7 +146,7 @@ RSpec.describe Gitlab::Database::BatchCount do allow(model).to 
receive_message_chain(:select, public_send: relation) batch_end_id = min_id + calculate_batch_size(Gitlab::Database::BatchCounter::DEFAULT_BATCH_SIZE) - expect(relation).to receive(:where).with("id" => min_id..batch_end_id).and_return(double(send: 1)) + expect(relation).to receive(:where).with({ "id" => min_id..batch_end_id }).and_return(double(send: 1)) described_class.batch_count(model) end @@ -382,7 +382,7 @@ RSpec.describe Gitlab::Database::BatchCount do allow(model).to receive_message_chain(:select, public_send: relation) batch_end_id = min_id + calculate_batch_size(Gitlab::Database::BatchCounter::DEFAULT_DISTINCT_BATCH_SIZE) - expect(relation).to receive(:where).with("id" => min_id..batch_end_id).and_return(double(send: 1)) + expect(relation).to receive(:where).with({ "id" => min_id..batch_end_id }).and_return(double(send: 1)) described_class.batch_distinct_count(model) end @@ -468,7 +468,7 @@ RSpec.describe Gitlab::Database::BatchCount do allow(model).to receive_message_chain(:select, public_send: relation) batch_end_id = min_id + calculate_batch_size(Gitlab::Database::BatchCounter::DEFAULT_SUM_BATCH_SIZE) - expect(relation).to receive(:where).with("id" => min_id..batch_end_id).and_return(double(send: 1)) + expect(relation).to receive(:where).with({ "id" => min_id..batch_end_id }).and_return(double(send: 1)) described_class.batch_sum(model, column) end diff --git a/spec/lib/gitlab/database/load_balancing/sticking_spec.rb b/spec/lib/gitlab/database/load_balancing/sticking_spec.rb index 1e316c55786..ff31a5cd6cb 100644 --- a/spec/lib/gitlab/database/load_balancing/sticking_spec.rb +++ b/spec/lib/gitlab/database/load_balancing/sticking_spec.rb @@ -11,304 +11,319 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do Gitlab::Database::LoadBalancing::Session.clear_session end - describe '#stick_or_unstick_request' do - it 'sticks or unsticks a single object and updates the Rack environment' do - expect(sticking) - .to 
receive(:unstick_or_continue_sticking) - .with(:user, 42) - - env = {} - - sticking.stick_or_unstick_request(env, :user, 42) - - expect(env[Gitlab::Database::LoadBalancing::RackMiddleware::STICK_OBJECT].to_a) - .to eq([[sticking, :user, 42]]) + shared_examples 'sticking' do + before do + allow(ActiveRecord::Base.load_balancer) + .to receive(:primary_write_location) + .and_return('foo') end - it 'sticks or unsticks multiple objects and updates the Rack environment' do - expect(sticking) - .to receive(:unstick_or_continue_sticking) - .with(:user, 42) - .ordered + it 'sticks an entity to the primary', :aggregate_failures do + allow(ActiveRecord::Base.load_balancer) + .to receive(:primary_only?) + .and_return(false) - expect(sticking) - .to receive(:unstick_or_continue_sticking) - .with(:runner, '123456789') - .ordered + ids.each do |id| + expect(sticking) + .to receive(:set_write_location_for) + .with(:user, id, 'foo') + end - env = {} + expect(Gitlab::Database::LoadBalancing::Session.current) + .to receive(:use_primary!) - sticking.stick_or_unstick_request(env, :user, 42) - sticking.stick_or_unstick_request(env, :runner, '123456789') + subject + end - expect(env[Gitlab::Database::LoadBalancing::RackMiddleware::STICK_OBJECT].to_a).to eq( - [ - [sticking, :user, 42], - [sticking, :runner, - '123456789'] - ]) + it 'does not update the write location when no replicas are used' do + expect(sticking).not_to receive(:set_write_location_for) + + subject end end - describe '#stick_if_necessary' do - it 'does not stick if no write was performed' do - allow(Gitlab::Database::LoadBalancing::Session.current) - .to receive(:performed_write?) 
- .and_return(false) + shared_examples 'tracking status in redis' do + describe '#stick_or_unstick_request' do + it 'sticks or unsticks a single object and updates the Rack environment' do + expect(sticking) + .to receive(:unstick_or_continue_sticking) + .with(:user, 42) - expect(sticking).not_to receive(:stick) + env = {} - sticking.stick_if_necessary(:user, 42) - end + sticking.stick_or_unstick_request(env, :user, 42) - it 'sticks to the primary if a write was performed' do - allow(Gitlab::Database::LoadBalancing::Session.current) - .to receive(:performed_write?) - .and_return(true) + expect(env[Gitlab::Database::LoadBalancing::RackMiddleware::STICK_OBJECT].to_a) + .to eq([[sticking, :user, 42]]) + end - expect(sticking) - .to receive(:stick) - .with(:user, 42) + it 'sticks or unsticks multiple objects and updates the Rack environment' do + expect(sticking) + .to receive(:unstick_or_continue_sticking) + .with(:user, 42) + .ordered - sticking.stick_if_necessary(:user, 42) - end - end + expect(sticking) + .to receive(:unstick_or_continue_sticking) + .with(:runner, '123456789') + .ordered - describe '#all_caught_up?' do - let(:lb) { ActiveRecord::Base.load_balancer } - let(:last_write_location) { 'foo' } + env = {} - before do - allow(ActiveSupport::Notifications).to receive(:instrument).and_call_original + sticking.stick_or_unstick_request(env, :user, 42) + sticking.stick_or_unstick_request(env, :runner, '123456789') - allow(sticking) - .to receive(:last_write_location_for) - .with(:user, 42) - .and_return(last_write_location) + expect(env[Gitlab::Database::LoadBalancing::RackMiddleware::STICK_OBJECT].to_a).to eq( + [ + [sticking, :user, 42], + [sticking, :runner, + '123456789'] + ]) + end end - context 'when no write location could be found' do - let(:last_write_location) { nil } + describe '#stick_if_necessary' do + it 'does not stick if no write was performed' do + allow(Gitlab::Database::LoadBalancing::Session.current) + .to receive(:performed_write?) 
+ .and_return(false) - it 'returns true' do - expect(lb).not_to receive(:select_up_to_date_host) + expect(sticking).not_to receive(:stick) - expect(sticking.all_caught_up?(:user, 42)).to eq(true) + sticking.stick_if_necessary(:user, 42) end - end - context 'when all secondaries have caught up' do - before do - allow(lb).to receive(:select_up_to_date_host).with('foo').and_return(true) - end + it 'sticks to the primary if a write was performed' do + allow(Gitlab::Database::LoadBalancing::Session.current) + .to receive(:performed_write?) + .and_return(true) - it 'returns true, and unsticks' do expect(sticking) - .to receive(:unstick) + .to receive(:stick) .with(:user, 42) - expect(sticking.all_caught_up?(:user, 42)).to eq(true) - end - - it 'notifies with the proper event payload' do - expect(ActiveSupport::Notifications) - .to receive(:instrument) - .with('caught_up_replica_pick.load_balancing', { result: true }) - .and_call_original - - sticking.all_caught_up?(:user, 42) + sticking.stick_if_necessary(:user, 42) end end - context 'when the secondaries have not yet caught up' do + describe '#all_caught_up?' 
do + let(:lb) { ActiveRecord::Base.load_balancer } + let(:last_write_location) { 'foo' } + before do - allow(lb).to receive(:select_up_to_date_host).with('foo').and_return(false) - end + allow(ActiveSupport::Notifications).to receive(:instrument).and_call_original - it 'returns false' do - expect(sticking.all_caught_up?(:user, 42)).to eq(false) + allow(sticking) + .to receive(:last_write_location_for) + .with(:user, 42) + .and_return(last_write_location) end - it 'notifies with the proper event payload' do - expect(ActiveSupport::Notifications) - .to receive(:instrument) - .with('caught_up_replica_pick.load_balancing', { result: false }) - .and_call_original + context 'when no write location could be found' do + let(:last_write_location) { nil } + + it 'returns true' do + expect(lb).not_to receive(:select_up_to_date_host) - sticking.all_caught_up?(:user, 42) + expect(sticking.all_caught_up?(:user, 42)).to eq(true) + end end - end - end - describe '#unstick_or_continue_sticking' do - let(:lb) { ActiveRecord::Base.load_balancer } + context 'when all secondaries have caught up' do + before do + allow(lb).to receive(:select_up_to_date_host).with('foo').and_return(true) + end - it 'simply returns if no write location could be found' do - allow(sticking) - .to receive(:last_write_location_for) - .with(:user, 42) - .and_return(nil) + it 'returns true, and unsticks' do + expect(sticking) + .to receive(:unstick) + .with(:user, 42) - expect(lb).not_to receive(:select_up_to_date_host) + expect(sticking.all_caught_up?(:user, 42)).to eq(true) + end - sticking.unstick_or_continue_sticking(:user, 42) - end + it 'notifies with the proper event payload' do + expect(ActiveSupport::Notifications) + .to receive(:instrument) + .with('caught_up_replica_pick.load_balancing', { result: true }) + .and_call_original - it 'unsticks if all secondaries have caught up' do - allow(sticking) - .to receive(:last_write_location_for) - .with(:user, 42) - .and_return('foo') + 
sticking.all_caught_up?(:user, 42) + end + end - allow(lb).to receive(:select_up_to_date_host).with('foo').and_return(true) + context 'when the secondaries have not yet caught up' do + before do + allow(lb).to receive(:select_up_to_date_host).with('foo').and_return(false) + end - expect(sticking) - .to receive(:unstick) - .with(:user, 42) + it 'returns false' do + expect(sticking.all_caught_up?(:user, 42)).to eq(false) + end - sticking.unstick_or_continue_sticking(:user, 42) + it 'notifies with the proper event payload' do + expect(ActiveSupport::Notifications) + .to receive(:instrument) + .with('caught_up_replica_pick.load_balancing', { result: false }) + .and_call_original + + sticking.all_caught_up?(:user, 42) + end + end end - it 'continues using the primary if the secondaries have not yet caught up' do - allow(sticking) - .to receive(:last_write_location_for) - .with(:user, 42) - .and_return('foo') + describe '#unstick_or_continue_sticking' do + let(:lb) { ActiveRecord::Base.load_balancer } - allow(lb).to receive(:select_up_to_date_host).with('foo').and_return(false) + it 'simply returns if no write location could be found' do + allow(sticking) + .to receive(:last_write_location_for) + .with(:user, 42) + .and_return(nil) - expect(Gitlab::Database::LoadBalancing::Session.current) - .to receive(:use_primary!) + expect(lb).not_to receive(:select_up_to_date_host) - sticking.unstick_or_continue_sticking(:user, 42) - end - end + sticking.unstick_or_continue_sticking(:user, 42) + end - RSpec.shared_examples 'sticking' do - before do - allow(ActiveRecord::Base.load_balancer) - .to receive(:primary_write_location) - .and_return('foo') - end + it 'unsticks if all secondaries have caught up' do + allow(sticking) + .to receive(:last_write_location_for) + .with(:user, 42) + .and_return('foo') - it 'sticks an entity to the primary', :aggregate_failures do - allow(ActiveRecord::Base.load_balancer) - .to receive(:primary_only?) 
- .and_return(false) + allow(lb).to receive(:select_up_to_date_host).with('foo').and_return(true) - ids.each do |id| expect(sticking) - .to receive(:set_write_location_for) - .with(:user, id, 'foo') + .to receive(:unstick) + .with(:user, 42) + + sticking.unstick_or_continue_sticking(:user, 42) end - expect(Gitlab::Database::LoadBalancing::Session.current) - .to receive(:use_primary!) + it 'continues using the primary if the secondaries have not yet caught up' do + allow(sticking) + .to receive(:last_write_location_for) + .with(:user, 42) + .and_return('foo') - subject - end + allow(lb).to receive(:select_up_to_date_host).with('foo').and_return(false) - it 'does not update the write location when no replicas are used' do - expect(sticking).not_to receive(:set_write_location_for) + expect(Gitlab::Database::LoadBalancing::Session.current) + .to receive(:use_primary!) - subject + sticking.unstick_or_continue_sticking(:user, 42) + end end - end - describe '#stick' do - it_behaves_like 'sticking' do - let(:ids) { [42] } - subject { sticking.stick(:user, ids.first) } + describe '#stick' do + it_behaves_like 'sticking' do + let(:ids) { [42] } + subject { sticking.stick(:user, ids.first) } + end end - end - describe '#bulk_stick' do - it_behaves_like 'sticking' do - let(:ids) { [42, 43] } - subject { sticking.bulk_stick(:user, ids) } + describe '#bulk_stick' do + it_behaves_like 'sticking' do + let(:ids) { [42, 43] } + subject { sticking.bulk_stick(:user, ids) } + end end - end - describe '#mark_primary_write_location' do - it 'updates the write location with the load balancer' do - allow(ActiveRecord::Base.load_balancer) - .to receive(:primary_write_location) - .and_return('foo') + describe '#mark_primary_write_location' do + it 'updates the write location with the load balancer' do + allow(ActiveRecord::Base.load_balancer) + .to receive(:primary_write_location) + .and_return('foo') - allow(ActiveRecord::Base.load_balancer) - .to receive(:primary_only?) 
- .and_return(false) + allow(ActiveRecord::Base.load_balancer) + .to receive(:primary_only?) + .and_return(false) + + expect(sticking) + .to receive(:set_write_location_for) + .with(:user, 42, 'foo') + + sticking.mark_primary_write_location(:user, 42) + end - expect(sticking) - .to receive(:set_write_location_for) - .with(:user, 42, 'foo') + it 'does nothing when no replicas are used' do + expect(sticking).not_to receive(:set_write_location_for) - sticking.mark_primary_write_location(:user, 42) + sticking.mark_primary_write_location(:user, 42) + end end - it 'does nothing when no replicas are used' do - expect(sticking).not_to receive(:set_write_location_for) + describe '#unstick' do + it 'removes the sticking data from Redis' do + sticking.set_write_location_for(:user, 4, 'foo') + sticking.unstick(:user, 4) - sticking.mark_primary_write_location(:user, 42) + expect(sticking.last_write_location_for(:user, 4)).to be_nil + end end - end - describe '#unstick' do - it 'removes the sticking data from Redis' do - sticking.set_write_location_for(:user, 4, 'foo') - sticking.unstick(:user, 4) + describe '#last_write_location_for' do + it 'returns the last WAL write location for a user' do + sticking.set_write_location_for(:user, 4, 'foo') - expect(sticking.last_write_location_for(:user, 4)).to be_nil + expect(sticking.last_write_location_for(:user, 4)).to eq('foo') + end end - end - describe '#last_write_location_for' do - it 'returns the last WAL write location for a user' do - sticking.set_write_location_for(:user, 4, 'foo') + describe '#select_caught_up_replicas' do + let(:lb) { ActiveRecord::Base.load_balancer } + + context 'with no write location' do + before do + allow(sticking) + .to receive(:last_write_location_for) + .with(:project, 42) + .and_return(nil) + end + + it 'returns false and does not try to find caught up hosts' do + expect(lb).not_to receive(:select_up_to_date_host) + expect(sticking.select_caught_up_replicas(:project, 42)).to be false + end + end - 
expect(sticking.last_write_location_for(:user, 4)).to eq('foo') + context 'with write location' do + before do + allow(sticking) + .to receive(:last_write_location_for) + .with(:project, 42) + .and_return('foo') + end + + it 'returns true, selects hosts, and unsticks if any secondary has caught up' do + expect(lb).to receive(:select_up_to_date_host).and_return(true) + expect(sticking) + .to receive(:unstick) + .with(:project, 42) + expect(sticking.select_caught_up_replicas(:project, 42)).to be true + end + end end end - describe '#redis_key_for' do - it 'returns a String' do - expect(sticking.redis_key_for(:user, 42)) - .to eq('database-load-balancing/write-location/main/user/42') - end + context 'with multi-store feature flags turned on' do + it_behaves_like 'tracking status in redis' end - describe '#select_caught_up_replicas' do - let(:lb) { ActiveRecord::Base.load_balancer } - - context 'with no write location' do - before do - allow(sticking) - .to receive(:last_write_location_for) - .with(:project, 42) - .and_return(nil) - end - - it 'returns false and does not try to find caught up hosts' do - expect(lb).not_to receive(:select_up_to_date_host) - expect(sticking.select_caught_up_replicas(:project, 42)).to be false - end + context 'when both multi-store feature flags are off' do + before do + stub_feature_flags(use_primary_and_secondary_stores_for_db_load_balancing: false) + stub_feature_flags(use_primary_store_as_default_for_db_load_balancing: false) end - context 'with write location' do - before do - allow(sticking) - .to receive(:last_write_location_for) - .with(:project, 42) - .and_return('foo') - end + it_behaves_like 'tracking status in redis' + end - it 'returns true, selects hosts, and unsticks if any secondary has caught up' do - expect(lb).to receive(:select_up_to_date_host).and_return(true) - expect(sticking) - .to receive(:unstick) - .with(:project, 42) - expect(sticking.select_caught_up_replicas(:project, 42)).to be true - end + describe 
'#redis_key_for' do + it 'returns a String' do + expect(sticking.redis_key_for(:user, 42)) + .to eq('database-load-balancing/write-location/main/user/42') end end end diff --git a/spec/lib/gitlab/database/load_balancing/transaction_leaking_spec.rb b/spec/lib/gitlab/database/load_balancing/transaction_leaking_spec.rb index 1eb077fe6ca..56fbaef031d 100644 --- a/spec/lib/gitlab/database/load_balancing/transaction_leaking_spec.rb +++ b/spec/lib/gitlab/database/load_balancing/transaction_leaking_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 'Load balancer behavior with errors inside a transaction', :redis, :delete do +RSpec.describe 'Load balancer behavior with errors inside a transaction', :redis, :delete, feature_category: :database do # rubocop:disable Layout/LineLength include StubENV let(:model) { ActiveRecord::Base } let(:db_host) { model.connection_pool.db_config.host } @@ -55,50 +55,8 @@ RSpec.describe 'Load balancer behavior with errors inside a transaction', :redis conn.execute("INSERT INTO #{test_table_name} (value) VALUES (2)") end - context 'with the PREVENT_LOAD_BALANCER_RETRIES_IN_TRANSACTION environment variable not set' do - it 'logs a warning when violating transaction semantics with writes' do - conn = model.connection - - expect(::Gitlab::Database::LoadBalancing::Logger).to receive(:warn).with(hash_including(event: :transaction_leak)) - expect(::Gitlab::Database::LoadBalancing::Logger).to receive(:warn).with(hash_including(event: :read_write_retry)) - - conn.transaction do - expect(conn).to be_transaction_open - - execute(conn) - - expect(conn).not_to be_transaction_open - end - - values = conn.execute("SELECT value FROM #{test_table_name}").to_a.map { |row| row['value'] } - expect(values).to contain_exactly(2) # Does not include 1 because the transaction was aborted and leaked - end - - it 'does not log a warning when no transaction is open to be leaked' do - conn = model.connection - - 
expect(::Gitlab::Database::LoadBalancing::Logger) - .not_to receive(:warn).with(hash_including(event: :transaction_leak)) - expect(::Gitlab::Database::LoadBalancing::Logger) - .to receive(:warn).with(hash_including(event: :read_write_retry)) - - expect(conn).not_to be_transaction_open - - execute(conn) - - expect(conn).not_to be_transaction_open - - values = conn.execute("SELECT value FROM #{test_table_name}").to_a.map { |row| row['value'] } - expect(values).to contain_exactly(1, 2) # Includes both rows because there was no transaction to roll back - end - end - - context 'with the PREVENT_LOAD_BALANCER_RETRIES_IN_TRANSACTION environment variable set' do - before do - stub_env('PREVENT_LOAD_BALANCER_RETRIES_IN_TRANSACTION' => '1') - end - - it 'raises an exception when a retry would occur during a transaction' do + context 'in a transaction' do + it 'raises an exception when a retry would occur' do expect(::Gitlab::Database::LoadBalancing::Logger) .not_to receive(:warn).with(hash_including(event: :transaction_leak)) @@ -108,8 +66,10 @@ RSpec.describe 'Load balancer behavior with errors inside a transaction', :redis end end.to raise_error(ActiveRecord::StatementInvalid) { |e| expect(e.cause).to be_a(PG::ConnectionBad) } end + end - it 'retries when not in a transaction' do + context 'without a transaction' do + it 'retries' do expect(::Gitlab::Database::LoadBalancing::Logger) .not_to receive(:warn).with(hash_including(event: :transaction_leak)) expect(::Gitlab::Database::LoadBalancing::Logger) diff --git a/spec/lib/gitlab/database/load_balancing_spec.rb b/spec/lib/gitlab/database/load_balancing_spec.rb index 1c85abac91c..59e16e6ca8b 100644 --- a/spec/lib/gitlab/database/load_balancing_spec.rb +++ b/spec/lib/gitlab/database/load_balancing_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::LoadBalancing, :suppress_gitlab_schemas_validate_connection do +RSpec.describe Gitlab::Database::LoadBalancing, 
:suppress_gitlab_schemas_validate_connection, feature_category: :pods do describe '.base_models' do it 'returns the models to apply load balancing to' do models = described_class.base_models diff --git a/spec/lib/gitlab/database/migration_helpers/automatic_lock_writes_on_tables_spec.rb b/spec/lib/gitlab/database/migration_helpers/automatic_lock_writes_on_tables_spec.rb index 089c7a779f2..be9346e3829 100644 --- a/spec/lib/gitlab/database/migration_helpers/automatic_lock_writes_on_tables_spec.rb +++ b/spec/lib/gitlab/database/migration_helpers/automatic_lock_writes_on_tables_spec.rb @@ -86,7 +86,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::AutomaticLockWritesOnTables, let(:create_gitlab_shared_table_migration_class) { create_table_migration(gitlab_shared_table_name) } before do - skip_if_multiple_databases_are_setup + skip_if_multiple_databases_are_setup(:ci) end it 'does not lock any newly created tables' do @@ -106,7 +106,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::AutomaticLockWritesOnTables, context 'when multiple databases' do before do - skip_if_multiple_databases_not_setup + skip_if_multiple_databases_not_setup(:ci) end let(:migration_class) { create_table_migration(table_name, skip_automatic_lock_on_writes) } @@ -238,7 +238,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::AutomaticLockWritesOnTables, context 'when renaming a table' do before do - skip_if_multiple_databases_not_setup + skip_if_multiple_databases_not_setup(:ci) create_table_migration(old_table_name).migrate(:up) # create the table first before renaming it end @@ -277,7 +277,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::AutomaticLockWritesOnTables, let(:config_model) { Gitlab::Database.database_base_models[:main] } before do - skip_if_multiple_databases_are_setup + skip_if_multiple_databases_are_setup(:ci) end it 'does not lock any newly created tables' do @@ -305,7 +305,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::AutomaticLockWritesOnTables, 
context 'when multiple databases' do before do - skip_if_multiple_databases_not_setup + skip_if_multiple_databases_not_setup(:ci) migration_class.connection.execute("CREATE TABLE #{table_name}()") migration_class.migrate(:up) end diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb index 12fa115cc4e..9df23776be8 100644 --- a/spec/lib/gitlab/database/migration_helpers_spec.rb +++ b/spec/lib/gitlab/database/migration_helpers_spec.rb @@ -1047,59 +1047,63 @@ RSpec.describe Gitlab::Database::MigrationHelpers do end describe '#foreign_key_exists?' do + let(:referenced_table_name) { '_test_gitlab_main_referenced' } + let(:referencing_table_name) { '_test_gitlab_main_referencing' } + before do model.connection.execute(<<~SQL) - create table referenced ( + create table #{referenced_table_name} ( id bigserial primary key not null ); - create table referencing ( + create table #{referencing_table_name} ( id bigserial primary key not null, non_standard_id bigint not null, - constraint fk_referenced foreign key (non_standard_id) references referenced(id) on delete cascade + constraint fk_referenced foreign key (non_standard_id) + references #{referenced_table_name}(id) on delete cascade ); SQL end shared_examples_for 'foreign key checks' do it 'finds existing foreign keys by column' do - expect(model.foreign_key_exists?(:referencing, target_table, column: :non_standard_id)).to be_truthy + expect(model.foreign_key_exists?(referencing_table_name, target_table, column: :non_standard_id)).to be_truthy end it 'finds existing foreign keys by name' do - expect(model.foreign_key_exists?(:referencing, target_table, name: :fk_referenced)).to be_truthy + expect(model.foreign_key_exists?(referencing_table_name, target_table, name: :fk_referenced)).to be_truthy end it 'finds existing foreign_keys by name and column' do - expect(model.foreign_key_exists?(:referencing, target_table, name: :fk_referenced, column: 
:non_standard_id)).to be_truthy + expect(model.foreign_key_exists?(referencing_table_name, target_table, name: :fk_referenced, column: :non_standard_id)).to be_truthy end it 'finds existing foreign_keys by name, column and on_delete' do - expect(model.foreign_key_exists?(:referencing, target_table, name: :fk_referenced, column: :non_standard_id, on_delete: :cascade)).to be_truthy + expect(model.foreign_key_exists?(referencing_table_name, target_table, name: :fk_referenced, column: :non_standard_id, on_delete: :cascade)).to be_truthy end it 'finds existing foreign keys by target table only' do - expect(model.foreign_key_exists?(:referencing, target_table)).to be_truthy + expect(model.foreign_key_exists?(referencing_table_name, target_table)).to be_truthy end it 'compares by column name if given' do - expect(model.foreign_key_exists?(:referencing, target_table, column: :user_id)).to be_falsey + expect(model.foreign_key_exists?(referencing_table_name, target_table, column: :user_id)).to be_falsey end it 'compares by target column name if given' do - expect(model.foreign_key_exists?(:referencing, target_table, primary_key: :user_id)).to be_falsey - expect(model.foreign_key_exists?(:referencing, target_table, primary_key: :id)).to be_truthy + expect(model.foreign_key_exists?(referencing_table_name, target_table, primary_key: :user_id)).to be_falsey + expect(model.foreign_key_exists?(referencing_table_name, target_table, primary_key: :id)).to be_truthy end it 'compares by foreign key name if given' do - expect(model.foreign_key_exists?(:referencing, target_table, name: :non_existent_foreign_key_name)).to be_falsey + expect(model.foreign_key_exists?(referencing_table_name, target_table, name: :non_existent_foreign_key_name)).to be_falsey end it 'compares by foreign key name and column if given' do - expect(model.foreign_key_exists?(:referencing, target_table, name: :non_existent_foreign_key_name, column: :non_standard_id)).to be_falsey + 
expect(model.foreign_key_exists?(referencing_table_name, target_table, name: :non_existent_foreign_key_name, column: :non_standard_id)).to be_falsey end it 'compares by foreign key name, column and on_delete if given' do - expect(model.foreign_key_exists?(:referencing, target_table, name: :fk_referenced, column: :non_standard_id, on_delete: :nullify)).to be_falsey + expect(model.foreign_key_exists?(referencing_table_name, target_table, name: :fk_referenced, column: :non_standard_id, on_delete: :nullify)).to be_falsey end end @@ -1110,7 +1114,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do end context 'specifying a target table' do - let(:target_table) { :referenced } + let(:target_table) { referenced_table_name } it_behaves_like 'foreign key checks' end @@ -1121,64 +1125,78 @@ RSpec.describe Gitlab::Database::MigrationHelpers do it 'raises an error if an invalid on_delete is specified' do # The correct on_delete key is "nullify" - expect { model.foreign_key_exists?(:referenced, on_delete: :set_null) }.to raise_error(ArgumentError) + expect { model.foreign_key_exists?(referenced_table_name, on_delete: :set_null) }.to raise_error(ArgumentError) end context 'with foreign key using multiple columns' do + let(:p_referenced_table_name) { '_test_gitlab_main_p_referenced' } + let(:p_referencing_table_name) { '_test_gitlab_main_p_referencing' } + before do model.connection.execute(<<~SQL) - create table p_referenced ( - id bigserial not null, - partition_number bigint not null default 100, - primary key (partition_number, id) - ); - create table p_referencing ( - id bigserial primary key not null, - partition_number bigint not null, - constraint fk_partitioning foreign key (partition_number, id) references p_referenced(partition_number, id) on delete cascade - ); + create table #{p_referenced_table_name} ( + id bigserial not null, + partition_number bigint not null default 100, + primary key (partition_number, id) + ); + create table #{p_referencing_table_name} ( 
+ id bigserial primary key not null, + partition_number bigint not null, + constraint fk_partitioning foreign key (partition_number, id) + references #{p_referenced_table_name} (partition_number, id) on delete cascade + ); SQL end it 'finds existing foreign keys by columns' do - expect(model.foreign_key_exists?(:p_referencing, :p_referenced, column: [:partition_number, :id])).to be_truthy + expect(model.foreign_key_exists?(p_referencing_table_name, p_referenced_table_name, + column: [:partition_number, :id])).to be_truthy end it 'finds existing foreign keys by name' do - expect(model.foreign_key_exists?(:p_referencing, :p_referenced, name: :fk_partitioning)).to be_truthy + expect(model.foreign_key_exists?(p_referencing_table_name, p_referenced_table_name, + name: :fk_partitioning)).to be_truthy end it 'finds existing foreign_keys by name and column' do - expect(model.foreign_key_exists?(:p_referencing, :p_referenced, name: :fk_partitioning, column: [:partition_number, :id])).to be_truthy + expect(model.foreign_key_exists?(p_referencing_table_name, p_referenced_table_name, + name: :fk_partitioning, column: [:partition_number, :id])).to be_truthy end it 'finds existing foreign_keys by name, column and on_delete' do - expect(model.foreign_key_exists?(:p_referencing, :p_referenced, name: :fk_partitioning, column: [:partition_number, :id], on_delete: :cascade)).to be_truthy + expect(model.foreign_key_exists?(p_referencing_table_name, p_referenced_table_name, + name: :fk_partitioning, column: [:partition_number, :id], on_delete: :cascade)).to be_truthy end it 'finds existing foreign keys by target table only' do - expect(model.foreign_key_exists?(:p_referencing, :p_referenced)).to be_truthy + expect(model.foreign_key_exists?(p_referencing_table_name, p_referenced_table_name)).to be_truthy end it 'compares by column name if given' do - expect(model.foreign_key_exists?(:p_referencing, :p_referenced, column: :id)).to be_falsey + 
expect(model.foreign_key_exists?(p_referencing_table_name, p_referenced_table_name, + column: :id)).to be_falsey end it 'compares by target column name if given' do - expect(model.foreign_key_exists?(:p_referencing, :p_referenced, primary_key: :user_id)).to be_falsey - expect(model.foreign_key_exists?(:p_referencing, :p_referenced, primary_key: [:partition_number, :id])).to be_truthy + expect(model.foreign_key_exists?(p_referencing_table_name, p_referenced_table_name, + primary_key: :user_id)).to be_falsey + expect(model.foreign_key_exists?(p_referencing_table_name, p_referenced_table_name, + primary_key: [:partition_number, :id])).to be_truthy end it 'compares by foreign key name if given' do - expect(model.foreign_key_exists?(:p_referencing, :p_referenced, name: :non_existent_foreign_key_name)).to be_falsey + expect(model.foreign_key_exists?(p_referencing_table_name, p_referenced_table_name, + name: :non_existent_foreign_key_name)).to be_falsey end it 'compares by foreign key name and column if given' do - expect(model.foreign_key_exists?(:p_referencing, :p_referenced, name: :non_existent_foreign_key_name, column: [:partition_number, :id])).to be_falsey + expect(model.foreign_key_exists?(p_referencing_table_name, p_referenced_table_name, + name: :non_existent_foreign_key_name, column: [:partition_number, :id])).to be_falsey end it 'compares by foreign key name, column and on_delete if given' do - expect(model.foreign_key_exists?(:p_referencing, :p_referenced, name: :fk_partitioning, column: [:partition_number, :id], on_delete: :nullify)).to be_falsey + expect(model.foreign_key_exists?(p_referencing_table_name, p_referenced_table_name, + name: :fk_partitioning, column: [:partition_number, :id], on_delete: :nullify)).to be_falsey end end end diff --git a/spec/lib/gitlab/database/migrations/batched_migration_last_id_spec.rb b/spec/lib/gitlab/database/migrations/batched_migration_last_id_spec.rb index 97b432406eb..1365adb8993 100644 --- 
a/spec/lib/gitlab/database/migrations/batched_migration_last_id_spec.rb +++ b/spec/lib/gitlab/database/migrations/batched_migration_last_id_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::Migrations::BatchedMigrationLastId, feature_category: :pipeline_insights do +RSpec.describe Gitlab::Database::Migrations::BatchedMigrationLastId, feature_category: :database do subject(:test_sampling) { described_class.new(connection, base_dir) } let(:base_dir) { Pathname.new(Dir.mktmpdir) } diff --git a/spec/lib/gitlab/database/migrations/instrumentation_spec.rb b/spec/lib/gitlab/database/migrations/instrumentation_spec.rb index b0bdbf5c371..4f347034c0b 100644 --- a/spec/lib/gitlab/database/migrations/instrumentation_spec.rb +++ b/spec/lib/gitlab/database/migrations/instrumentation_spec.rb @@ -18,6 +18,7 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do let(:migration_name) { 'test' } let(:migration_version) { '12345' } let(:migration_meta) { { 'max_batch_size' => 1, 'total_tuple_count' => 10, 'interval' => 60 } } + let(:expected_json_keys) { %w[version name walltime success total_database_size_change query_statistics] } it 'executes the given block' do expect do |b| @@ -81,7 +82,10 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do expect(subject.success).to be_truthy expect(subject.version).to eq(migration_version) expect(subject.name).to eq(migration_name) - expect(subject.meta).to eq(migration_meta) + end + + it 'transforms observation to expected json' do + expect(Gitlab::Json.parse(subject.to_json).keys).to contain_exactly(*expected_json_keys) end end @@ -114,7 +118,10 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do expect(subject['success']).to be_falsey expect(subject['version']).to eq(migration_version) expect(subject['name']).to eq(migration_name) - expect(subject['meta']).to include(migration_meta) + end + + it 'transforms observation to expected json' do + 
expect(Gitlab::Json.parse(subject.to_json).keys).to contain_exactly(*expected_json_keys) end end end diff --git a/spec/lib/gitlab/database/migrations/observers/batch_details_spec.rb b/spec/lib/gitlab/database/migrations/observers/batch_details_spec.rb new file mode 100644 index 00000000000..5b3c23736ed --- /dev/null +++ b/spec/lib/gitlab/database/migrations/observers/batch_details_spec.rb @@ -0,0 +1,42 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::Migrations::Observers::BatchDetails, feature_category: :database do + subject(:observe) { described_class.new(observation, path, connection) } + + let(:connection) { ActiveRecord::Migration.connection } + let(:observation) { Gitlab::Database::Migrations::Observation.new(meta: meta) } + let(:path) { Dir.mktmpdir } + let(:file_name) { 'batch-details.json' } + let(:file_path) { Pathname.new(path).join(file_name) } + let(:json_file) { Gitlab::Json.parse(File.read(file_path)) } + let(:job_meta) do + { "min_value" => 1, "max_value" => 19, "batch_size" => 20, "sub_batch_size" => 5, "pause_ms" => 100 } + end + + where(:meta, :expected_keys) do + [ + [lazy { { job_meta: job_meta } }, %w[time_spent min_value max_value batch_size sub_batch_size pause_ms]], + [nil, %w[time_spent]], + [{ job_meta: nil }, %w[time_spent]] + ] + end + + with_them do + before do + observe.before + observe.after + end + + after do + FileUtils.remove_entry(path) + end + + it 'records expected information to file' do + observe.record + + expect(json_file.keys).to match_array(expected_keys) + end + end +end diff --git a/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb b/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb index 0b048617ce1..57c5011590c 100644 --- a/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb +++ b/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' 
-RSpec.describe Gitlab::Database::Migrations::TestBatchedBackgroundRunner, :freeze_time do +RSpec.describe Gitlab::Database::Migrations::TestBatchedBackgroundRunner, :freeze_time, feature_category: :database do include Gitlab::Database::MigrationHelpers include Database::MigrationTestingHelpers @@ -45,18 +45,15 @@ RSpec.describe Gitlab::Database::Migrations::TestBatchedBackgroundRunner, :freez end with_them do - let(:result_dir) { Dir.mktmpdir } + let(:result_dir) { Pathname.new(Dir.mktmpdir) } + let(:connection) { base_model.connection } + let(:table_name) { "_test_column_copying" } + let(:from_id) { 0 } after do FileUtils.rm_rf(result_dir) end - let(:connection) { base_model.connection } - - let(:table_name) { "_test_column_copying" } - - let(:from_id) { 0 } - before do connection.execute(<<~SQL) CREATE TABLE #{table_name} ( @@ -70,26 +67,15 @@ RSpec.describe Gitlab::Database::Migrations::TestBatchedBackgroundRunner, :freez context 'running a real background migration' do let(:interval) { 5.minutes } - let(:meta) { { "max_batch_size" => nil, "total_tuple_count" => nil, "interval" => interval } } - - let(:params) do - { - version: nil, - connection: connection, - meta: { - interval: 300, - max_batch_size: nil, - total_tuple_count: nil - } - } - end + let(:params) { { version: nil, connection: connection } } + let(:migration_name) { 'CopyColumnUsingBackgroundMigrationJob' } + let(:migration_file_path) { result_dir.join('CopyColumnUsingBackgroundMigrationJob', 'details.json') } + let(:json_file) { Gitlab::Json.parse(File.read(migration_file_path)) } + let(:expected_file_keys) { %w[interval total_tuple_count max_batch_size] } before do - queue_migration('CopyColumnUsingBackgroundMigrationJob', - table_name, :id, - :id, :data, - batch_size: 100, - job_interval: interval) # job_interval is skipped when testing + # job_interval is skipped when testing + queue_migration(migration_name, table_name, :id, :id, :data, batch_size: 100, job_interval: interval) end 
subject(:sample_migration) do @@ -113,6 +99,20 @@ RSpec.describe Gitlab::Database::Migrations::TestBatchedBackgroundRunner, :freez subject end + + it 'uses the filtering clause from the migration' do + expect_next_instance_of(Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchingStrategy) do |s| + expect(s).to receive(:filter_batch).at_least(:once).and_call_original + end + + subject + end + + it 'exports migration details to a file' do + subject + + expect(json_file.keys).to match_array(expected_file_keys) + end end context 'with jobs to run' do diff --git a/spec/lib/gitlab/database/migrations/timeout_helpers_spec.rb b/spec/lib/gitlab/database/migrations/timeout_helpers_spec.rb index d35211af680..ee63ea7174b 100644 --- a/spec/lib/gitlab/database/migrations/timeout_helpers_spec.rb +++ b/spec/lib/gitlab/database/migrations/timeout_helpers_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::Migrations::TimeoutHelpers do +RSpec.describe Gitlab::Database::Migrations::TimeoutHelpers, feature_category: :database do let(:model) do ActiveRecord::Migration.new.extend(described_class) end diff --git a/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb b/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb index 8027990a546..2212cb09888 100644 --- a/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb +++ b/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb @@ -6,22 +6,14 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do include Database::PartitioningHelpers include ExclusiveLeaseHelpers - def has_partition(model, month) - Gitlab::Database::PostgresPartition.for_parent_table(model.table_name).any? 
do |partition| - Gitlab::Database::Partitioning::TimePartition.from_sql( - model.table_name, - partition.name, - partition.condition - ).from == month - end - end + let(:partitioned_table_name) { "_test_gitlab_main_my_model_example_table" } context 'creating partitions (mocked)' do subject(:sync_partitions) { described_class.new(model).sync_partitions } let(:model) { double(partitioning_strategy: partitioning_strategy, table_name: table, connection: connection) } let(:connection) { ActiveRecord::Base.connection } - let(:table) { "my_model_example_table" } + let(:table) { partitioned_table_name } let(:partitioning_strategy) do double(missing_partitions: partitions, extra_partitions: [], after_adding_partitions: nil) end @@ -57,7 +49,7 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do let(:connection) { Ci::ApplicationRecord.connection } it 'uses the explicitly provided connection when any' do - skip_if_multiple_databases_not_setup + skip_if_multiple_databases_not_setup(:ci) expect(connection).to receive(:execute).with("LOCK TABLE \"#{table}\" IN ACCESS EXCLUSIVE MODE") expect(connection).to receive(:execute).with(partitions.first.to_sql) @@ -102,14 +94,14 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do Class.new(ApplicationRecord) do include PartitionedTable - self.table_name = 'my_model_example_table' - partitioned_by :created_at, strategy: :monthly end end before do - create_partitioned_table(connection, 'my_model_example_table') + my_model.table_name = partitioned_table_name + + create_partitioned_table(connection, partitioned_table_name) end it 'creates partitions' do @@ -184,27 +176,27 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do Class.new(ApplicationRecord) do include PartitionedTable - self.table_name = 'my_model_example_table' - partitioned_by :created_at, strategy: :monthly, retain_for: 1.month end end before do connection.execute(<<~SQL) - CREATE TABLE my_model_example_table + CREATE TABLE 
#{partitioned_table_name} (id serial not null, created_at timestamptz not null, primary key (id, created_at)) PARTITION BY RANGE (created_at); - CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.my_model_example_table_202104 - PARTITION OF my_model_example_table + CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.#{partitioned_table_name}_202104 + PARTITION OF #{partitioned_table_name} FOR VALUES FROM ('2021-04-01') TO ('2021-05-01'); - CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.my_model_example_table_202105 - PARTITION OF my_model_example_table + CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.#{partitioned_table_name}_202105 + PARTITION OF #{partitioned_table_name} FOR VALUES FROM ('2021-05-01') TO ('2021-06-01'); SQL + my_model.table_name = partitioned_table_name + # Also create all future partitions so that the sync is only trying to detach old partitions my_model.partitioning_strategy.missing_partitions.each do |p| connection.execute p.to_sql @@ -234,7 +226,7 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do it 'creates the appropriate PendingPartitionDrop entry' do subject - pending_drop = Postgresql::DetachedPartition.find_by!(table_name: 'my_model_example_table_202104') + pending_drop = Postgresql::DetachedPartition.find_by!(table_name: "#{partitioned_table_name}_202104") expect(pending_drop.drop_after).to eq(Time.current + described_class::RETAIN_DETACHED_PARTITIONS_FOR) end @@ -243,11 +235,11 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do context 'when the model is the target of a foreign key' do before do connection.execute(<<~SQL) - create unique index idx_for_fk ON my_model_example_table(created_at); + create unique index idx_for_fk ON #{partitioned_table_name}(created_at); - create table referencing_table ( + create table _test_gitlab_main_referencing_table ( id bigserial primary key not null, - referencing_created_at timestamptz references 
my_model_example_table(created_at) + referencing_created_at timestamptz references #{partitioned_table_name}(created_at) ); SQL end @@ -265,15 +257,15 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do Class.new(ApplicationRecord) do include PartitionedTable - self.table_name = 'my_model_example_table' - partitioned_by :created_at, strategy: :monthly, retain_for: 1.month end end before do + my_model.table_name = partitioned_table_name + connection.execute(<<~SQL) - CREATE TABLE my_model_example_table + CREATE TABLE #{partitioned_table_name} (id serial not null, created_at timestamptz not null, primary key (id, created_at)) PARTITION BY RANGE (created_at); SQL @@ -294,6 +286,16 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do end end + def has_partition(model, month) + Gitlab::Database::PostgresPartition.for_parent_table(model.table_name).any? do |partition| + Gitlab::Database::Partitioning::TimePartition.from_sql( + model.table_name, + partition.name, + partition.condition + ).from == month + end + end + def create_partitioned_table(connection, table) connection.execute(<<~SQL) CREATE TABLE #{table} diff --git a/spec/lib/gitlab/database/partitioning_spec.rb b/spec/lib/gitlab/database/partitioning_spec.rb index 855d0bc46a4..ae74ee60a4b 100644 --- a/spec/lib/gitlab/database/partitioning_spec.rb +++ b/spec/lib/gitlab/database/partitioning_spec.rb @@ -117,7 +117,7 @@ RSpec.describe Gitlab::Database::Partitioning do end it 'creates partitions in each database' do - skip_if_multiple_databases_not_setup + skip_if_multiple_databases_not_setup(:ci) expect { described_class.sync_partitions(models) } .to change { find_partitions(table_names.first, conn: connection).size }.from(0) @@ -176,7 +176,7 @@ RSpec.describe Gitlab::Database::Partitioning do end it 'manages partitions for models for the given database', :aggregate_failures do - skip_if_multiple_databases_not_setup + skip_if_multiple_databases_not_setup(:ci) expect { 
described_class.sync_partitions([models.first, ci_model], only_on: 'ci') } .to change { find_partitions(ci_model.table_name, conn: ci_connection).size }.from(0) diff --git a/spec/lib/gitlab/database/postgres_foreign_key_spec.rb b/spec/lib/gitlab/database/postgres_foreign_key_spec.rb index a8dbc4be16f..ae56f66737d 100644 --- a/spec/lib/gitlab/database/postgres_foreign_key_spec.rb +++ b/spec/lib/gitlab/database/postgres_foreign_key_spec.rb @@ -6,26 +6,32 @@ RSpec.describe Gitlab::Database::PostgresForeignKey, type: :model, feature_categ # PostgresForeignKey does not `behaves_like 'a postgres model'` because it does not correspond 1-1 with a single entry # in pg_class + let(:table_prefix) { '_test_gitlab_main' } + before do ApplicationRecord.connection.execute(<<~SQL) - CREATE TABLE public.referenced_table ( + CREATE TABLE #{schema_table_name('referenced_table')} ( id bigserial primary key not null, id_b bigserial not null, UNIQUE (id, id_b) ); - CREATE TABLE public.other_referenced_table ( + CREATE TABLE #{schema_table_name('other_referenced_table')} ( id bigserial primary key not null ); - CREATE TABLE public.constrained_table ( + CREATE TABLE #{schema_table_name('constrained_table')} ( id bigserial primary key not null, referenced_table_id bigint not null, referenced_table_id_b bigint not null, other_referenced_table_id bigint not null, - CONSTRAINT fk_constrained_to_referenced FOREIGN KEY(referenced_table_id, referenced_table_id_b) REFERENCES referenced_table(id, id_b) on delete restrict, + CONSTRAINT fk_constrained_to_referenced + FOREIGN KEY(referenced_table_id, referenced_table_id_b) + REFERENCES #{table_name('referenced_table')}(id, id_b) + ON DELETE restrict + ON UPDATE restrict, CONSTRAINT fk_constrained_to_other_referenced FOREIGN KEY(other_referenced_table_id) - REFERENCES other_referenced_table(id) + REFERENCES #{table_name('other_referenced_table')}(id) ); SQL @@ -33,13 +39,13 @@ RSpec.describe Gitlab::Database::PostgresForeignKey, type: :model, 
feature_categ describe '#by_referenced_table_identifier' do it 'throws an error when the identifier name is not fully qualified' do - expect { described_class.by_referenced_table_identifier('referenced_table') }.to raise_error(ArgumentError, /not fully qualified/) + expect { described_class.by_referenced_table_identifier(table_name("referenced_table")) }.to raise_error(ArgumentError, /not fully qualified/) end it 'finds the foreign keys for the referenced table' do expected = described_class.find_by!(name: 'fk_constrained_to_referenced') - expect(described_class.by_referenced_table_identifier('public.referenced_table')).to contain_exactly(expected) + expect(described_class.by_referenced_table_identifier(schema_table_name("referenced_table"))).to contain_exactly(expected) end end @@ -47,19 +53,19 @@ RSpec.describe Gitlab::Database::PostgresForeignKey, type: :model, feature_categ it 'finds the foreign keys for the referenced table' do expected = described_class.find_by!(name: 'fk_constrained_to_referenced') - expect(described_class.by_referenced_table_name('referenced_table')).to contain_exactly(expected) + expect(described_class.by_referenced_table_name(table_name("referenced_table"))).to contain_exactly(expected) end end describe '#by_constrained_table_identifier' do it 'throws an error when the identifier name is not fully qualified' do - expect { described_class.by_constrained_table_identifier('constrained_table') }.to raise_error(ArgumentError, /not fully qualified/) + expect { described_class.by_constrained_table_identifier(table_name("constrained_table")) }.to raise_error(ArgumentError, /not fully qualified/) end it 'finds the foreign keys for the constrained table' do expected = described_class.where(name: %w[fk_constrained_to_referenced fk_constrained_to_other_referenced]).to_a - expect(described_class.by_constrained_table_identifier('public.constrained_table')).to match_array(expected) + 
expect(described_class.by_constrained_table_identifier(schema_table_name('constrained_table'))).to match_array(expected) end end @@ -67,7 +73,7 @@ RSpec.describe Gitlab::Database::PostgresForeignKey, type: :model, feature_categ it 'finds the foreign keys for the constrained table' do expected = described_class.where(name: %w[fk_constrained_to_referenced fk_constrained_to_other_referenced]).to_a - expect(described_class.by_constrained_table_name('constrained_table')).to match_array(expected) + expect(described_class.by_constrained_table_name(table_name("constrained_table"))).to match_array(expected) end end @@ -80,7 +86,7 @@ RSpec.describe Gitlab::Database::PostgresForeignKey, type: :model, feature_categ context 'when finding columns for foreign keys' do using RSpec::Parameterized::TableSyntax - let(:fks) { described_class.by_constrained_table_name('constrained_table') } + let(:fks) { described_class.by_constrained_table_name(table_name("constrained_table")) } where(:fk, :expected_constrained, :expected_referenced) do lazy { described_class.find_by(name: 'fk_constrained_to_referenced') } | %w[referenced_table_id referenced_table_id_b] | %w[id id_b] @@ -110,25 +116,34 @@ RSpec.describe Gitlab::Database::PostgresForeignKey, type: :model, feature_categ end end - describe '#on_delete_action' do + describe '#on_delete_action and #on_update_action' do before do ApplicationRecord.connection.execute(<<~SQL) - create table public.referenced_table_all_on_delete_actions ( + create table #{schema_table_name('referenced_table_all_on_delete_actions')} ( id bigserial primary key not null ); - create table public.constrained_table_all_on_delete_actions ( + create table #{schema_table_name('constrained_table_all_on_delete_actions')} ( id bigserial primary key not null, - ref_id_no_action bigint not null constraint fk_no_action references referenced_table_all_on_delete_actions(id), - ref_id_restrict bigint not null constraint fk_restrict references 
referenced_table_all_on_delete_actions(id) on delete restrict, - ref_id_nullify bigint not null constraint fk_nullify references referenced_table_all_on_delete_actions(id) on delete set null, - ref_id_cascade bigint not null constraint fk_cascade references referenced_table_all_on_delete_actions(id) on delete cascade, - ref_id_set_default bigint not null constraint fk_set_default references referenced_table_all_on_delete_actions(id) on delete set default + ref_id_no_action bigint not null constraint fk_no_action + references #{table_name('referenced_table_all_on_delete_actions')}(id), + ref_id_restrict bigint not null constraint fk_restrict + references #{table_name('referenced_table_all_on_delete_actions')}(id) + on delete restrict on update restrict, + ref_id_nullify bigint not null constraint fk_nullify + references #{table_name('referenced_table_all_on_delete_actions')}(id) + on delete set null on update set null, + ref_id_cascade bigint not null constraint fk_cascade + references #{table_name('referenced_table_all_on_delete_actions')}(id) + on delete cascade on update cascade, + ref_id_set_default bigint not null constraint fk_set_default + references #{table_name('referenced_table_all_on_delete_actions')}(id) + on delete set default on update set default ) SQL end - let(:fks) { described_class.by_constrained_table_name('constrained_table_all_on_delete_actions') } + let(:fks) { described_class.by_constrained_table_name(table_name('constrained_table_all_on_delete_actions')) } context 'with an invalid on_delete_action' do it 'raises an error' do @@ -137,7 +152,7 @@ RSpec.describe Gitlab::Database::PostgresForeignKey, type: :model, feature_categ end end - where(:fk_name, :expected_on_delete_action) do + where(:fk_name, :expected_action) do [ %w[fk_no_action no_action], %w[fk_restrict restrict], @@ -151,12 +166,22 @@ RSpec.describe Gitlab::Database::PostgresForeignKey, type: :model, feature_categ subject(:fk) { fks.find_by(name: fk_name) } it 'has the appropriate 
on delete action' do - expect(fk.on_delete_action).to eq(expected_on_delete_action) + expect(fk.on_delete_action).to eq(expected_action) + end + + it 'has the appropriate on update action' do + expect(fk.on_update_action).to eq(expected_action) end describe '#by_on_delete_action' do it 'finds the key by on delete action' do - expect(fks.by_on_delete_action(expected_on_delete_action)).to contain_exactly(fk) + expect(fks.by_on_delete_action(expected_action)).to contain_exactly(fk) + end + end + + describe '#by_on_update_action' do + it 'finds the key by on update action' do + expect(fks.by_on_update_action(expected_action)).to contain_exactly(fk) end end end @@ -167,16 +192,17 @@ RSpec.describe Gitlab::Database::PostgresForeignKey, type: :model, feature_categ skip('not supported before postgres 12') if ApplicationRecord.database.version.to_f < 12 ApplicationRecord.connection.execute(<<~SQL) - create table public.parent ( - id bigserial primary key not null - ) partition by hash(id); + create table #{schema_table_name('parent')} ( + id bigserial primary key not null + ) partition by hash(id); - create table public.child partition of parent for values with (modulus 2, remainder 1); + create table #{schema_table_name('child')} partition of #{table_name('parent')} + for values with (modulus 2, remainder 1); - create table public.referencing_partitioned ( - id bigserial not null primary key, - constraint fk_inherited foreign key (id) references parent(id) - ) + create table #{schema_table_name('referencing_partitioned')} ( + id bigserial not null primary key, + constraint fk_inherited foreign key (id) references #{table_name('parent')}(id) + ) SQL end @@ -185,7 +211,7 @@ RSpec.describe Gitlab::Database::PostgresForeignKey, type: :model, feature_categ where(:fk, :inherited) do lazy { described_class.find_by(name: 'fk_inherited') } | false - lazy { described_class.by_referenced_table_identifier('public.child').first! 
} | true + lazy { described_class.by_referenced_table_identifier(schema_table_name('child')).first! } | true lazy { described_class.find_by(name: 'fk_constrained_to_referenced') } | false end @@ -197,12 +223,20 @@ RSpec.describe Gitlab::Database::PostgresForeignKey, type: :model, feature_categ end describe '#not_inherited' do - let(:fks) { described_class.by_constrained_table_identifier('public.referencing_partitioned') } + let(:fks) { described_class.by_constrained_table_identifier(schema_table_name('referencing_partitioned')) } it 'lists all non-inherited foreign keys' do - expect(fks.pluck(:referenced_table_name)).to contain_exactly('parent', 'child') - expect(fks.not_inherited.pluck(:referenced_table_name)).to contain_exactly('parent') + expect(fks.pluck(:referenced_table_name)).to contain_exactly(table_name('parent'), table_name('child')) + expect(fks.not_inherited.pluck(:referenced_table_name)).to contain_exactly(table_name('parent')) end end end + + def schema_table_name(name) + "public.#{table_name(name)}" + end + + def table_name(name) + "#{table_prefix}_#{name}" + end end diff --git a/spec/lib/gitlab/database/postgres_index_spec.rb b/spec/lib/gitlab/database/postgres_index_spec.rb index db66736676b..d8a2612caf3 100644 --- a/spec/lib/gitlab/database/postgres_index_spec.rb +++ b/spec/lib/gitlab/database/postgres_index_spec.rb @@ -12,7 +12,7 @@ RSpec.describe Gitlab::Database::PostgresIndex do CREATE INDEX #{name} ON public.users (name); CREATE UNIQUE INDEX bar_key ON public.users (id); - CREATE TABLE example_table (id serial primary key); + CREATE TABLE _test_gitlab_main_example_table (id serial primary key); SQL end @@ -144,7 +144,7 @@ RSpec.describe Gitlab::Database::PostgresIndex do end it 'returns true for a primary key index' do - expect(find('public.example_table_pkey')).to be_unique + expect(find('public._test_gitlab_main_example_table_pkey')).to be_unique end end diff --git a/spec/lib/gitlab/database/postgres_partitioned_table_spec.rb 
b/spec/lib/gitlab/database/postgres_partitioned_table_spec.rb index 21a46f1a0a6..170cc894071 100644 --- a/spec/lib/gitlab/database/postgres_partitioned_table_spec.rb +++ b/spec/lib/gitlab/database/postgres_partitioned_table_spec.rb @@ -3,25 +3,29 @@ require 'spec_helper' RSpec.describe Gitlab::Database::PostgresPartitionedTable, type: :model do - let(:schema) { 'public' } - let(:name) { 'foo_range' } - let(:identifier) { "#{schema}.#{name}" } + let_it_be(:foo_range_table_name) { '_test_gitlab_main_foo_range' } + let_it_be(:foo_list_table_name) { '_test_gitlab_main_foo_list' } + let_it_be(:foo_hash_table_name) { '_test_gitlab_main_foo_hash' } - before do + let_it_be(:schema) { 'public' } + let_it_be(:name) { foo_range_table_name } + let_it_be(:identifier) { "#{schema}.#{name}" } + + before_all do ActiveRecord::Base.connection.execute(<<~SQL) - CREATE TABLE #{identifier} ( + CREATE TABLE #{schema}.#{foo_range_table_name} ( id serial NOT NULL, created_at timestamptz NOT NULL, PRIMARY KEY (id, created_at) ) PARTITION BY RANGE(created_at); - CREATE TABLE public.foo_list ( + CREATE TABLE #{schema}.#{foo_list_table_name} ( id serial NOT NULL, row_type text NOT NULL, PRIMARY KEY (id, row_type) ) PARTITION BY LIST(row_type); - CREATE TABLE public.foo_hash ( + CREATE TABLE #{schema}.#{foo_hash_table_name} ( id serial NOT NULL, row_value int NOT NULL, PRIMARY KEY (id, row_value) @@ -56,31 +60,63 @@ RSpec.describe Gitlab::Database::PostgresPartitionedTable, type: :model do end end + describe '.each_partition' do + context 'without partitions' do + it 'does not yield control' do + expect { |b| described_class.each_partition(name, &b) }.not_to yield_control + end + end + + context 'with partitions' do + let(:partition_schema) { 'gitlab_partitions_dynamic' } + let(:partition1_name) { "#{partition_schema}.#{name}_202001" } + let(:partition2_name) { "#{partition_schema}.#{name}_202002" } + + before do + ActiveRecord::Base.connection.execute(<<~SQL) + CREATE TABLE #{partition1_name} 
PARTITION OF #{identifier} + FOR VALUES FROM ('2020-01-01') TO ('2020-02-01'); + + CREATE TABLE #{partition2_name} PARTITION OF #{identifier} + FOR VALUES FROM ('2020-02-01') TO ('2020-03-01'); + SQL + end + + it 'yields control with partition as argument' do + args = Gitlab::Database::PostgresPartition + .where(identifier: [partition1_name, partition2_name]) + .order(:name).to_a + + expect { |b| described_class.each_partition(name, &b) }.to yield_successive_args(*args) + end + end + end + describe '#dynamic?' do it 'returns true for tables partitioned by range' do - expect(find('public.foo_range')).to be_dynamic + expect(find("#{schema}.#{foo_range_table_name}")).to be_dynamic end it 'returns true for tables partitioned by list' do - expect(find('public.foo_list')).to be_dynamic + expect(find("#{schema}.#{foo_list_table_name}")).to be_dynamic end it 'returns false for tables partitioned by hash' do - expect(find('public.foo_hash')).not_to be_dynamic + expect(find("#{schema}.#{foo_hash_table_name}")).not_to be_dynamic end end describe '#static?' 
do it 'returns false for tables partitioned by range' do - expect(find('public.foo_range')).not_to be_static + expect(find("#{schema}.#{foo_range_table_name}")).not_to be_static end it 'returns false for tables partitioned by list' do - expect(find('public.foo_list')).not_to be_static + expect(find("#{schema}.#{foo_list_table_name}")).not_to be_static end it 'returns true for tables partitioned by hash' do - expect(find('public.foo_hash')).to be_static + expect(find("#{schema}.#{foo_hash_table_name}")).to be_static end end diff --git a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_validate_connection_spec.rb b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_validate_connection_spec.rb index 47038bbd138..d31be6cb883 100644 --- a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_validate_connection_spec.rb +++ b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_validate_connection_spec.rb @@ -28,19 +28,19 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection model: ApplicationRecord, sql: "SELECT 1 FROM projects LEFT JOIN ci_builds ON ci_builds.project_id=projects.id", expect_error: /The query tried to access \["projects", "ci_builds"\]/, - setup: -> (_) { skip_if_multiple_databases_not_setup } + setup: -> (_) { skip_if_multiple_databases_not_setup(:ci) } }, "for query accessing gitlab_ci and gitlab_main the gitlab_schemas is always ordered" => { model: ApplicationRecord, sql: "SELECT 1 FROM ci_builds LEFT JOIN projects ON ci_builds.project_id=projects.id", expect_error: /The query tried to access \["ci_builds", "projects"\]/, - setup: -> (_) { skip_if_multiple_databases_not_setup } + setup: -> (_) { skip_if_multiple_databases_not_setup(:ci) } }, "for query accessing main table from CI database" => { model: Ci::ApplicationRecord, sql: "SELECT 1 FROM projects", expect_error: /The query tried to access \["projects"\]/, - setup: -> (_) { skip_if_multiple_databases_not_setup } + setup: -> (_) { 
skip_if_multiple_databases_not_setup(:ci) } }, "for query accessing CI database" => { model: Ci::ApplicationRecord, @@ -51,13 +51,13 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection model: ::ApplicationRecord, sql: "SELECT 1 FROM ci_builds", expect_error: /The query tried to access \["ci_builds"\]/, - setup: -> (_) { skip_if_multiple_databases_not_setup } + setup: -> (_) { skip_if_multiple_databases_not_setup(:ci) } }, "for query accessing unknown gitlab_schema" => { model: ::ApplicationRecord, sql: "SELECT 1 FROM new_table", expect_error: /The query tried to access \["new_table"\] \(of undefined_new_table\)/, - setup: -> (_) { skip_if_multiple_databases_not_setup } + setup: -> (_) { skip_if_multiple_databases_not_setup(:ci) } } } end @@ -77,7 +77,7 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection context "when analyzer is enabled for tests", :query_analyzers do before do - skip_if_multiple_databases_not_setup + skip_if_multiple_databases_not_setup(:ci) end it "throws an error when trying to access a table that belongs to the gitlab_main schema from the ci database" do diff --git a/spec/lib/gitlab/database/reflection_spec.rb b/spec/lib/gitlab/database/reflection_spec.rb index 389e93364c8..779bdbe50f0 100644 --- a/spec/lib/gitlab/database/reflection_spec.rb +++ b/spec/lib/gitlab/database/reflection_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::Reflection do +RSpec.describe Gitlab::Database::Reflection, feature_category: :database do let(:database) { described_class.new(ApplicationRecord) } describe '#username' do diff --git a/spec/lib/gitlab/database/reindexing/coordinator_spec.rb b/spec/lib/gitlab/database/reindexing/coordinator_spec.rb index bf993e85cb8..9482700df5f 100644 --- a/spec/lib/gitlab/database/reindexing/coordinator_spec.rb +++ b/spec/lib/gitlab/database/reindexing/coordinator_spec.rb @@ -13,7 +13,7 @@ RSpec.describe 
Gitlab::Database::Reindexing::Coordinator, feature_category: :dat end let!(:lease) { stub_exclusive_lease(lease_key, uuid, timeout: lease_timeout) } - let(:lease_key) { "gitlab/database/indexing/actions/#{Gitlab::Database::PRIMARY_DATABASE_NAME}" } + let(:lease_key) { "gitlab/database/asyncddl/actions/#{Gitlab::Database::PRIMARY_DATABASE_NAME}" } let(:lease_timeout) { 1.day } let(:uuid) { 'uuid' } diff --git a/spec/lib/gitlab/database/reindexing_spec.rb b/spec/lib/gitlab/database/reindexing_spec.rb index 6575c92e313..a8af9bb5a38 100644 --- a/spec/lib/gitlab/database/reindexing_spec.rb +++ b/spec/lib/gitlab/database/reindexing_spec.rb @@ -68,6 +68,25 @@ RSpec.describe Gitlab::Database::Reindexing, feature_category: :database, time_t end end end + + context 'when async FK validation is enabled' do + it 'executes FK validation for each database prior to any reindexing actions' do + expect(Gitlab::Database::AsyncForeignKeys).to receive(:validate_pending_entries!).ordered.exactly(databases_count).times + expect(described_class).to receive(:automatic_reindexing).ordered.exactly(databases_count).times + + described_class.invoke + end + end + + context 'when async FK validation is disabled' do + it 'does not execute FK validation' do + stub_feature_flags(database_async_foreign_key_validation: false) + + expect(Gitlab::Database::AsyncForeignKeys).not_to receive(:validate_pending_entries!) 
+ + described_class.invoke + end + end end describe '.automatic_reindexing' do diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb index 1edcd890370..2cb84e2f02a 100644 --- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb +++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameBase, :delete do +RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameBase, :delete, feature_category: :subgroups do let(:migration) { FakeRenameReservedPathMigrationV1.new } let(:subject) { described_class.new(['the-path'], migration) } diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb index c507bce634e..5b5661020b0 100644 --- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb +++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_namespaces_spec.rb @@ -2,7 +2,8 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces, :delete do +RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces, :delete, +feature_category: :subgroups do let(:migration) { FakeRenameReservedPathMigrationV1.new } let(:subject) { described_class.new(['the-path'], migration) } let(:namespace) { create(:group, name: 'the-path') } diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb index aa2a3329477..787c9e87038 100644 --- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb +++ 
b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_projects_spec.rb @@ -2,7 +2,8 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProjects, :delete do +RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameProjects, :delete, +feature_category: :projects do let(:migration) { FakeRenameReservedPathMigrationV1.new } let(:subject) { described_class.new(['the-path'], migration) } let(:project) do diff --git a/spec/lib/gitlab/database/schema_migrations/context_spec.rb b/spec/lib/gitlab/database/schema_migrations/context_spec.rb index 07c97ea0ec3..6a614e2488f 100644 --- a/spec/lib/gitlab/database/schema_migrations/context_spec.rb +++ b/spec/lib/gitlab/database/schema_migrations/context_spec.rb @@ -17,7 +17,7 @@ RSpec.describe Gitlab::Database::SchemaMigrations::Context do let(:connection_class) { Ci::ApplicationRecord } it 'returns a directory path that is database specific' do - skip_if_multiple_databases_not_setup + skip_if_multiple_databases_not_setup(:ci) expect(context.schema_directory).to eq(File.join(Rails.root, described_class.default_schema_migrations_path)) end @@ -40,7 +40,7 @@ RSpec.describe Gitlab::Database::SchemaMigrations::Context do end it 'returns a configured directory path that' do - skip_if_multiple_databases_not_setup + skip_if_multiple_databases_not_setup(:ci) expect(context.schema_directory).to eq(File.join(Rails.root, 'db/ci_schema_migrations')) end @@ -52,7 +52,7 @@ RSpec.describe Gitlab::Database::SchemaMigrations::Context do end it 'returns a configured directory path that' do - skip_if_multiple_databases_not_setup + skip_if_multiple_databases_not_setup(:ci) expect(context.schema_directory).to eq(File.join(Rails.root, 'db/ci_schema_migrations')) end diff --git a/spec/lib/gitlab/database/schema_validation/database_spec.rb b/spec/lib/gitlab/database/schema_validation/database_spec.rb new file mode 100644 index 00000000000..c0026f91b46 --- /dev/null +++ 
b/spec/lib/gitlab/database/schema_validation/database_spec.rb @@ -0,0 +1,45 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::Database, feature_category: :database do + let(:database_name) { 'main' } + let(:database_indexes) do + [['index', 'CREATE UNIQUE INDEX "index" ON public.achievements USING btree (namespace_id, lower(name))']] + end + + let(:query_result) { instance_double('ActiveRecord::Result', rows: database_indexes) } + let(:database_model) { Gitlab::Database.database_base_models[database_name] } + let(:connection) { database_model.connection } + + subject(:database) { described_class.new(connection) } + + before do + allow(connection).to receive(:exec_query).and_return(query_result) + end + + describe '#fetch_index_by_name' do + context 'when index does not exist' do + it 'returns nil' do + index = database.fetch_index_by_name('non_existing_index') + + expect(index).to be_nil + end + end + + it 'returns index by name' do + index = database.fetch_index_by_name('index') + + expect(index.name).to eq('index') + end + end + + describe '#indexes' do + it 'returns indexes' do + indexes = database.indexes + + expect(indexes).to all(be_a(Gitlab::Database::SchemaValidation::Index)) + expect(indexes.map(&:name)).to eq(['index']) + end + end +end diff --git a/spec/lib/gitlab/database/schema_validation/index_spec.rb b/spec/lib/gitlab/database/schema_validation/index_spec.rb new file mode 100644 index 00000000000..297211d79ed --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/index_spec.rb @@ -0,0 +1,22 @@ +# frozen_string_literal: true +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::Index, feature_category: :database do + let(:index_statement) { 'CREATE INDEX index_name ON public.achievements USING btree (namespace_id)' } + + let(:stmt) { PgQuery.parse(index_statement).tree.stmts.first.stmt.index_stmt } + + let(:index) { described_class.new(stmt) } + + describe 
'#name' do + it 'returns index name' do + expect(index.name).to eq('index_name') + end + end + + describe '#statement' do + it 'returns index statement' do + expect(index.statement).to eq(index_statement) + end + end +end diff --git a/spec/lib/gitlab/database/schema_validation/indexes_spec.rb b/spec/lib/gitlab/database/schema_validation/indexes_spec.rb new file mode 100644 index 00000000000..4351031a4b4 --- /dev/null +++ b/spec/lib/gitlab/database/schema_validation/indexes_spec.rb @@ -0,0 +1,56 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::SchemaValidation::Indexes, feature_category: :database do + let(:structure_file_path) { Rails.root.join('spec/fixtures/structure.sql') } + let(:database_indexes) do + [ + ['wrong_index', 'CREATE UNIQUE INDEX wrong_index ON public.table_name (column_name)'], + ['extra_index', 'CREATE INDEX extra_index ON public.table_name (column_name)'], + ['index', 'CREATE UNIQUE INDEX "index" ON public.achievements USING btree (namespace_id, lower(name))'] + ] + end + + let(:database_name) { 'main' } + + let(:database_model) { Gitlab::Database.database_base_models[database_name] } + + let(:connection) { database_model.connection } + + let(:query_result) { instance_double('ActiveRecord::Result', rows: database_indexes) } + + let(:database) { Gitlab::Database::SchemaValidation::Database.new(connection) } + let(:structure_file) { Gitlab::Database::SchemaValidation::StructureSql.new(structure_file_path) } + + subject(:schema_validation) { described_class.new(structure_file, database) } + + before do + allow(connection).to receive(:exec_query).and_return(query_result) + end + + describe '#missing_indexes' do + it 'returns missing indexes' do + missing_indexes = %w[ + missing_index + index_namespaces_public_groups_name_id + index_on_deploy_keys_id_and_type_and_public + index_users_on_public_email_excluding_null_and_empty + ] + + expect(schema_validation.missing_indexes).to match_array(missing_indexes) 
+ end + end + + describe '#extra_indexes' do + it 'returns extra indexes' do + expect(schema_validation.extra_indexes).to match_array(['extra_index']) + end + end + + describe '#wrong_indexes' do + it 'returns wrong indexes' do + expect(schema_validation.wrong_indexes).to match_array(['wrong_index']) + end + end +end diff --git a/spec/lib/gitlab/database/shared_model_spec.rb b/spec/lib/gitlab/database/shared_model_spec.rb index 7e0ba3397d1..2ae6ccf6c6a 100644 --- a/spec/lib/gitlab/database/shared_model_spec.rb +++ b/spec/lib/gitlab/database/shared_model_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::SharedModel do +RSpec.describe Gitlab::Database::SharedModel, feature_category: :database do describe 'using an external connection' do let!(:original_connection) { described_class.connection } let(:new_connection) { double('connection') } @@ -85,28 +85,51 @@ RSpec.describe Gitlab::Database::SharedModel do end end end - - def expect_original_connection_around - # For safety, ensure our original connection is distinct from our double - # This should be the case, but in case of something leaking we should verify - expect(original_connection).not_to be(new_connection) - expect(described_class.connection).to be(original_connection) - - yield - - expect(described_class.connection).to be(original_connection) - end end describe '#connection_db_config' do - it 'returns the class connection_db_config' do - shared_model_class = Class.new(described_class) do + let!(:original_connection) { shared_model_class.connection } + let!(:original_connection_db_config) { shared_model_class.connection_db_config } + let(:shared_model) { shared_model_class.new } + let(:shared_model_class) do + Class.new(described_class) do self.table_name = 'postgres_async_indexes' end + end - shared_model = shared_model_class.new - + it 'returns the class connection_db_config' do expect(shared_model.connection_db_config).to eq(described_class.connection_db_config) end + + 
context 'when switching the class connection' do + before do + skip_if_multiple_databases_not_setup + end + + let(:new_base_model) { Ci::ApplicationRecord } + let(:new_connection) { new_base_model.connection } + + it 'returns the db_config of the used connection when using load balancing' do + expect_original_connection_around do + described_class.using_connection(new_connection) do + expect(shared_model.connection_db_config).to eq(new_base_model.connection_db_config) + end + end + + # it restores the connection_db_config afterwards + expect(shared_model.connection_db_config).to eq(original_connection_db_config) + end + end + end + + def expect_original_connection_around + # For safety, ensure our original connection is distinct from our double + # This should be the case, but in case of something leaking we should verify + expect(original_connection).not_to be(new_connection) + expect(described_class.connection).to be(original_connection) + + yield + + expect(described_class.connection).to be(original_connection) end end diff --git a/spec/lib/gitlab/database/tables_locker_spec.rb b/spec/lib/gitlab/database/tables_locker_spec.rb new file mode 100644 index 00000000000..d74f455eaad --- /dev/null +++ b/spec/lib/gitlab/database/tables_locker_spec.rb @@ -0,0 +1,226 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::TablesLocker, :reestablished_active_record_base, :delete, :silence_stdout, + :suppress_gitlab_schemas_validate_connection, feature_category: :pods do + let(:detached_partition_table) { '_test_gitlab_main_part_20220101' } + let(:lock_writes_manager) do + instance_double(Gitlab::Database::LockWritesManager, lock_writes: nil, unlock_writes: nil) + end + + before do + allow(Gitlab::Database::LockWritesManager).to receive(:new).with(any_args).and_return(lock_writes_manager) + end + + before(:all) do + create_detached_partition_sql = <<~SQL + CREATE TABLE IF NOT EXISTS 
gitlab_partitions_dynamic._test_gitlab_main_part_20220101 ( + id bigserial primary key not null + ) + SQL + + ApplicationRecord.connection.execute(create_detached_partition_sql) + Ci::ApplicationRecord.connection.execute(create_detached_partition_sql) + + Gitlab::Database::SharedModel.using_connection(ApplicationRecord.connection) do + Postgresql::DetachedPartition.create!( + table_name: '_test_gitlab_main_part_20220101', + drop_after: Time.current + ) + end + end + + after(:all) do + drop_detached_partition_sql = <<~SQL + DROP TABLE IF EXISTS gitlab_partitions_dynamic._test_gitlab_main_part_20220101 + SQL + + ApplicationRecord.connection.execute(drop_detached_partition_sql) + Ci::ApplicationRecord.connection.execute(drop_detached_partition_sql) + + Gitlab::Database::SharedModel.using_connection(ApplicationRecord.connection) do + Postgresql::DetachedPartition.delete_all + end + end + + shared_examples "lock tables" do |table_schema, database_name| + let(:table_name) do + Gitlab::Database::GitlabSchema + .tables_to_schema.filter_map { |table_name, schema| table_name if schema == table_schema } + .first + end + + let(:database) { database_name } + + it "locks table in schema #{table_schema} and database #{database_name}" do + expect(Gitlab::Database::LockWritesManager).to receive(:new).with( + table_name: table_name, + connection: anything, + database_name: database, + with_retries: true, + logger: anything, + dry_run: anything + ).once.and_return(lock_writes_manager) + expect(lock_writes_manager).to receive(:lock_writes) + + subject + end + end + + shared_examples "unlock tables" do |table_schema, database_name| + let(:table_name) do + Gitlab::Database::GitlabSchema + .tables_to_schema.filter_map { |table_name, schema| table_name if schema == table_schema } + .first + end + + let(:database) { database_name } + + it "unlocks table in schema #{table_schema} and database #{database_name}" do + expect(Gitlab::Database::LockWritesManager).to receive(:new).with( + 
table_name: table_name, + connection: anything, + database_name: database, + with_retries: true, + logger: anything, + dry_run: anything + ).once.and_return(lock_writes_manager) + expect(lock_writes_manager).to receive(:unlock_writes) + + subject + end + end + + context 'when running on single database' do + before do + skip_if_multiple_databases_are_setup(:ci) + end + + describe '#lock_writes' do + subject { described_class.new.lock_writes } + + it 'does not call Gitlab::Database::LockWritesManager.lock_writes' do + expect(Gitlab::Database::LockWritesManager).to receive(:new).with(any_args).and_return(lock_writes_manager) + expect(lock_writes_manager).not_to receive(:lock_writes) + + subject + end + + include_examples "unlock tables", :gitlab_main, 'main' + include_examples "unlock tables", :gitlab_ci, 'ci' + include_examples "unlock tables", :gitlab_shared, 'main' + include_examples "unlock tables", :gitlab_internal, 'main' + end + + describe '#unlock_writes' do + subject { described_class.new.lock_writes } + + it 'does call Gitlab::Database::LockWritesManager.unlock_writes' do + expect(Gitlab::Database::LockWritesManager).to receive(:new).with(any_args).and_return(lock_writes_manager) + expect(lock_writes_manager).to receive(:unlock_writes) + + subject + end + end + end + + context 'when running on multiple databases' do + before do + skip_if_multiple_databases_not_setup(:ci) + end + + describe '#lock_writes' do + subject { described_class.new.lock_writes } + + include_examples "lock tables", :gitlab_ci, 'main' + include_examples "lock tables", :gitlab_main, 'ci' + + include_examples "unlock tables", :gitlab_main, 'main' + include_examples "unlock tables", :gitlab_ci, 'ci' + include_examples "unlock tables", :gitlab_shared, 'main' + include_examples "unlock tables", :gitlab_shared, 'ci' + include_examples "unlock tables", :gitlab_internal, 'main' + include_examples "unlock tables", :gitlab_internal, 'ci' + end + + describe '#unlock_writes' do + subject { 
described_class.new.unlock_writes } + + include_examples "unlock tables", :gitlab_ci, 'main' + include_examples "unlock tables", :gitlab_main, 'ci' + include_examples "unlock tables", :gitlab_main, 'main' + include_examples "unlock tables", :gitlab_ci, 'ci' + include_examples "unlock tables", :gitlab_shared, 'main' + include_examples "unlock tables", :gitlab_shared, 'ci' + include_examples "unlock tables", :gitlab_internal, 'main' + include_examples "unlock tables", :gitlab_internal, 'ci' + end + + context 'when running in dry_run mode' do + subject { described_class.new(dry_run: true).lock_writes } + + it 'passes dry_run flag to LockManger' do + expect(Gitlab::Database::LockWritesManager).to receive(:new).with( + table_name: 'users', + connection: anything, + database_name: 'ci', + with_retries: true, + logger: anything, + dry_run: true + ).and_return(lock_writes_manager) + expect(lock_writes_manager).to receive(:lock_writes) + + subject + end + end + + context 'when running on multiple shared databases' do + subject { described_class.new.lock_writes } + + before do + allow(::Gitlab::Database).to receive(:db_config_share_with).and_return(nil) + ci_db_config = Ci::ApplicationRecord.connection_db_config + allow(::Gitlab::Database).to receive(:db_config_share_with).with(ci_db_config).and_return('main') + end + + it 'does not lock any tables if the ci database is shared with main database' do + expect(Gitlab::Database::LockWritesManager).to receive(:new).with(any_args).and_return(lock_writes_manager) + expect(lock_writes_manager).not_to receive(:lock_writes) + + subject + end + end + end + + context 'when geo database is configured' do + let(:geo_table) do + Gitlab::Database::GitlabSchema + .tables_to_schema.filter_map { |table_name, schema| table_name if schema == :gitlab_geo } + .first + end + + subject { described_class.new.unlock_writes } + + before do + skip "Geo database is not configured" unless Gitlab::Database.has_config?(:geo) + end + + it 'does not lock 
table in geo database' do + expect(Gitlab::Database::LockWritesManager).not_to receive(:new).with( + table_name: geo_table, + connection: anything, + database_name: 'geo', + with_retries: true, + logger: anything, + dry_run: anything + ) + + subject + end + end +end + +def number_of_triggers(connection) + connection.select_value("SELECT count(*) FROM information_schema.triggers") +end diff --git a/spec/lib/gitlab/database/tables_truncate_spec.rb b/spec/lib/gitlab/database/tables_truncate_spec.rb index 9af0b964221..3bb2f4e982c 100644 --- a/spec/lib/gitlab/database/tables_truncate_spec.rb +++ b/spec/lib/gitlab/database/tables_truncate_spec.rb @@ -48,7 +48,7 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba end before do - skip_if_multiple_databases_not_setup + skip_if_multiple_databases_not_setup(:ci) # Creating some test tables on the main database main_tables_sql = <<~SQL @@ -313,7 +313,7 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba context 'when running with multiple shared databases' do before do - skip_if_multiple_databases_not_setup + skip_if_multiple_databases_not_setup(:ci) ci_db_config = Ci::ApplicationRecord.connection_db_config allow(::Gitlab::Database).to receive(:db_config_share_with).with(ci_db_config).and_return('main') end @@ -333,7 +333,7 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba context 'when running in a single database mode' do before do - skip_if_multiple_databases_are_setup + skip_if_multiple_databases_are_setup(:ci) end it 'raises an error when truncating the main database that it is a single database setup' do diff --git a/spec/lib/gitlab/database/transaction/observer_spec.rb b/spec/lib/gitlab/database/transaction/observer_spec.rb index 074c18d406e..d1cb014a594 100644 --- a/spec/lib/gitlab/database/transaction/observer_spec.rb +++ b/spec/lib/gitlab/database/transaction/observer_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' 
-RSpec.describe Gitlab::Database::Transaction::Observer do +RSpec.describe Gitlab::Database::Transaction::Observer, feature_category: :database do # Use the delete DB strategy so that the test won't be wrapped in a transaction describe '.instrument_transactions', :delete do let(:transaction_context) { ActiveRecord::Base.connection.transaction_manager.transaction_context } diff --git a/spec/lib/gitlab/database/transaction_timeout_settings_spec.rb b/spec/lib/gitlab/database/transaction_timeout_settings_spec.rb new file mode 100644 index 00000000000..5b68f9a3757 --- /dev/null +++ b/spec/lib/gitlab/database/transaction_timeout_settings_spec.rb @@ -0,0 +1,37 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::TransactionTimeoutSettings, feature_category: :pods do + let(:connection) { ActiveRecord::Base.connection } + + subject { described_class.new(connection) } + + after(:all) do + described_class.new(ActiveRecord::Base.connection).restore_timeouts + end + + describe '#disable_timeouts' do + it 'sets timeouts to 0' do + subject.disable_timeouts + + expect(current_timeout).to eq("0") + end + end + + describe '#restore_timeouts' do + before do + subject.disable_timeouts + end + + it 'resets value' do + expect(connection).to receive(:execute).with('RESET idle_in_transaction_session_timeout').and_call_original + + subject.restore_timeouts + end + end + + def current_timeout + connection.execute("show idle_in_transaction_session_timeout").first['idle_in_transaction_session_timeout'] + end +end diff --git a/spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb b/spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb index 836332524a9..9ccae754a92 100644 --- a/spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb +++ b/spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 
Gitlab::Database::WithLockRetriesOutsideTransaction do +RSpec.describe Gitlab::Database::WithLockRetriesOutsideTransaction, feature_category: :database do let(:env) { {} } let(:logger) { Gitlab::Database::WithLockRetries::NULL_LOGGER } let(:subject) { described_class.new(connection: connection, env: env, logger: logger, timing_configuration: timing_configuration) } diff --git a/spec/lib/gitlab/database/with_lock_retries_spec.rb b/spec/lib/gitlab/database/with_lock_retries_spec.rb index 797a01c482d..7fe6362634b 100644 --- a/spec/lib/gitlab/database/with_lock_retries_spec.rb +++ b/spec/lib/gitlab/database/with_lock_retries_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Database::WithLockRetries do +RSpec.describe Gitlab::Database::WithLockRetries, feature_category: :database do let(:env) { {} } let(:logger) { Gitlab::Database::WithLockRetries::NULL_LOGGER } let(:subject) { described_class.new(connection: connection, env: env, logger: logger, allow_savepoints: allow_savepoints, timing_configuration: timing_configuration) } diff --git a/spec/lib/gitlab/database_importers/self_monitoring/project/delete_service_spec.rb b/spec/lib/gitlab/database_importers/self_monitoring/project/delete_service_spec.rb index d67e50a50d4..d878d46c883 100644 --- a/spec/lib/gitlab/database_importers/self_monitoring/project/delete_service_spec.rb +++ b/spec/lib/gitlab/database_importers/self_monitoring/project/delete_service_spec.rb @@ -38,6 +38,8 @@ RSpec.describe Gitlab::DatabaseImporters::SelfMonitoring::Project::DeleteService it 'deletes project ID from application settings' do subject.execute + LooseForeignKeys::ProcessDeletedRecordsService.new(connection: Project.connection).execute + expect(application_setting.reload.self_monitoring_project_id).to be_nil end diff --git a/spec/lib/gitlab/database_importers/work_items/base_type_importer_spec.rb b/spec/lib/gitlab/database_importers/work_items/base_type_importer_spec.rb index d044170dc75..3b6d10f4a7e 100644 --- 
a/spec/lib/gitlab/database_importers/work_items/base_type_importer_spec.rb +++ b/spec/lib/gitlab/database_importers/work_items/base_type_importer_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::DatabaseImporters::WorkItems::BaseTypeImporter do +RSpec.describe Gitlab::DatabaseImporters::WorkItems::BaseTypeImporter, feature_category: :team_planning do subject { described_class.upsert_types } it_behaves_like 'work item base types importer' diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb index 86bc8e71fd7..26d6ff431ec 100644 --- a/spec/lib/gitlab/database_spec.rb +++ b/spec/lib/gitlab/database_spec.rb @@ -33,20 +33,31 @@ RSpec.describe Gitlab::Database do describe '.has_config?' do context 'three tier database config' do - before do - allow(Gitlab::Application).to receive_message_chain(:config, :database_configuration, :[]).with(Rails.env) - .and_return({ - "primary" => { "adapter" => "postgresql", "database" => "gitlabhq_test" }, - "ci" => { "adapter" => "postgresql", "database" => "gitlabhq_test_ci" } - }) + it 'returns true for main' do + expect(described_class.has_config?(:main)).to eq(true) end - it 'returns true for primary' do - expect(described_class.has_config?(:primary)).to eq(true) - end + context 'ci' do + before do + # CI config might not be configured + allow(ActiveRecord::Base.configurations).to receive(:configs_for) + .with(env_name: 'test', name: 'ci', include_replicas: true) + .and_return(ci_db_config) + end + + let(:ci_db_config) { instance_double('ActiveRecord::DatabaseConfigurations::HashConfig') } + + it 'returns true for ci' do + expect(described_class.has_config?(:ci)).to eq(true) + end - it 'returns true for ci' do - expect(described_class.has_config?(:ci)).to eq(true) + context 'ci database.yml not configured' do + let(:ci_db_config) { nil } + + it 'returns false for ci' do + expect(described_class.has_config?(:ci)).to eq(false) + end + end end it 'returns false for non-existent' do @@ 
-189,7 +200,7 @@ RSpec.describe Gitlab::Database do end it 'returns the ci_replica for a ci database replica' do - skip_if_multiple_databases_not_setup + skip_if_multiple_databases_not_setup(:ci) replica = Ci::ApplicationRecord.load_balancer.host expect(described_class.db_config_name(replica)).to eq('ci_replica') end @@ -256,7 +267,7 @@ RSpec.describe Gitlab::Database do context "when there's CI connection" do before do - skip_if_multiple_databases_not_setup + skip_if_multiple_databases_not_setup(:ci) end context 'when CI uses database_tasks: false does indicate that ci: is subset of main:' do @@ -516,4 +527,33 @@ RSpec.describe Gitlab::Database do end end end + + describe '.read_minimum_migration_version' do + before do + allow(Dir).to receive(:open).with(Rails.root.join('db/migrate')).and_return(migration_files) + end + + context 'valid migration files exist' do + let(:migration_files) do + [ + '20211004170422_init_schema.rb', + '20211005182304_add_users.rb' + ] + end + + let(:valid_schema) { 20211004170422 } + + it 'finds the correct ID' do + expect(described_class.read_minimum_migration_version).to eq valid_schema + end + end + + context 'no valid migration files exist' do + let(:migration_files) { ['readme.txt', 'INSTALL'] } + + it 'returns nil' do + expect(described_class.read_minimum_migration_version).to be_nil + end + end + end end diff --git a/spec/lib/gitlab/deploy_key_access_spec.rb b/spec/lib/gitlab/deploy_key_access_spec.rb index 83b97c8ba25..e32858cc13f 100644 --- a/spec/lib/gitlab/deploy_key_access_spec.rb +++ b/spec/lib/gitlab/deploy_key_access_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::DeployKeyAccess do +RSpec.describe Gitlab::DeployKeyAccess, feature_category: :source_code_management do let_it_be(:user) { create(:user) } let_it_be(:deploy_key) { create(:deploy_key, user: user) } @@ -17,10 +17,30 @@ RSpec.describe Gitlab::DeployKeyAccess do end describe '#can_create_tag?' 
do + let!(:protected_tag) { create(:protected_tag, :no_one_can_create, project: project, name: 'v*') } + + context 'when no-one can create tag' do + it 'returns false' do + expect(access.can_create_tag?('v0.1.2')).to be_falsey + end + + context 'when deploy_key_for_protected_tags FF is disabled' do + before do + stub_feature_flags(deploy_key_for_protected_tags: false) + end + + it 'allows to push the tag' do + expect(access.can_create_tag?('v0.1.2')).to be_truthy + end + end + end + context 'push tag that matches a protected tag pattern via a deploy key' do - it 'still pushes that tag' do - create(:protected_tag, project: project, name: 'v*') + before do + create(:protected_tag_create_access_level, protected_tag: protected_tag, deploy_key: deploy_key) + end + it 'allows to push the tag' do expect(access.can_create_tag?('v0.1.2')).to be_truthy end end diff --git a/spec/lib/gitlab/email/html_to_markdown_parser_spec.rb b/spec/lib/gitlab/email/html_to_markdown_parser_spec.rb new file mode 100644 index 00000000000..fe585d47d59 --- /dev/null +++ b/spec/lib/gitlab/email/html_to_markdown_parser_spec.rb @@ -0,0 +1,46 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Email::HtmlToMarkdownParser, feature_category: :service_desk do + subject { described_class.convert(html) } + + describe '.convert' do + let(:html) { fixture_file("lib/gitlab/email/basic.html") } + + it 'parses html correctly' do + expect(subject) + .to eq( + <<-BODY.strip_heredoc.chomp + Hello, World! + This is some e-mail content. Even though it has whitespace and newlines, the e-mail converter will handle it correctly. + *Even* mismatched tags. 
+ A div + Another div + A div + **within** a div + + Another line + Yet another line + [A link](http://foo.com) + <details> + <summary> + One</summary> + Some details</details> + + <details> + <summary> + Two</summary> + Some details</details> + + ![Miro](http://img.png) + Col A Col B + Data A1 Data B1 + Data A2 Data B2 + Data A3 Data B4 + Total A Total B + BODY + ) + end + end +end diff --git a/spec/lib/gitlab/email/reply_parser_spec.rb b/spec/lib/gitlab/email/reply_parser_spec.rb index c61d941406b..e4c68dbba92 100644 --- a/spec/lib/gitlab/email/reply_parser_spec.rb +++ b/spec/lib/gitlab/email/reply_parser_spec.rb @@ -63,6 +63,18 @@ RSpec.describe Gitlab::Email::ReplyParser do ) end + it "properly renders html-only email with table and blockquote" do + expect(test_parse_body(fixture_file("emails/html_table_and_blockquote.eml"))) + .to eq( + <<-BODY.strip_heredoc.chomp + Company Contact Country + Alfreds Futterkiste Maria Anders Germany + Centro comercial Moctezuma Francisco Chang Mexico + Words can be like X-rays, if you use them properly—they’ll go through anything. You read and you’re pierced. + BODY + ) + end + it "supports a Dutch reply" do expect(test_parse_body(fixture_file("emails/dutch.eml"))).to eq("Dit is een antwoord in het Nederlands.") end @@ -176,6 +188,70 @@ RSpec.describe Gitlab::Email::ReplyParser do ) end + context 'properly renders email reply from gmail web client' do + context 'when feature flag is enabled' do + it do + expect(test_parse_body(fixture_file("emails/html_only.eml"))) + .to eq( + <<-BODY.strip_heredoc.chomp + ### This is a reply from standard GMail in Google Chrome. + + The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. 
+ + Here's some **bold** text, **strong** text and *italic* in Markdown. + + Here's a link http://example.com + + Here's an img ![Miro](http://img.png)<details> + <summary> + One</summary> + Some details</details> + + <details> + <summary> + Two</summary> + Some details</details> + + Test reply. + + First paragraph. + + Second paragraph. + BODY + ) + end + end + + context 'when feature flag is disabled' do + before do + stub_feature_flags(service_desk_html_to_text_email_handler: false) + end + + it do + expect(test_parse_body(fixture_file("emails/html_only.eml"))) + .to eq( + <<-BODY.strip_heredoc.chomp + ### This is a reply from standard GMail in Google Chrome. + + The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. + + Here's some **bold** text, strong text and italic in Markdown. + + Here's a link http://example.com + + Here's an img [Miro]One Some details Two Some details + + Test reply. + + First paragraph. + + Second paragraph. 
+ BODY + ) + end + end + end + it "properly renders email reply from iOS default mail client" do expect(test_parse_body(fixture_file("emails/ios_default.eml"))) .to eq( diff --git a/spec/lib/gitlab/encoding_helper_spec.rb b/spec/lib/gitlab/encoding_helper_spec.rb index c62e3071fc1..bc72d1a67d6 100644 --- a/spec/lib/gitlab/encoding_helper_spec.rb +++ b/spec/lib/gitlab/encoding_helper_spec.rb @@ -283,4 +283,12 @@ RSpec.describe Gitlab::EncodingHelper do expect(described_class.unquote_path('"\a\b\e\f\n\r\t\v\""')).to eq("\a\b\e\f\n\r\t\v\"") end end + + describe '#strip_bom' do + it do + expect(described_class.strip_bom('no changes')).to eq('no changes') + expect(described_class.strip_bom("\xEF\xBB\xBFhello world")).to eq('hello world') + expect(described_class.strip_bom("BOM at the end\xEF\xBB\xBF")).to eq("BOM at the end\xEF\xBB\xBF") + end + end end diff --git a/spec/lib/gitlab/error_tracking_spec.rb b/spec/lib/gitlab/error_tracking_spec.rb index 5eedd716a4a..0f056ee9eac 100644 --- a/spec/lib/gitlab/error_tracking_spec.rb +++ b/spec/lib/gitlab/error_tracking_spec.rb @@ -490,4 +490,28 @@ RSpec.describe Gitlab::ErrorTracking do end end end + + context 'Sentry performance monitoring' do + context 'when ENABLE_SENTRY_PERFORMANCE_MONITORING env is disabled' do + before do + stub_env('ENABLE_SENTRY_PERFORMANCE_MONITORING', false) + described_class.configure_sentry # Force re-initialization to reset traces_sample_rate setting + end + + it 'does not set traces_sample_rate' do + expect(Sentry.get_current_client.configuration.traces_sample_rate.present?).to eq false + end + end + + context 'when ENABLE_SENTRY_PERFORMANCE_MONITORING env is enabled' do + before do + stub_env('ENABLE_SENTRY_PERFORMANCE_MONITORING', true) + described_class.configure_sentry # Force re-initialization to reset traces_sample_rate setting + end + + it 'sets traces_sample_rate' do + expect(Sentry.get_current_client.configuration.traces_sample_rate.present?).to eq true + end + end + end end diff --git 
a/spec/lib/gitlab/etag_caching/middleware_spec.rb b/spec/lib/gitlab/etag_caching/middleware_spec.rb index da5eaf2e4ab..fa0b3d1c6dd 100644 --- a/spec/lib/gitlab/etag_caching/middleware_spec.rb +++ b/spec/lib/gitlab/etag_caching/middleware_spec.rb @@ -123,7 +123,15 @@ RSpec.describe Gitlab::EtagCaching::Middleware, :clean_gitlab_redis_shared_state format: :html, method: 'GET', path: enabled_path, - status: status_code + status: status_code, + request_urgency: :medium, + target_duration_s: 0.5, + metadata: a_hash_including( + { + 'meta.caller_id' => 'Projects::NotesController#index', + 'meta.feature_category' => 'team_planning' + } + ) } end @@ -172,10 +180,11 @@ RSpec.describe Gitlab::EtagCaching::Middleware, :clean_gitlab_redis_shared_state expect(headers).to include('X-Gitlab-From-Cache' => 'true') end - it "pushes route's feature category to the context" do + it "pushes expected information in to the context" do expect(Gitlab::ApplicationContext).to receive(:push).with( feature_category: 'team_planning', - caller_id: 'Projects::NotesController#index' + caller_id: 'Projects::NotesController#index', + remote_ip: '127.0.0.1' ) _, _, _ = middleware.call(build_request(path, if_none_match)) @@ -291,7 +300,8 @@ RSpec.describe Gitlab::EtagCaching::Middleware, :clean_gitlab_redis_shared_state { 'PATH_INFO' => path, 'HTTP_IF_NONE_MATCH' => if_none_match, 'rack.input' => '', - 'REQUEST_METHOD' => 'GET' } + 'REQUEST_METHOD' => 'GET', + 'REMOTE_ADDR' => '127.0.0.1' } end def payload_for(event) diff --git a/spec/lib/gitlab/etag_caching/router/graphql_spec.rb b/spec/lib/gitlab/etag_caching/router/graphql_spec.rb index 792f02f8cda..fae468ab84f 100644 --- a/spec/lib/gitlab/etag_caching/router/graphql_spec.rb +++ b/spec/lib/gitlab/etag_caching/router/graphql_spec.rb @@ -18,6 +18,12 @@ RSpec.describe Gitlab::EtagCaching::Router::Graphql do end end + it 'applies the default urgency for every route', :aggregate_failures do + described_class::ROUTES.each do |route| + 
expect(route.urgency).to be(Gitlab::EndpointAttributes::DEFAULT_URGENCY) + end + end + def match_route(path, header) described_class.match( double(path_info: path, diff --git a/spec/lib/gitlab/etag_caching/router/rails_spec.rb b/spec/lib/gitlab/etag_caching/router/rails_spec.rb index da6c11e3cb1..251f634aac1 100644 --- a/spec/lib/gitlab/etag_caching/router/rails_spec.rb +++ b/spec/lib/gitlab/etag_caching/router/rails_spec.rb @@ -109,17 +109,23 @@ RSpec.describe Gitlab::EtagCaching::Router::Rails do it 'has a valid feature category for every route', :aggregate_failures do feature_categories = Gitlab::FeatureCategories.default.categories - described_class::ROUTES.each do |route| + described_class.all_routes.each do |route| expect(feature_categories).to include(route.feature_category), "#{route.name} has a category of #{route.feature_category}, which is not valid" end end it 'has a caller_id for every route', :aggregate_failures do - described_class::ROUTES.each do |route| + described_class.all_routes.each do |route| expect(route.caller_id).to include('#'), "#{route.name} has caller_id #{route.caller_id}, which is not valid" end end + it 'has an urgency for every route', :aggregate_failures do + described_class.all_routes.each do |route| + expect(route.urgency).to be_an_instance_of(Gitlab::EndpointAttributes::Config::RequestUrgency) + end + end + def match_route(path) described_class.match(double(path_info: path)) end diff --git a/spec/lib/gitlab/etag_caching/router_spec.rb b/spec/lib/gitlab/etag_caching/router_spec.rb index 8d2183bc03d..bc07f9a99a5 100644 --- a/spec/lib/gitlab/etag_caching/router_spec.rb +++ b/spec/lib/gitlab/etag_caching/router_spec.rb @@ -11,6 +11,7 @@ RSpec.describe Gitlab::EtagCaching::Router do expect(result).to be_present expect(result.name).to eq 'project_pipelines' expect(result.router).to eq Gitlab::EtagCaching::Router::Rails + expect(result.urgency).to eq Projects::PipelinesController.urgency_for_action(:index) end end @@ -21,6 +22,7 @@ 
RSpec.describe Gitlab::EtagCaching::Router do expect(result).to be_present expect(result.name).to eq 'pipelines_graph' expect(result.router).to eq Gitlab::EtagCaching::Router::Graphql + expect(result.urgency).to eq ::Gitlab::EndpointAttributes::DEFAULT_URGENCY end it 'matches pipeline sha endpoint' do diff --git a/spec/lib/gitlab/external_authorization/config_spec.rb b/spec/lib/gitlab/external_authorization/config_spec.rb new file mode 100644 index 00000000000..4231b0d3747 --- /dev/null +++ b/spec/lib/gitlab/external_authorization/config_spec.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::ExternalAuthorization::Config, feature_category: :authentication_and_authorization do + it 'allows deploy tokens and keys when external authorization is disabled' do + stub_application_setting(external_authorization_service_enabled: false) + expect(described_class.allow_deploy_tokens_and_deploy_keys?).to be_eql(true) + end + + context 'when external authorization is enabled' do + it 'disable deploy tokens and keys' do + stub_application_setting(external_authorization_service_enabled: true) + expect(described_class.allow_deploy_tokens_and_deploy_keys?).to be_eql(false) + end + + it "enable deploy tokens and keys when it is explicitly enabled and service url is blank" do + stub_application_setting(external_authorization_service_enabled: true) + stub_application_setting(allow_deploy_tokens_and_keys_with_external_authn: true) + expect(described_class.allow_deploy_tokens_and_deploy_keys?).to be_eql(true) + end + end +end diff --git a/spec/lib/gitlab/file_finder_spec.rb b/spec/lib/gitlab/file_finder_spec.rb index 0b5303f22b4..27750f10e87 100644 --- a/spec/lib/gitlab/file_finder_spec.rb +++ b/spec/lib/gitlab/file_finder_spec.rb @@ -2,9 +2,9 @@ require 'spec_helper' -RSpec.describe Gitlab::FileFinder do +RSpec.describe Gitlab::FileFinder, feature_category: :global_search do describe '#find' do - let(:project) { create(:project, 
:public, :repository) } + let_it_be(:project) { create(:project, :public, :repository) } subject { described_class.new(project, project.default_branch) } @@ -13,58 +13,124 @@ RSpec.describe Gitlab::FileFinder do let(:expected_file_by_content) { 'CHANGELOG' } end - context 'with inclusive filters' do - it 'filters by filename' do - results = subject.find('files filename:wm.svg') + context 'when code_basic_search_files_by_regexp is enabled' do + before do + stub_feature_flags(code_basic_search_files_by_regexp: true) + end + + context 'with inclusive filters' do + it 'filters by filename' do + results = subject.find('files filename:wm.svg') + + expect(results.count).to eq(1) + end + + it 'filters by path' do + results = subject.find('white path:images') - expect(results.count).to eq(1) + expect(results.count).to eq(2) + end + + it 'filters by extension' do + results = subject.find('files extension:md') + + expect(results.count).to eq(4) + end end - it 'filters by path' do - results = subject.find('white path:images') + context 'with exclusive filters' do + it 'filters by filename' do + results = subject.find('files -filename:wm.svg') + + expect(results.count).to eq(26) + end - expect(results.count).to eq(1) + it 'filters by path' do + results = subject.find('white -path:images') + + expect(results.count).to eq(5) + end + + it 'filters by extension' do + results = subject.find('files -extension:md') + + expect(results.count).to eq(23) + end end - it 'filters by extension' do - results = subject.find('files extension:md') + context 'with white space in the path' do + it 'filters by path correctly' do + results = subject.find('directory path:"with space/README.md"') - expect(results.count).to eq(4) + expect(results.count).to eq(1) + end end - end - context 'with exclusive filters' do - it 'filters by filename' do - results = subject.find('files -filename:wm.svg') + it 'does not cause N+1 query' do + expect(Gitlab::GitalyClient).to 
receive(:call).at_most(10).times.and_call_original - expect(results.count).to eq(26) + subject.find(': filename:wm.svg') end + end - it 'filters by path' do - results = subject.find('white -path:images') + context 'when code_basic_search_files_by_regexp is disabled' do + before do + stub_feature_flags(code_basic_search_files_by_regexp: false) + end - expect(results.count).to eq(4) + context 'with inclusive filters' do + it 'filters by filename' do + results = subject.find('files filename:wm.svg') + + expect(results.count).to eq(1) + end + + it 'filters by path' do + results = subject.find('white path:images') + + expect(results.count).to eq(1) + end + + it 'filters by extension' do + results = subject.find('files extension:md') + + expect(results.count).to eq(4) + end end - it 'filters by extension' do - results = subject.find('files -extension:md') + context 'with exclusive filters' do + it 'filters by filename' do + results = subject.find('files -filename:wm.svg') + + expect(results.count).to eq(26) + end - expect(results.count).to eq(23) + it 'filters by path' do + results = subject.find('white -path:images') + + expect(results.count).to eq(4) + end + + it 'filters by extension' do + results = subject.find('files -extension:md') + + expect(results.count).to eq(23) + end end - end - context 'with white space in the path' do - it 'filters by path correctly' do - results = subject.find('directory path:"with space/README.md"') + context 'with white space in the path' do + it 'filters by path correctly' do + results = subject.find('directory path:"with space/README.md"') - expect(results.count).to eq(1) + expect(results.count).to eq(1) + end end - end - it 'does not cause N+1 query' do - expect(Gitlab::GitalyClient).to receive(:call).at_most(10).times.and_call_original + it 'does not cause N+1 query' do + expect(Gitlab::GitalyClient).to receive(:call).at_most(10).times.and_call_original - subject.find(': filename:wm.svg') + subject.find(': filename:wm.svg') + end end 
end end diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb index 6cff39c1167..72043ba2a21 100644 --- a/spec/lib/gitlab/git/repository_spec.rb +++ b/spec/lib/gitlab/git/repository_spec.rb @@ -1947,6 +1947,53 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen expect(reference.name).to be_a(String) expect(reference.target).to be_a(String) end + + it 'filters by pattern' do + refs = repository.list_refs([Gitlab::Git::TAG_REF_PREFIX]) + + refs.each do |reference| + expect(reference.name).to include(Gitlab::Git::TAG_REF_PREFIX) + end + end + + context 'with pointing_at_oids and peel_tags options' do + let(:commit_id) { mutable_repository.commit.id } + let!(:annotated_tag) { mutable_repository.add_tag('annotated-tag', user: user, target: commit_id, message: 'Tag message') } + let!(:lw_tag) { mutable_repository.add_tag('lw-tag', user: user, target: commit_id) } + + it 'filters by target OIDs' do + refs = mutable_repository.list_refs([Gitlab::Git::TAG_REF_PREFIX], pointing_at_oids: [commit_id]) + + expect(refs.length).to eq(2) + expect(refs).to contain_exactly( + Gitaly::ListRefsResponse::Reference.new( + name: "#{Gitlab::Git::TAG_REF_PREFIX}#{lw_tag.name}", + target: commit_id + ), + Gitaly::ListRefsResponse::Reference.new( + name: "#{Gitlab::Git::TAG_REF_PREFIX}#{annotated_tag.name}", + target: annotated_tag.id + ) + ) + end + + it 'returns peeled_target for annotated tags' do + refs = mutable_repository.list_refs([Gitlab::Git::TAG_REF_PREFIX], pointing_at_oids: [commit_id], peel_tags: true) + + expect(refs.length).to eq(2) + expect(refs).to contain_exactly( + Gitaly::ListRefsResponse::Reference.new( + name: "#{Gitlab::Git::TAG_REF_PREFIX}#{lw_tag.name}", + target: commit_id + ), + Gitaly::ListRefsResponse::Reference.new( + name: "#{Gitlab::Git::TAG_REF_PREFIX}#{annotated_tag.name}", + target: annotated_tag.id, + peeled_target: commit_id + ) + ) + end + end end describe '#refs_by_oid' do @@ 
-2059,16 +2106,22 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen let(:repository) { mutable_repository } let(:source_sha) { '913c66a37b4a45b9769037c55c2d238bd0942d2e' } let(:target_branch) { 'test-merge-target-branch' } + let(:target_sha) { '6d394385cf567f80a8fd85055db1ab4c5295806f' } before do - repository.create_branch(target_branch, '6d394385cf567f80a8fd85055db1ab4c5295806f') + repository.create_branch(target_branch, target_sha) end it 'can perform a merge' do merge_commit_id = nil - result = repository.merge(user, source_sha, target_branch, 'Test merge') do |commit_id| - merge_commit_id = commit_id - end + result = + repository.merge(user, + source_sha: source_sha, + target_branch: target_branch, + target_sha: target_sha, + message: 'Test merge') do |commit_id| + merge_commit_id = commit_id + end expect(result.newrev).to eq(merge_commit_id) expect(result.repo_created).to eq(false) @@ -2077,10 +2130,15 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen it 'returns nil if there was a concurrent branch update' do concurrent_update_id = '33f3729a45c02fc67d00adb1b8bca394b0e761d9' - result = repository.merge(user, source_sha, target_branch, 'Test merge') do - # This ref update should make the merge fail - repository.write_ref(Gitlab::Git::BRANCH_REF_PREFIX + target_branch, concurrent_update_id) - end + result = + repository.merge(user, + source_sha: source_sha, + target_branch: target_branch, + target_sha: target_sha, + message: 'Test merge') do |_commit_id| + # This ref update should make the merge fail + repository.write_ref(Gitlab::Git::BRANCH_REF_PREFIX + target_branch, concurrent_update_id) + end # This 'nil' signals that the merge was not applied expect(result).to be_nil @@ -2100,7 +2158,13 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen repository.create_branch(target_branch, branch_head) end - subject { repository.ff_merge(user, source_sha, target_branch) 
} + subject do + repository.ff_merge(user, + source_sha: source_sha, + target_branch: target_branch, + target_sha: branch_head + ) + end shared_examples '#ff_merge' do it 'performs a ff_merge' do @@ -2112,7 +2176,7 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen end context 'with a non-existing target branch' do - subject { repository.ff_merge(user, source_sha, 'this-isnt-real') } + subject { repository.ff_merge(user, source_sha: source_sha, target_branch: 'this-isnt-real') } it 'throws an ArgumentError' do expect { subject }.to raise_error(ArgumentError) @@ -2140,8 +2204,9 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen it "calls Gitaly's OperationService" do expect_any_instance_of(Gitlab::GitalyClient::OperationService) - .to receive(:user_ff_branch).with(user, source_sha, target_branch) - .and_return(nil) + .to receive(:user_ff_branch).with( + user, source_sha: source_sha, target_branch: target_branch, target_sha: branch_head + ).and_return(nil) subject end diff --git a/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb b/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb index 1b8da0b380b..c5b44b260c6 100644 --- a/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb +++ b/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' require 'json' require 'tempfile' -RSpec.describe Gitlab::Git::RuggedImpl::UseRugged, feature_category: :gitlay do +RSpec.describe Gitlab::Git::RuggedImpl::UseRugged, feature_category: :gitaly do let(:project) { create(:project, :repository) } let(:repository) { project.repository } let(:feature_flag_name) { wrapper.rugged_feature_keys.first } diff --git a/spec/lib/gitlab/git_access_spec.rb b/spec/lib/gitlab/git_access_spec.rb index 10a099af4f0..ea2c239df07 100644 --- a/spec/lib/gitlab/git_access_spec.rb +++ b/spec/lib/gitlab/git_access_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::GitAccess, :aggregate_failures 
do +RSpec.describe Gitlab::GitAccess, :aggregate_failures, feature_category: :authentication_and_authorization do include TermsHelper include AdminModeHelper include ExternalAuthorizationServiceHelpers @@ -113,9 +113,9 @@ RSpec.describe Gitlab::GitAccess, :aggregate_failures do end end - context 'when the external_authorization_service is enabled' do + context 'when the the deploy key is restricted with external_authorization' do before do - stub_application_setting(external_authorization_service_enabled: true) + allow(Gitlab::ExternalAuthorization).to receive(:allow_deploy_tokens_and_deploy_keys?).and_return(false) end it 'blocks push and pull with "not found"' do @@ -191,9 +191,9 @@ RSpec.describe Gitlab::GitAccess, :aggregate_failures do end end - context 'when the external_authorization_service is enabled' do + context 'when the the deploy token is restricted with external_authorization' do before do - stub_application_setting(external_authorization_service_enabled: true) + allow(Gitlab::ExternalAuthorization).to receive(:allow_deploy_tokens_and_deploy_keys?).and_return(false) end it 'blocks pull access' do diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb index ff3cade07c0..252d20d9c3a 100644 --- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb +++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb @@ -63,6 +63,47 @@ RSpec.describe Gitlab::GitalyClient::CommitService do end end + context 'when given a whitespace param' do + context 'and the param is true' do + it 'uses the ignore all white spaces const' do + request = Gitaly::CommitDiffRequest.new + + expect(Gitaly::CommitDiffRequest).to receive(:new) + .with(hash_including(whitespace_changes: Gitaly::CommitDiffRequest::WhitespaceChanges::WHITESPACE_CHANGES_IGNORE_ALL)).and_return(request) + + expect_any_instance_of(Gitaly::DiffService::Stub).to receive(:commit_diff).with(request, kind_of(Hash)) + + client.diff_from_parent(commit, 
ignore_whitespace_change: true) + end + end + + context 'and the param is false' do + it 'does not set a whitespace param' do + request = Gitaly::CommitDiffRequest.new + + expect(Gitaly::CommitDiffRequest).to receive(:new) + .with(hash_not_including(:whitespace_changes)).and_return(request) + + expect_any_instance_of(Gitaly::DiffService::Stub).to receive(:commit_diff).with(request, kind_of(Hash)) + + client.diff_from_parent(commit, ignore_whitespace_change: false) + end + end + end + + context 'when given no whitespace param' do + it 'does not set a whitespace param' do + request = Gitaly::CommitDiffRequest.new + + expect(Gitaly::CommitDiffRequest).to receive(:new) + .with(hash_not_including(:whitespace_changes)).and_return(request) + + expect_any_instance_of(Gitaly::DiffService::Stub).to receive(:commit_diff).with(request, kind_of(Hash)) + + client.diff_from_parent(commit) + end + end + it 'returns a Gitlab::GitalyClient::DiffStitcher' do ret = client.diff_from_parent(commit) diff --git a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb index 82d5d0f292b..84672eb81c0 100644 --- a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb +++ b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::GitalyClient::OperationService do +RSpec.describe Gitlab::GitalyClient::OperationService, feature_category: :source_code_management do let_it_be(:user) { create(:user) } let_it_be(:project) { create(:project, :repository) } @@ -42,21 +42,6 @@ RSpec.describe Gitlab::GitalyClient::OperationService do expect(subject.dereferenced_target).to eq(commit) end - context "when pre_receive_error is present" do - let(:response) do - Gitaly::UserCreateBranchResponse.new(pre_receive_error: "GitLab: something failed") - end - - it "throws a PreReceive exception" do - expect_any_instance_of(Gitaly::OperationService::Stub) - .to 
receive(:user_create_branch).with(request, kind_of(Hash)) - .and_return(response) - - expect { subject }.to raise_error( - Gitlab::Git::PreReceiveError, "something failed") - end - end - context 'with structured errors' do context 'with CustomHookError' do let(:stdout) { nil } @@ -232,21 +217,6 @@ RSpec.describe Gitlab::GitalyClient::OperationService do subject end - context "when pre_receive_error is present" do - let(:response) do - Gitaly::UserDeleteBranchResponse.new(pre_receive_error: "GitLab: something failed") - end - - it "throws a PreReceive exception" do - expect_any_instance_of(Gitaly::OperationService::Stub) - .to receive(:user_delete_branch).with(request, kind_of(Hash)) - .and_return(response) - - expect { subject }.to raise_error( - Gitlab::Git::PreReceiveError, "something failed") - end - end - context 'with a custom hook error' do let(:stdout) { nil } let(:stderr) { nil } @@ -309,12 +279,39 @@ RSpec.describe Gitlab::GitalyClient::OperationService do describe '#user_merge_branch' do let(:target_branch) { 'master' } + let(:target_sha) { repository.commit(target_branch).sha } let(:source_sha) { '5937ac0a7beb003549fc5fd26fc247adbce4a52e' } let(:message) { 'Merge a branch' } - subject { client.user_merge_branch(user, source_sha, target_branch, message) {} } + subject do + client.user_merge_branch(user, + source_sha: source_sha, + target_branch: target_branch, + target_sha: target_sha, + message: message + ) {} + end + + it 'sends a user_merge_branch message', :freeze_time do + first_request = + Gitaly::UserMergeBranchRequest.new( + repository: repository.gitaly_repository, + user: gitaly_user, + commit_id: source_sha, + branch: target_branch, + expected_old_oid: target_sha, + message: message, + timestamp: Google::Protobuf::Timestamp.new(seconds: Time.now.utc.to_i) + ) + + second_request = Gitaly::UserMergeBranchRequest.new(apply: true) + + expect_next_instance_of(Gitlab::GitalyClient::QueueEnumerator) do |instance| + expect(instance).to 
receive(:push).with(first_request).and_call_original + expect(instance).to receive(:push).with(second_request).and_call_original + expect(instance).to receive(:close) + end - it 'sends a user_merge_branch message' do expect(subject).to be_a(Gitlab::Git::OperationService::BranchUpdate) expect(subject.newrev).to be_present expect(subject.repo_created).to be(false) @@ -461,12 +458,14 @@ RSpec.describe Gitlab::GitalyClient::OperationService do describe '#user_ff_branch' do let(:target_branch) { 'my-branch' } + let(:target_sha) { '6d394385cf567f80a8fd85055db1ab4c5295806f' } let(:source_sha) { 'cfe32cf61b73a0d5e9f13e774abde7ff789b1660' } let(:request) do Gitaly::UserFFBranchRequest.new( repository: repository.gitaly_repository, branch: target_branch, commit_id: source_sha, + expected_old_oid: target_sha, user: gitaly_user ) end @@ -487,7 +486,13 @@ RSpec.describe Gitlab::GitalyClient::OperationService do .and_return(response) end - subject { client.user_ff_branch(user, source_sha, target_branch) } + subject do + client.user_ff_branch(user, + source_sha: source_sha, + target_branch: target_branch, + target_sha: target_sha + ) + end it 'sends a user_ff_branch message and returns a BranchUpdate object' do expect(subject).to be_a(Gitlab::Git::OperationService::BranchUpdate) @@ -574,16 +579,6 @@ RSpec.describe Gitlab::GitalyClient::OperationService do ) end - context 'when errors are not raised but returned in the response' do - before do - expect_any_instance_of(Gitaly::OperationService::Stub) - .to receive(:user_cherry_pick).with(kind_of(Gitaly::UserCherryPickRequest), kind_of(Hash)) - .and_return(response) - end - - it_behaves_like 'cherry pick and revert errors' - end - context 'when AccessCheckError is raised' do let(:raised_error) do new_detailed_error( @@ -1149,18 +1144,6 @@ RSpec.describe Gitlab::GitalyClient::OperationService do end end - context 'with pre-receive error' do - before do - expect_any_instance_of(Gitaly::OperationService::Stub) - .to 
receive(:user_create_tag) - .and_return(Gitaly::UserCreateTagResponse.new(pre_receive_error: "GitLab: something failed")) - end - - it 'raises a PreReceiveError' do - expect { add_tag }.to raise_error(Gitlab::Git::PreReceiveError, "something failed") - end - end - context 'with internal error' do before do expect_any_instance_of(Gitaly::OperationService::Stub) diff --git a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb index 14d5cef103b..09d8ea3cc0a 100644 --- a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb +++ b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::GitalyClient::RefService do +RSpec.describe Gitlab::GitalyClient::RefService, feature_category: :gitaly do let_it_be(:project) { create(:project, :repository, create_tag: 'test') } let(:storage_name) { project.repository_storage } @@ -390,10 +390,15 @@ RSpec.describe Gitlab::GitalyClient::RefService do end describe '#list_refs' do + let(:oid) { project.repository.commit.id } + it 'sends a list_refs message' do expect_any_instance_of(Gitaly::RefService::Stub) .to receive(:list_refs) - .with(gitaly_request_with_params(patterns: ['refs/heads/']), kind_of(Hash)) + .with( + gitaly_request_with_params(patterns: ['refs/heads/'], pointing_at_oids: [], peel_tags: false), + kind_of(Hash) + ) .and_call_original client.list_refs @@ -407,6 +412,24 @@ RSpec.describe Gitlab::GitalyClient::RefService do client.list_refs([Gitlab::Git::TAG_REF_PREFIX]) end + + it 'accepts a pointing_at_oids argument' do + expect_any_instance_of(Gitaly::RefService::Stub) + .to receive(:list_refs) + .with(gitaly_request_with_params(pointing_at_oids: [oid]), kind_of(Hash)) + .and_call_original + + client.list_refs(pointing_at_oids: [oid]) + end + + it 'accepts a peel_tags argument' do + expect_any_instance_of(Gitaly::RefService::Stub) + .to receive(:list_refs) + .with(gitaly_request_with_params(peel_tags: true), 
kind_of(Hash)) + .and_call_original + + client.list_refs(peel_tags: true) + end end describe '#find_refs_by_oid' do diff --git a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb index 5eb60d2caa5..434550186c1 100644 --- a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb +++ b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb @@ -5,7 +5,7 @@ require 'spec_helper' RSpec.describe Gitlab::GitalyClient::RepositoryService do using RSpec::Parameterized::TableSyntax - let(:project) { create(:project) } + let_it_be(:project) { create(:project, :repository) } let(:storage_name) { project.repository_storage } let(:relative_path) { project.disk_path + '.git' } let(:client) { described_class.new(project.repository) } @@ -22,13 +22,38 @@ RSpec.describe Gitlab::GitalyClient::RepositoryService do end describe '#optimize_repository' do - it 'sends a optimize_repository message' do - expect_any_instance_of(Gitaly::RepositoryService::Stub) - .to receive(:optimize_repository) - .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash)) - .and_return(double(:optimize_repository)) + shared_examples 'a repository optimization' do + it 'sends a optimize_repository message' do + expect_any_instance_of(Gitaly::RepositoryService::Stub) + .to receive(:optimize_repository) + .with(gitaly_request_with_params( + strategy: expected_strategy + ), kind_of(Hash)) + .and_call_original + + client.optimize_repository(**params) + end + end + + context 'with default parameter' do + let(:params) { {} } + let(:expected_strategy) { :STRATEGY_HEURISTICAL } + + it_behaves_like 'a repository optimization' + end + + context 'with heuristical housekeeping strategy' do + let(:params) { { eager: false } } + let(:expected_strategy) { :STRATEGY_HEURISTICAL } + + it_behaves_like 'a repository optimization' + end + + context 'with eager housekeeping strategy' do + let(:params) { { eager: true } } + 
let(:expected_strategy) { :STRATEGY_EAGER } - client.optimize_repository + it_behaves_like 'a repository optimization' end end diff --git a/spec/lib/gitlab/gitaly_client_spec.rb b/spec/lib/gitlab/gitaly_client_spec.rb index f5e75242f40..0073d2ebe80 100644 --- a/spec/lib/gitlab/gitaly_client_spec.rb +++ b/spec/lib/gitlab/gitaly_client_spec.rb @@ -155,12 +155,42 @@ RSpec.describe Gitlab::GitalyClient, feature_category: :gitaly do expect(described_class.stub_creds('default')).to eq(:this_channel_is_insecure) end + it 'returns :this_channel_is_insecure if dns' do + address = 'dns:///localhost:9876' + stub_repos_storages address + + expect(described_class.stub_creds('default')).to eq(:this_channel_is_insecure) + end + + it 'returns :this_channel_is_insecure if dns (short-form)' do + address = 'dns:localhost:9876' + stub_repos_storages address + + expect(described_class.stub_creds('default')).to eq(:this_channel_is_insecure) + end + + it 'returns :this_channel_is_insecure if dns (with authority)' do + address = 'dns://1.1.1.1/localhost:9876' + stub_repos_storages address + + expect(described_class.stub_creds('default')).to eq(:this_channel_is_insecure) + end + it 'returns Credentials object if tls' do address = 'tls://localhost:9876' stub_repos_storages address expect(described_class.stub_creds('default')).to be_a(GRPC::Core::ChannelCredentials) end + + it 'raise an exception if the scheme is not supported' do + address = 'custom://localhost:9876' + stub_repos_storages address + + expect do + described_class.stub_creds('default') + end.to raise_error(/unsupported Gitaly address/i) + end end describe '.create_channel' do @@ -168,7 +198,10 @@ RSpec.describe Gitlab::GitalyClient, feature_category: :gitaly do [ ['default', 'unix:tmp/gitaly.sock', 'unix:tmp/gitaly.sock'], ['default', 'tcp://localhost:9876', 'localhost:9876'], - ['default', 'tls://localhost:9876', 'localhost:9876'] + ['default', 'tls://localhost:9876', 'localhost:9876'], + ['default', 'dns:///localhost:9876', 
'dns:///localhost:9876'], + ['default', 'dns:localhost:9876', 'dns:localhost:9876'], + ['default', 'dns://1.1.1.1/localhost:9876', 'dns://1.1.1.1/localhost:9876'] ] end @@ -289,6 +322,43 @@ RSpec.describe Gitlab::GitalyClient, feature_category: :gitaly do expect(stub_commit).to have_same_channel(stub_blob) end end + + context 'when passed a DNS address' do + let(:address) { 'dns:///localhost:9876' } + + before do + stub_repos_storages address + end + + it 'strips dns:/// prefix before passing it to GRPC::Core::Channel initializer' do + expect(Gitaly::CommitService::Stub).to receive(:new).with( + address, nil, channel_override: be_a(GRPC::Core::Channel), interceptors: [] + ) + + described_class.stub(:commit_service, 'default') + end + + it 'shares the same channel object with other stub' do + stub_commit = described_class.stub(:commit_service, 'default') + stub_blob = described_class.stub(:blob_service, 'default') + + expect(stub_commit).to have_same_channel(stub_blob) + end + end + + context 'when passed an unsupported scheme' do + let(:address) { 'custom://localhost:9876' } + + before do + stub_repos_storages address + end + + it 'strips dns:/// prefix before passing it to GRPC::Core::Channel initializer' do + expect do + described_class.stub(:commit_service, 'default') + end.to raise_error(/Unsupported Gitaly address/i) + end + end end describe '.can_use_disk?' 
do diff --git a/spec/lib/gitlab/github_import/client_spec.rb b/spec/lib/gitlab/github_import/client_spec.rb index d69bc4d60ee..e93d585bc3c 100644 --- a/spec/lib/gitlab/github_import/client_spec.rb +++ b/spec/lib/gitlab/github_import/client_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::GithubImport::Client, feature_category: :importer do +RSpec.describe Gitlab::GithubImport::Client, feature_category: :importers do subject(:client) { described_class.new('foo', parallel: parallel) } let(:parallel) { true } diff --git a/spec/lib/gitlab/github_import/clients/proxy_spec.rb b/spec/lib/gitlab/github_import/clients/proxy_spec.rb index 9fef57f2a38..0baff7bafcb 100644 --- a/spec/lib/gitlab/github_import/clients/proxy_spec.rb +++ b/spec/lib/gitlab/github_import/clients/proxy_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::GithubImport::Clients::Proxy, :manage, feature_category: :import do +RSpec.describe Gitlab::GithubImport::Clients::Proxy, :manage, feature_category: :importers do subject(:client) { described_class.new(access_token, client_options) } let(:access_token) { 'test_token' } diff --git a/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb index a8dd6b4725d..945b742b025 100644 --- a/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::GithubImport::Importer::DiffNotesImporter do +RSpec.describe Gitlab::GithubImport::Importer::DiffNotesImporter, feature_category: :importers do let(:project) { double(:project, id: 4, import_source: 'foo/bar') } let(:client) { double(:client) } @@ -89,7 +89,7 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNotesImporter do end end - describe '#parallel_import' do + describe '#parallel_import', :clean_gitlab_redis_cache do it 'imports each diff note in 
parallel' do importer = described_class.new(project, client) @@ -97,10 +97,8 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNotesImporter do .to receive(:each_object_to_import) .and_yield(github_comment) - expect(Gitlab::GithubImport::ImportDiffNoteWorker).to receive(:bulk_perform_in) - .with(1.second, [ - [project.id, an_instance_of(Hash), an_instance_of(String)] - ], batch_size: 1000, batch_delay: 1.minute) + expect(Gitlab::GithubImport::ImportDiffNoteWorker).to receive(:perform_in) + .with(1.second, project.id, an_instance_of(Hash), an_instance_of(String)) waiter = importer.parallel_import diff --git a/spec/lib/gitlab/github_import/importer/issue_events_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issue_events_importer_spec.rb index 2c1af4f8948..04b694dc0cb 100644 --- a/spec/lib/gitlab/github_import/importer/issue_events_importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer/issue_events_importer_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::GithubImport::Importer::IssueEventsImporter do +RSpec.describe Gitlab::GithubImport::Importer::IssueEventsImporter, feature_category: :importers do subject(:importer) { described_class.new(project, client, parallel: parallel) } let(:project) { instance_double(Project, id: 4, import_source: 'foo/bar') } @@ -73,14 +73,12 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueEventsImporter do end end - describe '#parallel_import' do + describe '#parallel_import', :clean_gitlab_redis_cache do it 'imports each note in parallel' do allow(importer).to receive(:each_object_to_import).and_yield(issue_event) - expect(Gitlab::GithubImport::ImportIssueEventWorker).to receive(:bulk_perform_in).with( - 1.second, [ - [project.id, an_instance_of(Hash), an_instance_of(String)] - ], batch_size: 1000, batch_delay: 1.minute + expect(Gitlab::GithubImport::ImportIssueEventWorker).to receive(:perform_in).with( + 1.second, project.id, an_instance_of(Hash), an_instance_of(String) ) waiter = 
importer.parallel_import diff --git a/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb index 4a5525c250e..d6fd1a4739c 100644 --- a/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer/issues_importer_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::GithubImport::Importer::IssuesImporter do +RSpec.describe Gitlab::GithubImport::Importer::IssuesImporter, feature_category: :importers do let(:project) { double(:project, id: 4, import_source: 'foo/bar') } let(:client) { double(:client) } let(:created_at) { Time.new(2017, 1, 1, 12, 00) } @@ -82,7 +82,7 @@ RSpec.describe Gitlab::GithubImport::Importer::IssuesImporter do end end - describe '#parallel_import' do + describe '#parallel_import', :clean_gitlab_redis_cache do it 'imports each issue in parallel' do importer = described_class.new(project, client) @@ -91,12 +91,8 @@ RSpec.describe Gitlab::GithubImport::Importer::IssuesImporter do .and_yield(github_issue) expect(Gitlab::GithubImport::ImportIssueWorker) - .to receive(:bulk_perform_in) - .with(1.second, - [[project.id, an_instance_of(Hash), an_instance_of(String)]], - batch_size: 1000, - batch_delay: 1.minute - ) + .to receive(:perform_in) + .with(1.second, project.id, an_instance_of(Hash), an_instance_of(String)) waiter = importer.parallel_import diff --git a/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb b/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb index 678aa705b6c..fab9d26532d 100644 --- a/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::GithubImport::Importer::LfsObjectsImporter do +RSpec.describe Gitlab::GithubImport::Importer::LfsObjectsImporter, feature_category: :importers do let_it_be(:project) { 
create(:project, :import_started) } let(:client) { double(:client) } @@ -110,7 +110,7 @@ RSpec.describe Gitlab::GithubImport::Importer::LfsObjectsImporter do end end - describe '#parallel_import' do + describe '#parallel_import', :clean_gitlab_redis_cache do it 'imports each lfs object in parallel' do importer = described_class.new(project, client) @@ -118,10 +118,8 @@ RSpec.describe Gitlab::GithubImport::Importer::LfsObjectsImporter do expect(service).to receive(:each_list_item).and_yield(lfs_download_object) end - expect(Gitlab::GithubImport::ImportLfsObjectWorker).to receive(:bulk_perform_in) - .with(1.second, [ - [project.id, an_instance_of(Hash), an_instance_of(String)] - ], batch_size: 1000, batch_delay: 1.minute) + expect(Gitlab::GithubImport::ImportLfsObjectWorker).to receive(:perform_in) + .with(1.second, project.id, an_instance_of(Hash), an_instance_of(String)) waiter = importer.parallel_import diff --git a/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb index ca4560b6a1a..841cc8178ea 100644 --- a/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer/notes_importer_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::GithubImport::Importer::NotesImporter do +RSpec.describe Gitlab::GithubImport::Importer::NotesImporter, feature_category: :importers do let(:project) { double(:project, id: 4, import_source: 'foo/bar') } let(:client) { double(:client) } @@ -75,7 +75,7 @@ RSpec.describe Gitlab::GithubImport::Importer::NotesImporter do end end - describe '#parallel_import' do + describe '#parallel_import', :clean_gitlab_redis_cache do it 'imports each note in parallel' do importer = described_class.new(project, client) @@ -83,10 +83,8 @@ RSpec.describe Gitlab::GithubImport::Importer::NotesImporter do .to receive(:each_object_to_import) .and_yield(github_comment) - expect(Gitlab::GithubImport::ImportNoteWorker).to 
receive(:bulk_perform_in) - .with(1.second, [ - [project.id, an_instance_of(Hash), an_instance_of(String)] - ], batch_size: 1000, batch_delay: 1.minute) + expect(Gitlab::GithubImport::ImportNoteWorker).to receive(:perform_in) + .with(1.second, project.id, an_instance_of(Hash), an_instance_of(String)) waiter = importer.parallel_import diff --git a/spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb b/spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb index 8809d58a252..6a8b14a2690 100644 --- a/spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer/protected_branches_importer_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchesImporter do +RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchesImporter, feature_category: :importers do subject(:importer) { described_class.new(project, client, parallel: parallel) } let(:project) { instance_double('Project', id: 4, import_source: 'foo/bar') } @@ -143,13 +143,9 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchesImporter do it 'imports each protected branch in parallel' do expect(Gitlab::GithubImport::ImportProtectedBranchWorker) - .to receive(:bulk_perform_in) - .with( - 1.second, - [[project.id, an_instance_of(Hash), an_instance_of(String)]], - batch_delay: 1.minute, - batch_size: 1000 - ) + .to receive(:perform_in) + .with(1.second, project.id, an_instance_of(Hash), an_instance_of(String)) + expect(Gitlab::GithubImport::ObjectCounter) .to receive(:increment).with(project, :protected_branch, :fetched) diff --git a/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb index 2e1a3c496cc..3e62e8f473c 100644 --- a/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb +++ 
b/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb @@ -2,7 +2,8 @@ require 'spec_helper' -RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter, :clean_gitlab_redis_cache do +RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter, + :clean_gitlab_redis_cache, feature_category: :importers do using RSpec::Parameterized::TableSyntax let_it_be(:merge_request) { create(:merge_request) } @@ -39,6 +40,19 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter, :clean expect(merge_request.reviewers).to contain_exactly(author) end end + + context 'when because of concurrency an attempt of duplication appeared' do + before do + allow(MergeRequestReviewer) + .to receive(:create!).and_raise(ActiveRecord::RecordNotUnique) + end + + it 'does not change Merge Request reviewers', :aggregate_failures do + expect { subject.execute }.not_to change(MergeRequestReviewer, :count) + + expect(merge_request.reviewers).to contain_exactly(author) + end + end end shared_examples 'imports an approval for the Merge Request' do diff --git a/spec/lib/gitlab/github_import/importer/pull_requests/review_requests_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests/review_requests_importer_spec.rb index 3bf1976ee10..536983fea06 100644 --- a/spec/lib/gitlab/github_import/importer/pull_requests/review_requests_importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer/pull_requests/review_requests_importer_spec.rb @@ -2,7 +2,8 @@ require 'spec_helper' -RSpec.describe Gitlab::GithubImport::Importer::PullRequests::ReviewRequestsImporter, :clean_gitlab_redis_cache do +RSpec.describe Gitlab::GithubImport::Importer::PullRequests::ReviewRequestsImporter, :clean_gitlab_redis_cache, + feature_category: :importers do subject(:importer) { described_class.new(project, client) } let_it_be(:project) { create(:project, import_source: 'foo') } @@ -107,12 +108,10 @@ RSpec.describe 
Gitlab::GithubImport::Importer::PullRequests::ReviewRequestsImpor it 'schedule import for each merge request reviewers' do expect(Gitlab::GithubImport::PullRequests::ImportReviewRequestWorker) - .to receive(:bulk_perform_in).with( - 1.second, - match_array(expected_worker_payload), - batch_size: 1000, - batch_delay: 1.minute - ) + .to receive(:perform_in).with(1.second, *expected_worker_payload.first) + + expect(Gitlab::GithubImport::PullRequests::ImportReviewRequestWorker) + .to receive(:perform_in).with(1.second, *expected_worker_payload.second) importer.parallel_import end @@ -127,12 +126,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequests::ReviewRequestsImpor it "doesn't schedule import this merge request reviewers" do expect(Gitlab::GithubImport::PullRequests::ImportReviewRequestWorker) - .to receive(:bulk_perform_in).with( - 1.second, - expected_worker_payload.slice(1, 1), - batch_size: 1000, - batch_delay: 1.minute - ) + .to receive(:perform_in).with(1.second, *expected_worker_payload.second) importer.parallel_import end diff --git a/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb index aa92abdb110..eddde272d2c 100644 --- a/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::GithubImport::Importer::PullRequestsImporter do +RSpec.describe Gitlab::GithubImport::Importer::PullRequestsImporter, feature_category: :importers do let(:url) { 'https://github.com/foo/bar.git' } let(:project) { create(:project, import_source: 'foo/bar', import_url: url) } let(:client) { double(:client) } @@ -92,7 +92,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsImporter do end end - describe '#parallel_import' do + describe '#parallel_import', :clean_gitlab_redis_cache do it 'imports each note in parallel' do 
importer = described_class.new(project, client) @@ -101,13 +101,8 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsImporter do .and_yield(pull_request) expect(Gitlab::GithubImport::ImportPullRequestWorker) - .to receive(:bulk_perform_in) - .with( - 1.second, - [[project.id, an_instance_of(Hash), an_instance_of(String)]], - batch_delay: 1.minute, - batch_size: 200 - ) + .to receive(:perform_in) + .with(1.second, project.id, an_instance_of(Hash), an_instance_of(String)) waiter = importer.parallel_import diff --git a/spec/lib/gitlab/github_import/object_counter_spec.rb b/spec/lib/gitlab/github_import/object_counter_spec.rb index e522f74416c..92a979eddd2 100644 --- a/spec/lib/gitlab/github_import/object_counter_spec.rb +++ b/spec/lib/gitlab/github_import/object_counter_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe Gitlab::GithubImport::ObjectCounter, :clean_gitlab_redis_cache do - let_it_be(:project) { create(:project, :import_started, import_type: 'github') } + let_it_be(:project) { create(:project, :import_started, import_type: 'github', import_url: 'https://github.com/vim/vim.git') } it 'validates the operation being incremented' do expect { described_class.increment(project, :issue, :unknown) } @@ -57,4 +57,57 @@ RSpec.describe Gitlab::GithubImport::ObjectCounter, :clean_gitlab_redis_cache do described_class.increment(project, :issue, :fetched) end + + describe '.summary' do + context 'when there are cached import statistics' do + before do + described_class.increment(project, :issue, :fetched, value: 10) + described_class.increment(project, :issue, :imported, value: 8) + end + + it 'includes cached object counts stats in response' do + expect(described_class.summary(project)).to eq( + 'fetched' => { 'issue' => 10 }, + 'imported' => { 'issue' => 8 } + ) + end + end + + context 'when there are no cached import statistics' do + context 'when project import is in progress' do + it 'includes an empty object counts stats in response' do + 
expect(described_class.summary(project)).to eq(described_class::EMPTY_SUMMARY) + end + end + + context 'when project import is not in progress' do + let(:checksums) do + { + 'fetched' => { + "issue" => 2, + "label" => 10, + "note" => 2, + "protected_branch" => 2, + "pull_request" => 2 + }, + "imported" => { + "issue" => 2, + "label" => 10, + "note" => 2, + "protected_branch" => 2, + "pull_request" => 2 + } + } + end + + before do + project.import_state.update_columns(checksums: checksums, status: :finished) + end + + it 'includes project import checksums in response' do + expect(described_class.summary(project)).to eq(checksums) + end + end + end + end end diff --git a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb index cefad3baa31..c351ead91eb 100644 --- a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb +++ b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::GithubImport::ParallelScheduling do +RSpec.describe Gitlab::GithubImport::ParallelScheduling, feature_category: :importers do let(:importer_class) do Class.new do def self.name @@ -266,7 +266,7 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling do end end - describe '#parallel_import' do + describe '#parallel_import', :clean_gitlab_redis_cache do let(:importer) { importer_class.new(project, client) } let(:repr_class) { double(:representation) } let(:worker_class) { double(:worker) } @@ -286,25 +286,82 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling do allow(repr_class) .to receive(:from_api_response) .with(object, {}) - .and_return({ title: 'Foo' }) + .and_return({ title: 'One' }, { title: 'Two' }, { title: 'Three' }) end context 'with multiple objects' do before do + stub_feature_flags(improved_spread_parallel_import: false) + expect(importer).to receive(:each_object_to_import).and_yield(object).and_yield(object).and_yield(object) end it 
'imports data in parallel batches with delays' do expect(worker_class).to receive(:bulk_perform_in) .with(1.second, [ - [project.id, { title: 'Foo' }, an_instance_of(String)], - [project.id, { title: 'Foo' }, an_instance_of(String)], - [project.id, { title: 'Foo' }, an_instance_of(String)] + [project.id, { title: 'One' }, an_instance_of(String)], + [project.id, { title: 'Two' }, an_instance_of(String)], + [project.id, { title: 'Three' }, an_instance_of(String)] ], batch_size: batch_size, batch_delay: batch_delay) importer.parallel_import end end + + context 'when the feature flag `improved_spread_parallel_import` is enabled' do + before do + stub_feature_flags(improved_spread_parallel_import: true) + end + + it 'imports data in parallel with delays respecting parallel_import_batch definition and return job waiter' do + allow(::Gitlab::JobWaiter).to receive(:generate_key).and_return('waiter-key') + allow(importer).to receive(:parallel_import_batch).and_return({ size: 2, delay: 1.minute }) + + expect(importer).to receive(:each_object_to_import) + .and_yield(object).and_yield(object).and_yield(object) + expect(worker_class).to receive(:perform_in) + .with(1.second, project.id, { title: 'One' }, 'waiter-key').ordered + expect(worker_class).to receive(:perform_in) + .with(1.second, project.id, { title: 'Two' }, 'waiter-key').ordered + expect(worker_class).to receive(:perform_in) + .with(1.minute + 1.second, project.id, { title: 'Three' }, 'waiter-key').ordered + + job_waiter = importer.parallel_import + + expect(job_waiter.key).to eq('waiter-key') + expect(job_waiter.jobs_remaining).to eq(3) + end + + context 'when job restarts due to API rate limit or Sidekiq interruption' do + before do + cache_key = format(described_class::JOB_WAITER_CACHE_KEY, + project: project.id, collection: importer.collection_method) + Gitlab::Cache::Import::Caching.write(cache_key, 'waiter-key') + + cache_key = format(described_class::JOB_WAITER_REMAINING_CACHE_KEY, + project: project.id, 
collection: importer.collection_method) + Gitlab::Cache::Import::Caching.write(cache_key, 3) + end + + it "restores job waiter's key and jobs_remaining" do + allow(importer).to receive(:parallel_import_batch).and_return({ size: 1, delay: 1.minute }) + + expect(importer).to receive(:each_object_to_import).and_yield(object).and_yield(object).and_yield(object) + + expect(worker_class).to receive(:perform_in) + .with(1.second, project.id, { title: 'One' }, 'waiter-key').ordered + expect(worker_class).to receive(:perform_in) + .with(1.minute + 1.second, project.id, { title: 'Two' }, 'waiter-key').ordered + expect(worker_class).to receive(:perform_in) + .with(2.minutes + 1.second, project.id, { title: 'Three' }, 'waiter-key').ordered + + job_waiter = importer.parallel_import + + expect(job_waiter.key).to eq('waiter-key') + expect(job_waiter.jobs_remaining).to eq(6) + end + end + end end describe '#each_object_to_import' do diff --git a/spec/lib/gitlab/graphql/deprecation_spec.rb b/spec/lib/gitlab/graphql/deprecations/deprecation_spec.rb index c9b47219198..55650b0480e 100644 --- a/spec/lib/gitlab/graphql/deprecation_spec.rb +++ b/spec/lib/gitlab/graphql/deprecations/deprecation_spec.rb @@ -3,7 +3,7 @@ require 'fast_spec_helper' require 'active_model' -RSpec.describe ::Gitlab::Graphql::Deprecation do +RSpec.describe ::Gitlab::Graphql::Deprecations::Deprecation, feature_category: :integrations do let(:options) { {} } subject(:deprecation) { described_class.new(**options) } diff --git a/spec/lib/gitlab/graphql/markdown_field_spec.rb b/spec/lib/gitlab/graphql/markdown_field_spec.rb index 974951ab30c..9e754f9673e 100644 --- a/spec/lib/gitlab/graphql/markdown_field_spec.rb +++ b/spec/lib/gitlab/graphql/markdown_field_spec.rb @@ -10,7 +10,7 @@ RSpec.describe Gitlab::Graphql::MarkdownField do field = class_with_markdown_field(:test_html, null: true, method: :hello).fields['testHtml'] expect(field.name).to eq('testHtml') - expect(field.description).to eq('The GitLab Flavored 
Markdown rendering of `hello`') + expect(field.description).to eq('GitLab Flavored Markdown rendering of `hello`') expect(field.type).to eq(GraphQL::Types::String) expect(field.complexity).to eq(5) end diff --git a/spec/lib/gitlab/graphql/queries_spec.rb b/spec/lib/gitlab/graphql/queries_spec.rb index 2c2ec821385..c556f507876 100644 --- a/spec/lib/gitlab/graphql/queries_spec.rb +++ b/spec/lib/gitlab/graphql/queries_spec.rb @@ -360,5 +360,29 @@ RSpec.describe Gitlab::Graphql::Queries do end end end + + context 'a query containing a persist directive' do + let(:path) { 'persist_directive.query.graphql' } + + it_behaves_like 'a valid GraphQL query for the blog schema' + + it 'is tagged as a client query' do + expect(subject.validate(schema).first).to eq :client_query + end + end + + context 'a query containing a persistantly directive' do + let(:path) { 'persistantly_directive.query.graphql' } + + it 'is not tagged as a client query' do + expect(subject.validate(schema).first).not_to eq :client_query + end + end + + context 'a query containing a persist field' do + let(:path) { 'persist_field.query.graphql' } + + it_behaves_like 'a valid GraphQL query for the blog schema' + end end end diff --git a/spec/lib/gitlab/http_connection_adapter_spec.rb b/spec/lib/gitlab/http_connection_adapter_spec.rb index 5e2c6be8993..5137e098e2d 100644 --- a/spec/lib/gitlab/http_connection_adapter_spec.rb +++ b/spec/lib/gitlab/http_connection_adapter_spec.rb @@ -111,20 +111,6 @@ RSpec.describe Gitlab::HTTPConnectionAdapter do end end - context 'when http(s) environment variable is set' do - before do - stub_env('https_proxy' => 'https://my.proxy') - end - - it 'sets up the connection' do - expect(connection).to be_a(Gitlab::NetHttpAdapter) - expect(connection.address).to eq('example.org') - expect(connection.hostname_override).to eq(nil) - expect(connection.addr_port).to eq('example.org') - expect(connection.port).to eq(443) - end - end - context 'when URL scheme is not HTTP/HTTPS' do 
let(:uri) { URI('ssh://example.org') } diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml index 8750bf4387c..0c2c3ffc664 100644 --- a/spec/lib/gitlab/import_export/all_models.yml +++ b/spec/lib/gitlab/import_export/all_models.yml @@ -386,6 +386,7 @@ create_access_levels: - user - protected_tag - group +- deploy_key container_repositories: - project - name @@ -486,6 +487,7 @@ project: - requesters - namespace_members - namespace_requesters +- namespace_members_and_requesters - deploy_keys_projects - deploy_keys - users_star_projects @@ -669,6 +671,8 @@ project: - disable_download_button - dependency_list_exports - sbom_occurrences +- analytics_dashboards_configuration_project +- analytics_dashboards_pointer award_emoji: - awardable - user diff --git a/spec/lib/gitlab/import_export/attribute_configuration_spec.rb b/spec/lib/gitlab/import_export/attribute_configuration_spec.rb index 7e17d56def0..572f809e43b 100644 --- a/spec/lib/gitlab/import_export/attribute_configuration_spec.rb +++ b/spec/lib/gitlab/import_export/attribute_configuration_spec.rb @@ -18,7 +18,7 @@ RSpec.describe 'Import/Export attribute configuration' do it 'has no new columns' do relation_names_for(:project).each do |relation_name| relation_class = relation_class_for_name(relation_name) - relation_attributes = relation_class.new.attributes.keys - relation_class.encrypted_attributes.keys.map(&:to_s) + relation_attributes = relation_class.new.attributes.keys - relation_class.attr_encrypted_attributes.keys.map(&:to_s) current_attributes = parsed_attributes(relation_name, relation_attributes) safe_attributes = safe_model_attributes[relation_class.to_s].dup || [] diff --git a/spec/lib/gitlab/import_export/import_export_equivalence_spec.rb b/spec/lib/gitlab/import_export/import_export_equivalence_spec.rb index 18f2e8f80d7..6c997dc1361 100644 --- a/spec/lib/gitlab/import_export/import_export_equivalence_spec.rb +++ 
b/spec/lib/gitlab/import_export/import_export_equivalence_spec.rb @@ -13,7 +13,7 @@ require 'spec_helper' # - randomly generated fields like tokens # # as these are expected to change between import/export cycles. -RSpec.describe Gitlab::ImportExport do +RSpec.describe Gitlab::ImportExport, feature_category: :importers do include ImportExport::CommonUtil include ConfigurationHelper include ImportExport::ProjectTreeExpectations @@ -25,7 +25,8 @@ RSpec.describe Gitlab::ImportExport do end it 'yields the initial tree when importing and exporting it again' do - project = create(:project, creator: create(:user, :admin)) + project = create(:project) + user = create(:user, :admin) # We first generate a test fixture dynamically from a seed-fixture, so as to # account for any fields in the initial fixture that are missing and set to @@ -34,6 +35,7 @@ RSpec.describe Gitlab::ImportExport do expect( restore_then_save_project( project, + user, import_path: seed_fixture_path, export_path: test_fixture_path) ).to be true @@ -42,6 +44,7 @@ RSpec.describe Gitlab::ImportExport do expect( restore_then_save_project( project, + user, import_path: test_fixture_path, export_path: test_tmp_path) ).to be true diff --git a/spec/lib/gitlab/import_export/importer_spec.rb b/spec/lib/gitlab/import_export/importer_spec.rb index c9d559c992c..53d205850c8 100644 --- a/spec/lib/gitlab/import_export/importer_spec.rb +++ b/spec/lib/gitlab/import_export/importer_spec.rb @@ -6,8 +6,8 @@ RSpec.describe Gitlab::ImportExport::Importer do let(:user) { create(:user) } let(:test_path) { "#{Dir.tmpdir}/importer_spec" } let(:shared) { project.import_export_shared } - let(:project) { create(:project) } let(:import_file) { fixture_file_upload('spec/features/projects/import_export/test_project_export.tar.gz') } + let_it_be(:project) { create(:project) } subject(:importer) { described_class.new(project) } diff --git a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb 
b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb index 02ac8065c9f..103d3512e8b 100644 --- a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb +++ b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::ImportExport::Json::StreamingSerializer do +RSpec.describe Gitlab::ImportExport::Json::StreamingSerializer, feature_category: :importers do let_it_be(:user) { create(:user) } let_it_be(:release) { create(:release) } let_it_be(:group) { create(:group) } @@ -213,59 +213,143 @@ RSpec.describe Gitlab::ImportExport::Json::StreamingSerializer do end end - describe 'conditional export of included associations' do + describe 'with inaccessible associations' do + let_it_be(:milestone) { create(:milestone, project: exportable) } + let_it_be(:issue) { create(:issue, assignees: [user], project: exportable, milestone: milestone) } + let_it_be(:label1) { create(:label, project: exportable) } + let_it_be(:label2) { create(:label, project: exportable) } + let_it_be(:link1) { create(:label_link, label: label1, target: issue) } + let_it_be(:link2) { create(:label_link, label: label2, target: issue) } + + let(:options) { { include: [{ label_links: { include: [:label] } }, { milestone: { include: [] } }] } } + let(:include) do - [{ issues: { include: [{ label_links: { include: [:label] } }] } }] + [{ issues: options }] end - let(:include_if_exportable) do - { issues: [:label_links] } + shared_examples 'record with exportable associations' do + it 'includes exportable association' do + expect(json_writer).to receive(:write_relation_array).with(exportable_path, :issues, array_including(expected_issue)) + + subject.execute + end end - let_it_be(:label) { create(:label, project: exportable) } - let_it_be(:link) { create(:label_link, label: label, target: issue) } + context 'conditional export of included associations' do + let(:include_if_exportable) do + { issues: [:label_links, 
:milestone] } + end - context 'when association is exportable' do - before do - allow_next_found_instance_of(Issue) do |issue| - allow(issue).to receive(:exportable_association?).with(:label_links, current_user: user).and_return(true) + context 'when association is exportable' do + before do + allow_next_found_instance_of(Issue) do |issue| + allow(issue).to receive(:exportable_association?).with(:label_links, current_user: user).and_return(true) + allow(issue).to receive(:exportable_association?).with(:milestone, current_user: user).and_return(true) + end + end + + it_behaves_like 'record with exportable associations' do + let(:expected_issue) { issue.to_json(options) } end end - it 'includes exportable association' do - expected_issue = issue.to_json(include: [{ label_links: { include: [:label] } }]) + context 'when an association is not exportable' do + before do + allow_next_found_instance_of(Issue) do |issue| + allow(issue).to receive(:exportable_association?).with(:label_links, current_user: user).and_return(true) + allow(issue).to receive(:exportable_association?).with(:milestone, current_user: user).and_return(false) + end + end - expect(json_writer).to receive(:write_relation_array).with(exportable_path, :issues, array_including(expected_issue)) + it_behaves_like 'record with exportable associations' do + let(:expected_issue) { issue.to_json(include: [{ label_links: { include: [:label] } }]) } + end + end - subject.execute + context 'when association does not respond to exportable_association?' 
do + before do + allow_next_found_instance_of(Issue) do |issue| + allow(issue).to receive(:respond_to?).and_call_original + allow(issue).to receive(:respond_to?).with(:exportable_association?).and_return(false) + end + end + + it_behaves_like 'record with exportable associations' do + let(:expected_issue) { issue.to_json } + end end end - context 'when association is not exportable' do - before do - allow_next_found_instance_of(Issue) do |issue| - allow(issue).to receive(:exportable_association?).with(:label_links, current_user: user).and_return(false) + context 'export of included restricted associations' do + let(:many_relation) { :label_links } + let(:single_relation) { :milestone } + let(:issue_hash) { issue.as_json(options).with_indifferent_access } + let(:expected_issue) { issue.to_json(options) } + + context 'when the association is restricted' do + context 'when some association records are exportable' do + before do + allow_next_found_instance_of(Issue) do |issue| + allow(issue).to receive(:restricted_associations).with([many_relation, single_relation]).and_return([many_relation]) + allow(issue).to receive(:readable_records).with(many_relation, current_user: user).and_return([link1]) + end + end + + it_behaves_like 'record with exportable associations' do + let(:expected_issue) do + issue_hash[many_relation].delete_if { |record| record['id'] == link2.id } + issue_hash.to_json(options) + end + end end - end - it 'filters out not exportable association' do - expect(json_writer).to receive(:write_relation_array).with(exportable_path, :issues, array_including(issue.to_json)) + context 'when all association records are exportable' do + before do + allow_next_found_instance_of(Issue) do |issue| + allow(issue).to receive(:restricted_associations).with([many_relation, single_relation]).and_return([many_relation]) + allow(issue).to receive(:readable_records).with(many_relation, current_user: user).and_return([link1, link2]) + end + end - subject.execute - end - end 
+ it_behaves_like 'record with exportable associations' + end - context 'when association does not respond to exportable_association?' do - before do - allow_next_found_instance_of(Issue) do |issue| - allow(issue).to receive(:respond_to?).with(:exportable_association?).and_return(false) + context 'when the single association record is exportable' do + before do + allow_next_found_instance_of(Issue) do |issue| + allow(issue).to receive(:restricted_associations).with([many_relation, single_relation]).and_return([single_relation]) + allow(issue).to receive(:readable_records).with(single_relation, current_user: user).and_return(milestone) + end + end + + it_behaves_like 'record with exportable associations' + end + + context 'when the single association record is not exportable' do + before do + allow_next_found_instance_of(Issue) do |issue| + allow(issue).to receive(:restricted_associations).with([many_relation, single_relation]).and_return([single_relation]) + allow(issue).to receive(:readable_records).with(single_relation, current_user: user).and_return(nil) + end + end + + it_behaves_like 'record with exportable associations' do + let(:expected_issue) do + issue_hash[single_relation] = nil + issue_hash.to_json(options) + end + end end end - it 'filters out not exportable association' do - expect(json_writer).to receive(:write_relation_array).with(exportable_path, :issues, array_including(issue.to_json)) + context 'when the associations are not restricted' do + before do + allow_next_found_instance_of(Issue) do |issue| + allow(issue).to receive(:restricted_associations).with([many_relation, single_relation]).and_return([]) + end + end - subject.execute + it_behaves_like 'record with exportable associations' end end end diff --git a/spec/lib/gitlab/import_export/project/relation_factory_spec.rb b/spec/lib/gitlab/import_export/project/relation_factory_spec.rb index 936c63fd6cd..d133f54ade5 100644 --- a/spec/lib/gitlab/import_export/project/relation_factory_spec.rb +++ 
b/spec/lib/gitlab/import_export/project/relation_factory_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::ImportExport::Project::RelationFactory, :use_clean_rails_memory_store_caching do +RSpec.describe Gitlab::ImportExport::Project::RelationFactory, :use_clean_rails_memory_store_caching, feature_category: :importers do let(:group) { create(:group).tap { |g| g.add_maintainer(importer_user) } } let(:project) { create(:project, :repository, group: group) } let(:members_mapper) { double('members_mapper').as_null_object } @@ -418,21 +418,73 @@ RSpec.describe Gitlab::ImportExport::Project::RelationFactory, :use_clean_rails_ end end - context 'merge request access level object' do - let(:relation_sym) { :'ProtectedBranch::MergeAccessLevel' } - let(:relation_hash) { { 'access_level' => 30, 'created_at' => '2022-03-29T09:53:13.457Z', 'updated_at' => '2022-03-29T09:54:13.457Z' } } + describe 'protected branch access levels' do + shared_examples 'access levels' do + let(:relation_hash) { { 'access_level' => access_level, 'created_at' => '2022-03-29T09:53:13.457Z', 'updated_at' => '2022-03-29T09:54:13.457Z' } } - it 'sets access level to maintainer' do - expect(created_object.access_level).to equal(Gitlab::Access::MAINTAINER) + context 'when access level is no one' do + let(:access_level) { Gitlab::Access::NO_ACCESS } + + it 'keeps no one access level' do + expect(created_object.access_level).to equal(access_level) + end + end + + context 'when access level is below maintainer' do + let(:access_level) { Gitlab::Access::DEVELOPER } + + it 'sets access level to maintainer' do + expect(created_object.access_level).to equal(Gitlab::Access::MAINTAINER) + end + end + + context 'when access level is above maintainer' do + let(:access_level) { Gitlab::Access::OWNER } + + it 'sets access level to maintainer' do + expect(created_object.access_level).to equal(Gitlab::Access::MAINTAINER) + end + end + + describe 'root ancestor membership' do + let(:access_level) { 
Gitlab::Access::DEVELOPER } + + context 'when importer user is root group owner' do + let(:importer_user) { create(:user) } + + it 'keeps access level as is' do + group.add_owner(importer_user) + + expect(created_object.access_level).to equal(access_level) + end + end + + context 'when user membership in root group is missing' do + it 'sets access level to maintainer' do + group.members.delete_all + + expect(created_object.access_level).to equal(Gitlab::Access::MAINTAINER) + end + end + + context 'when root ancestor is not a group' do + it 'sets access level to maintainer' do + expect(created_object.access_level).to equal(Gitlab::Access::MAINTAINER) + end + end + end + end + + describe 'merge access level' do + let(:relation_sym) { :'ProtectedBranch::MergeAccessLevel' } + + include_examples 'access levels' end - end - context 'push access level object' do - let(:relation_sym) { :'ProtectedBranch::PushAccessLevel' } - let(:relation_hash) { { 'access_level' => 30, 'created_at' => '2022-03-29T09:53:13.457Z', 'updated_at' => '2022-03-29T09:54:13.457Z' } } + describe 'push access level' do + let(:relation_sym) { :'ProtectedBranch::PushAccessLevel' } - it 'sets access level to maintainer' do - expect(created_object.access_level).to equal(Gitlab::Access::MAINTAINER) + include_examples 'access levels' end end end diff --git a/spec/lib/gitlab/import_export/project/relation_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/relation_tree_restorer_spec.rb index ac646087a95..6053df8ba97 100644 --- a/spec/lib/gitlab/import_export/project/relation_tree_restorer_spec.rb +++ b/spec/lib/gitlab/import_export/project/relation_tree_restorer_spec.rb @@ -9,7 +9,7 @@ require 'spec_helper' -RSpec.describe Gitlab::ImportExport::Project::RelationTreeRestorer do +RSpec.describe Gitlab::ImportExport::Project::RelationTreeRestorer, feature_category: :importers do let_it_be(:importable, reload: true) do create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 
'project') end diff --git a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb index 2699dc10b18..125d1736b9b 100644 --- a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb +++ b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb @@ -6,7 +6,7 @@ def match_mr1_note(content_regex) MergeRequest.find_by(title: 'MR1').notes.find { |n| n.note.match(/#{content_regex}/) } end -RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do +RSpec.describe Gitlab::ImportExport::Project::TreeRestorer, feature_category: :importers do include ImportExport::CommonUtil using RSpec::Parameterized::TableSyntax diff --git a/spec/lib/gitlab/import_export/references_configuration_spec.rb b/spec/lib/gitlab/import_export/references_configuration_spec.rb index 6320fbed975..ad165790b77 100644 --- a/spec/lib/gitlab/import_export/references_configuration_spec.rb +++ b/spec/lib/gitlab/import_export/references_configuration_spec.rb @@ -24,7 +24,7 @@ RSpec.describe 'Import/Export Project configuration' do context "where relation #{params[:relation_path]}" do it 'does not have prohibited keys' do relation_class = relation_class_for_name(relation_name) - relation_attributes = relation_class.new.attributes.keys - relation_class.encrypted_attributes.keys.map(&:to_s) + relation_attributes = relation_class.new.attributes.keys - relation_class.attr_encrypted_attributes.keys.map(&:to_s) current_attributes = parsed_attributes(relation_name, relation_attributes) prohibited_keys = current_attributes.select do |attribute| prohibited_key?(attribute) || !relation_class.attribute_method?(attribute) diff --git a/spec/lib/gitlab/import_export/version_checker_spec.rb b/spec/lib/gitlab/import_export/version_checker_spec.rb index b3730d85f13..dd81b8b846d 100644 --- a/spec/lib/gitlab/import_export/version_checker_spec.rb +++ b/spec/lib/gitlab/import_export/version_checker_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' 
-RSpec.describe Gitlab::ImportExport::VersionChecker, feature_category: :import do +RSpec.describe Gitlab::ImportExport::VersionChecker, feature_category: :importers do include ImportExport::CommonUtil let!(:shared) { Gitlab::ImportExport::Shared.new(nil) } diff --git a/spec/lib/gitlab/incident_management/pager_duty/incident_issue_description_spec.rb b/spec/lib/gitlab/incident_management/pager_duty/incident_issue_description_spec.rb index e2c67c68eb7..7573741082b 100644 --- a/spec/lib/gitlab/incident_management/pager_duty/incident_issue_description_spec.rb +++ b/spec/lib/gitlab/incident_management/pager_duty/incident_issue_description_spec.rb @@ -28,10 +28,10 @@ RSpec.describe Gitlab::IncidentManagement::PagerDuty::IncidentIssueDescription d } end - subject(:to_s) { described_class.new(incident_payload).to_s } + subject(:description) { described_class.new(incident_payload).to_s } it 'returns description' do - expect(to_s).to eq( + expect(description).to eq( <<~MARKDOWN.chomp **Incident:** [My new incident](https://webdemo.pagerduty.com/incidents/PRORDTY)#{markdown_line_break} **Incident number:** 33#{markdown_line_break} @@ -52,7 +52,7 @@ RSpec.describe Gitlab::IncidentManagement::PagerDuty::IncidentIssueDescription d freeze_time do now = Time.current.utc.strftime('%d %B %Y, %-l:%M%p (%Z)') - expect(to_s).to include( + expect(description).to include( <<~MARKDOWN.chomp **Created at:** #{now}#{markdown_line_break} MARKDOWN @@ -70,7 +70,7 @@ RSpec.describe Gitlab::IncidentManagement::PagerDuty::IncidentIssueDescription d end it 'assignees is a list of links' do - expect(to_s).to include( + expect(description).to include( <<~MARKDOWN.chomp **Assignees:** [Laura Haley](https://laura.pagerduty.com), [John Doe](https://john.pagerduty.com)#{markdown_line_break} MARKDOWN @@ -84,7 +84,7 @@ RSpec.describe Gitlab::IncidentManagement::PagerDuty::IncidentIssueDescription d end it 'impacted service is a single link' do - expect(to_s).to include( + expect(description).to include( 
<<~MARKDOWN.chomp **Impacted service:** [XDB Cluster](https://xdb.pagerduty.com) MARKDOWN diff --git a/spec/lib/gitlab/instrumentation_helper_spec.rb b/spec/lib/gitlab/instrumentation_helper_spec.rb index ce67d1d0297..8a88328e0c1 100644 --- a/spec/lib/gitlab/instrumentation_helper_spec.rb +++ b/spec/lib/gitlab/instrumentation_helper_spec.rb @@ -5,7 +5,7 @@ require 'rspec-parameterized' require 'support/helpers/rails_helpers' RSpec.describe Gitlab::InstrumentationHelper, :clean_gitlab_redis_repository_cache, :clean_gitlab_redis_cache, - feature_category: :scalability do + :use_null_store_as_repository_cache, feature_category: :scalability do using RSpec::Parameterized::TableSyntax describe '.add_instrumentation_data', :request_store do @@ -23,42 +23,19 @@ RSpec.describe Gitlab::InstrumentationHelper, :clean_gitlab_redis_repository_cac expect(payload).to include(db_count: 0, db_cached_count: 0, db_write_count: 0) end - shared_examples 'make Gitaly calls' do - context 'when Gitaly calls are made' do - it 'adds Gitaly and Redis data' do - project = create(:project) - RequestStore.clear! - project.repository.exists? + context 'when Gitaly calls are made' do + it 'adds Gitaly and Redis data' do + project = create(:project) + RequestStore.clear! + project.repository.exists? - subject - - expect(payload[:gitaly_calls]).to eq(1) - expect(payload[:gitaly_duration_s]).to be >= 0 - # With MultiStore, the number of `redis_calls` depends on whether primary_store - # (Gitlab::Redis::Repositorycache) and secondary_store (Gitlab::Redis::Cache) are of the same instance. - # In GitLab.com CI, primary and secondary are the same instance, thus only 1 call being made. If primary - # and secondary are different instances, an additional fallback read to secondary_store will be made because - # the first `get` call is a cache miss. Then, the following expect will fail. 
- expect(payload[:redis_calls]).to eq(1) - expect(payload[:redis_duration_ms]).to be_nil - end - end - end - - context 'when multistore ff use_primary_and_secondary_stores_for_repository_cache is enabled' do - before do - stub_feature_flags(use_primary_and_secondary_stores_for_repository_cache: true) - end - - it_behaves_like 'make Gitaly calls' - end + subject - context 'when multistore ff use_primary_and_secondary_stores_for_repository_cache is disabled' do - before do - stub_feature_flags(use_primary_and_secondary_stores_for_repository_cache: false) + expect(payload[:gitaly_calls]).to eq(1) + expect(payload[:gitaly_duration_s]).to be >= 0 + expect(payload[:redis_calls]).to eq(nil) + expect(payload[:redis_duration_ms]).to be_nil end - - it_behaves_like 'make Gitaly calls' end context 'when Redis calls are made' do diff --git a/spec/lib/gitlab/job_waiter_spec.rb b/spec/lib/gitlab/job_waiter_spec.rb index a9edb2b530b..af2da8f20c0 100644 --- a/spec/lib/gitlab/job_waiter_spec.rb +++ b/spec/lib/gitlab/job_waiter_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::JobWaiter, :redis do +RSpec.describe Gitlab::JobWaiter, :redis, feature_category: :shared do describe '.notify' do it 'pushes the jid to the named queue' do key = described_class.new.key @@ -15,6 +15,14 @@ RSpec.describe Gitlab::JobWaiter, :redis do end end + describe '.generate_key' do + it 'generates and return a new key' do + key = described_class.generate_key + + expect(key).to include('gitlab:job_waiter:') + end + end + describe '#wait' do let(:waiter) { described_class.new(2) } diff --git a/spec/lib/gitlab/logger_spec.rb b/spec/lib/gitlab/logger_spec.rb index ed22af8355f..9f11f3ac629 100644 --- a/spec/lib/gitlab/logger_spec.rb +++ b/spec/lib/gitlab/logger_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Logger do +RSpec.describe Gitlab::Logger, feature_category: :logging do describe '.build' do before do allow(described_class).to 
receive(:file_name_noext).and_return('log') @@ -25,40 +25,28 @@ RSpec.describe Gitlab::Logger do using RSpec::Parameterized::TableSyntax where(:env_value, :resulting_level) do - 0 | described_class::DEBUG - :debug | described_class::DEBUG 'debug' | described_class::DEBUG 'DEBUG' | described_class::DEBUG 'DeBuG' | described_class::DEBUG - 1 | described_class::INFO - :info | described_class::INFO 'info' | described_class::INFO 'INFO' | described_class::INFO 'InFo' | described_class::INFO - 2 | described_class::WARN - :warn | described_class::WARN 'warn' | described_class::WARN 'WARN' | described_class::WARN 'WaRn' | described_class::WARN - 3 | described_class::ERROR - :error | described_class::ERROR 'error' | described_class::ERROR 'ERROR' | described_class::ERROR 'ErRoR' | described_class::ERROR - 4 | described_class::FATAL - :fatal | described_class::FATAL 'fatal' | described_class::FATAL 'FATAL' | described_class::FATAL 'FaTaL' | described_class::FATAL - 5 | described_class::UNKNOWN - :unknown | described_class::UNKNOWN 'unknown' | described_class::UNKNOWN 'UNKNOWN' | described_class::UNKNOWN 'UnKnOwN' | described_class::UNKNOWN end with_them do - it 'builds logger if valid log level' do + it 'builds logger if valid log level is provided' do stub_env('GITLAB_LOG_LEVEL', env_value) expect(subject.level).to eq(resulting_level) @@ -69,15 +57,15 @@ RSpec.describe Gitlab::Logger do describe '.log_level' do context 'if GITLAB_LOG_LEVEL is set' do before do - stub_env('GITLAB_LOG_LEVEL', described_class::ERROR) + stub_env('GITLAB_LOG_LEVEL', 'error') end - it 'returns value of GITLAB_LOG_LEVEL' do - expect(described_class.log_level).to eq(described_class::ERROR) + it 'returns value defined by GITLAB_LOG_LEVEL' do + expect(described_class.log_level).to eq('error') end it 'ignores fallback' do - expect(described_class.log_level(fallback: described_class::FATAL)).to eq(described_class::ERROR) + expect(described_class.log_level(fallback: described_class::FATAL)).to 
eq('error') end end diff --git a/spec/lib/gitlab/mail_room/mail_room_spec.rb b/spec/lib/gitlab/mail_room/mail_room_spec.rb index 0c2c9b89005..7259b5e2484 100644 --- a/spec/lib/gitlab/mail_room/mail_room_spec.rb +++ b/spec/lib/gitlab/mail_room/mail_room_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::MailRoom do +RSpec.describe Gitlab::MailRoom, feature_category: :build do let(:default_port) { 143 } let(:log_path) { Rails.root.join('log', 'mail_room_json.log').to_s } @@ -328,4 +328,123 @@ RSpec.describe Gitlab::MailRoom do end end end + + describe 'mailroom encrypted configuration' do + context "when parsing secrets.yml" do + let(:application_secrets_file) { Rails.root.join('spec/fixtures/mail_room/secrets.yml.erb').to_s } + let(:encrypted_settings_key_base) { '0123456789abcdef' * 8 } + + before do + allow(described_class).to receive(:application_secrets_file).and_return(application_secrets_file) + stub_env('KEY', 'an environment variable value') + described_class.instance_variable_set(:@application_secrets, nil) + end + + after do + described_class.instance_variable_set(:@application_secrets, nil) + end + + it 'reads in the secrets.yml file as erb and merges shared and test environments' do + application_secrets = described_class.send(:application_secrets) + + expect(application_secrets).to match(a_hash_including( + a_shared_key: 'this key is shared', + an_overriden_shared_key: 'the merge overwrote this key', + an_environment_specific_key: 'test environment value', + erb_env_key: 'an environment variable value', + encrypted_settings_key_base: encrypted_settings_key_base + )) + + expect(application_secrets[:an_unread_key]).to be_nil + end + end + + context "when parsing gitlab.yml" do + let(:plain_configs) { configs } + let(:shared_path_config) do + { shared: { path: '/this/is/my/shared_path' } }.merge(configs) + end + + let(:encrypted_settings_config) do + { + shared: { path: '/this/is/my/shared_path' }, + encrypted_settings: { path: 
'/this/is/my_custom_encrypted_path' } + }.merge(configs) + end + + let(:encrypted_file_config) do + configs.deep_merge({ + incoming_email: { encrypted_secret_file: '/custom_incoming_secret.yaml.enc' }, + service_desk_email: { encrypted_secret_file: '/custom_service_desk_secret.yaml.enc' } + }) + end + + it 'returns default encrypted_secret_file path' do + allow(described_class).to receive(:load_yaml).and_return(plain_configs) + + expect(described_class.send(:encrypted_secret_file, :incoming_email)) + .to end_with('shared/encrypted_settings/incoming_email.yaml.enc') + + expect(described_class.send(:encrypted_secret_file, :service_desk_email)) + .to end_with('shared/encrypted_settings/service_desk_email.yaml.enc') + end + + it 'returns encrypted_secret_file relative to custom shared path' do + allow(described_class).to receive(:load_yaml).and_return(shared_path_config) + + expect(described_class.send(:encrypted_secret_file, :incoming_email)) + .to eq('/this/is/my/shared_path/encrypted_settings/incoming_email.yaml.enc') + + expect(described_class.send(:encrypted_secret_file, :service_desk_email)) + .to eq('/this/is/my/shared_path/encrypted_settings/service_desk_email.yaml.enc') + end + + it 'returns custom encrypted_secret_file' do + allow(described_class).to receive(:load_yaml).and_return(encrypted_file_config) + + expect(described_class.send(:encrypted_secret_file, :incoming_email)) + .to eq('/custom_incoming_secret.yaml.enc') + + expect(described_class.send(:encrypted_secret_file, :service_desk_email)) + .to eq('/custom_service_desk_secret.yaml.enc') + end + end + + context 'when using encrypted secrets' do + let(:mail_room_template) { ERB.new(File.read(Rails.root.join("./config/mail_room.yml"))).result } + let(:mail_room_yml) { YAML.safe_load(mail_room_template, permitted_classes: [Symbol]) } + let(:application_secrets) { { encrypted_settings_key_base: '0123456789abcdef' * 8 } } # gitleaks:allow + let(:configs) do + { + encrypted_settings: { path: 
'spec/fixtures/mail_room/encrypted_secrets' } + }.merge({ + incoming_email: incoming_email_config, + service_desk_email: service_desk_email_config + }) + end + + before do + allow(described_class).to receive(:application_secrets).and_return(application_secrets) + end + + it 'renders the encrypted secrets into the configuration correctly' do + expect(mail_room_yml[:mailboxes]).to be_an(Array) + expect(mail_room_yml[:mailboxes].length).to eq(2) + + expect(mail_room_yml[:mailboxes][0]).to match( + a_hash_including( + password: 'abc123', + email: 'incoming-test-account@gitlab.com' + ) + ) + + expect(mail_room_yml[:mailboxes][1]).to match( + a_hash_including( + password: '123abc', + email: 'service-desk-test-account@gitlab.example.com' + ) + ) + end + end + end end diff --git a/spec/lib/gitlab/memory/watchdog/configuration_spec.rb b/spec/lib/gitlab/memory/watchdog/configuration_spec.rb index 9242344ead2..4c0f6baef8f 100644 --- a/spec/lib/gitlab/memory/watchdog/configuration_spec.rb +++ b/spec/lib/gitlab/memory/watchdog/configuration_spec.rb @@ -15,7 +15,7 @@ RSpec.describe Gitlab::Memory::Watchdog::Configuration do describe '#handler' do context 'when handler is not set' do it 'defaults to NullHandler' do - expect(configuration.handler).to be(Gitlab::Memory::Watchdog::NullHandler.instance) + expect(configuration.handler).to be(Gitlab::Memory::Watchdog::Handlers::NullHandler.instance) end end end diff --git a/spec/lib/gitlab/memory/watchdog/configurator_spec.rb b/spec/lib/gitlab/memory/watchdog/configurator_spec.rb index a901be84a21..035652abfe6 100644 --- a/spec/lib/gitlab/memory/watchdog/configurator_spec.rb +++ b/spec/lib/gitlab/memory/watchdog/configurator_spec.rb @@ -1,11 +1,8 @@ # frozen_string_literal: true -require 'fast_spec_helper' -require 'prometheus/client' -require 'sidekiq' -require_dependency 'gitlab/cluster/lifecycle_events' +require 'spec_helper' -RSpec.describe Gitlab::Memory::Watchdog::Configurator do +RSpec.describe 
Gitlab::Memory::Watchdog::Configurator, feature_category: :application_performance do shared_examples 'as configurator' do |handler_class, event_reporter_class, sleep_time_env, sleep_time| it 'configures the correct handler' do configurator.call(configuration) @@ -92,7 +89,7 @@ RSpec.describe Gitlab::Memory::Watchdog::Configurator do end it_behaves_like 'as configurator', - Gitlab::Memory::Watchdog::PumaHandler, + Gitlab::Memory::Watchdog::Handlers::PumaHandler, Gitlab::Memory::Watchdog::EventReporter, 'GITLAB_MEMWD_SLEEP_TIME_SEC', described_class::DEFAULT_SLEEP_INTERVAL_S @@ -200,7 +197,7 @@ RSpec.describe Gitlab::Memory::Watchdog::Configurator do subject(:configurator) { described_class.configure_for_sidekiq } it_behaves_like 'as configurator', - Gitlab::Memory::Watchdog::TermProcessHandler, + Gitlab::Memory::Watchdog::Handlers::SidekiqHandler, Gitlab::Memory::Watchdog::SidekiqEventReporter, 'SIDEKIQ_MEMORY_KILLER_CHECK_INTERVAL', described_class::DEFAULT_SIDEKIQ_SLEEP_INTERVAL_S diff --git a/spec/lib/gitlab/memory/watchdog/event_reporter_spec.rb b/spec/lib/gitlab/memory/watchdog/event_reporter_spec.rb index f667bc724d2..f1d241249e2 100644 --- a/spec/lib/gitlab/memory/watchdog/event_reporter_spec.rb +++ b/spec/lib/gitlab/memory/watchdog/event_reporter_spec.rb @@ -106,11 +106,12 @@ RSpec.describe Gitlab::Memory::Watchdog::EventReporter, feature_category: :appli it 'logs violation' do expect(logger).to receive(:warn) - .with( + .with({ pid: Process.pid, worker_id: 'worker_1', memwd_rss_bytes: 1024, - message: 'dummy_text') + message: 'dummy_text' + }) subject end diff --git a/spec/lib/gitlab/memory/watchdog/handlers/null_handler_spec.rb b/spec/lib/gitlab/memory/watchdog/handlers/null_handler_spec.rb new file mode 100644 index 00000000000..09c76de9611 --- /dev/null +++ b/spec/lib/gitlab/memory/watchdog/handlers/null_handler_spec.rb @@ -0,0 +1,13 @@ +# frozen_string_literal: true + +require 'fast_spec_helper' + +RSpec.describe 
Gitlab::Memory::Watchdog::Handlers::NullHandler, feature_category: :application_performance do + subject(:handler) { described_class.instance } + + describe '#call' do + it 'does nothing' do + expect(handler.call).to be(false) + end + end +end diff --git a/spec/lib/gitlab/memory/watchdog/handlers/puma_handler_spec.rb b/spec/lib/gitlab/memory/watchdog/handlers/puma_handler_spec.rb new file mode 100644 index 00000000000..7df95c1722e --- /dev/null +++ b/spec/lib/gitlab/memory/watchdog/handlers/puma_handler_spec.rb @@ -0,0 +1,27 @@ +# frozen_string_literal: true + +require 'fast_spec_helper' + +RSpec.describe Gitlab::Memory::Watchdog::Handlers::PumaHandler, feature_category: :application_performance do + # rubocop: disable RSpec/VerifiedDoubles + # In tests, the Puma constant is not loaded so we cannot make this an instance_double. + let(:puma_worker_handle_class) { double('Puma::Cluster::WorkerHandle') } + let(:puma_worker_handle) { double('worker') } + # rubocop: enable RSpec/VerifiedDoubles + + subject(:handler) { described_class.new({}) } + + before do + stub_const('::Puma::Cluster::WorkerHandle', puma_worker_handle_class) + allow(puma_worker_handle_class).to receive(:new).and_return(puma_worker_handle) + allow(puma_worker_handle).to receive(:term) + end + + describe '#call' do + it 'invokes orderly termination via Puma API' do + expect(puma_worker_handle).to receive(:term) + + expect(handler.call).to be(true) + end + end +end diff --git a/spec/lib/gitlab/memory/watchdog/handlers/sidekiq_handler_spec.rb b/spec/lib/gitlab/memory/watchdog/handlers/sidekiq_handler_spec.rb new file mode 100644 index 00000000000..d1f303e7731 --- /dev/null +++ b/spec/lib/gitlab/memory/watchdog/handlers/sidekiq_handler_spec.rb @@ -0,0 +1,119 @@ +# frozen_string_literal: true + +require 'fast_spec_helper' +require 'sidekiq' + +RSpec.describe Gitlab::Memory::Watchdog::Handlers::SidekiqHandler, feature_category: :application_performance do + let(:sleep_time) { 3 } + 
let(:shutdown_timeout_seconds) { 30 } + let(:handler_iterations) { 0 } + let(:logger) { instance_double(::Logger) } + let(:pid) { $$ } + + before do + allow(Gitlab::Metrics::System).to receive(:monotonic_time) + .and_return(0, 1, shutdown_timeout_seconds, 0, 1, Sidekiq[:timeout] + 2) + allow(Process).to receive(:kill) + allow(::Sidekiq).to receive(:logger).and_return(logger) + allow(logger).to receive(:warn) + allow(Process).to receive(:getpgrp).and_return(pid) + end + + subject(:handler) do + described_class.new(shutdown_timeout_seconds, sleep_time).tap do |instance| + # We need to defuse `sleep` and stop the handler after n iteration + iterations = 0 + allow(instance).to receive(:sleep) do + if (iterations += 1) > handler_iterations + instance.stop + end + end + end + end + + describe '#call' do + shared_examples_for 'handler issues kill command' do + it 'logs sending signal' do + logs.each do |log| + expect(::Sidekiq.logger).to receive(:warn).once.ordered.with(log) + end + + handler.call + end + + it 'sends TERM to the current process' do + signal_params.each do |args| + expect(Process).to receive(:kill).once.ordered.with(*args.first(2)) + end + + expect(handler.call).to be(true) + end + end + + def log(signal, pid, explanation, wait_time = nil) + { + pid: pid, + worker_id: ::Prometheus::PidProvider.worker_id, + memwd_handler_class: described_class.to_s, + memwd_signal: signal, + memwd_explanation: explanation, + memwd_wait_time: wait_time, + message: "Sending signal and waiting" + } + end + + let(:logs) do + signal_params.map { |args| log(*args) } + end + + context "when stop is received after TSTP" do + let(:signal_params) do + [ + [:TSTP, pid, 'stop fetching new jobs', shutdown_timeout_seconds] + ] + end + + it_behaves_like 'handler issues kill command' + end + + context "when stop is received after TERM" do + let(:handler_iterations) { 1 } + let(:signal_params) do + [ + [:TSTP, pid, 'stop fetching new jobs', shutdown_timeout_seconds], + [:TERM, pid, 
'gracefully shut down', Sidekiq[:timeout] + 2] + ] + end + + it_behaves_like 'handler issues kill command' + end + + context "when stop is not received" do + let(:handler_iterations) { 2 } + let(:gpid) { pid + 1 } + let(:kill_pid) { pid } + let(:signal_params) do + [ + [:TSTP, pid, 'stop fetching new jobs', shutdown_timeout_seconds], + [:TERM, pid, 'gracefully shut down', Sidekiq[:timeout] + 2], + [:KILL, kill_pid, 'hard shut down', nil] + ] + end + + before do + allow(Process).to receive(:getpgrp).and_return(gpid) + end + + context 'when process is not group leader' do + it_behaves_like 'handler issues kill command' + end + + context 'when process is a group leader' do + let(:gpid) { pid } + let(:kill_pid) { 0 } + + it_behaves_like 'handler issues kill command' + end + end + end +end diff --git a/spec/lib/gitlab/memory/watchdog_spec.rb b/spec/lib/gitlab/memory/watchdog_spec.rb index 0b2f24476d9..dd6bfb6da2c 100644 --- a/spec/lib/gitlab/memory/watchdog_spec.rb +++ b/spec/lib/gitlab/memory/watchdog_spec.rb @@ -5,7 +5,7 @@ require 'spec_helper' RSpec.describe Gitlab::Memory::Watchdog, :aggregate_failures, feature_category: :application_performance do context 'watchdog' do let(:configuration) { instance_double(described_class::Configuration) } - let(:handler) { instance_double(described_class::NullHandler) } + let(:handler) { instance_double(described_class::Handlers::NullHandler) } let(:reporter) { instance_double(described_class::EventReporter) } let(:sleep_time_seconds) { 60 } let(:threshold_violated) { false } @@ -60,10 +60,10 @@ RSpec.describe Gitlab::Memory::Watchdog, :aggregate_failures, feature_category: it 'reports started event once' do expect(reporter).to receive(:started).once - .with( + .with({ memwd_handler_class: handler.class.name, memwd_sleep_time_s: sleep_time_seconds - ) + }) watchdog.call end @@ -77,11 +77,11 @@ RSpec.describe Gitlab::Memory::Watchdog, :aggregate_failures, feature_category: context 'when no monitors are configured' do it 'reports 
stopped event once with correct reason' do expect(reporter).to receive(:stopped).once - .with( + .with({ memwd_handler_class: handler.class.name, memwd_sleep_time_s: sleep_time_seconds, memwd_reason: 'monitors are not configured' - ) + }) watchdog.call end @@ -96,11 +96,11 @@ RSpec.describe Gitlab::Memory::Watchdog, :aggregate_failures, feature_category: it 'reports stopped event once' do expect(reporter).to receive(:stopped).once - .with( + .with({ memwd_handler_class: handler.class.name, memwd_sleep_time_s: sleep_time_seconds, memwd_reason: 'background task stopped' - ) + }) watchdog.call end @@ -149,13 +149,13 @@ RSpec.describe Gitlab::Memory::Watchdog, :aggregate_failures, feature_category: it 'reports strikes exceeded event' do expect(reporter).to receive(:strikes_exceeded) .with( - name, - memwd_handler_class: handler.class.name, - memwd_sleep_time_s: sleep_time_seconds, - memwd_cur_strikes: 1, - memwd_max_strikes: max_strikes, - message: "dummy_text" - ) + name, { + memwd_handler_class: handler.class.name, + memwd_sleep_time_s: sleep_time_seconds, + memwd_cur_strikes: 1, + memwd_max_strikes: max_strikes, + message: "dummy_text" + }) watchdog.call end @@ -163,11 +163,11 @@ RSpec.describe Gitlab::Memory::Watchdog, :aggregate_failures, feature_category: it 'executes handler and stops the watchdog' do expect(handler).to receive(:call).and_return(true) expect(reporter).to receive(:stopped).once - .with( + .with({ memwd_handler_class: handler.class.name, memwd_sleep_time_s: sleep_time_seconds, memwd_reason: 'successfully handled' - ) + }) watchdog.call end @@ -185,7 +185,7 @@ RSpec.describe Gitlab::Memory::Watchdog, :aggregate_failures, feature_category: it 'always uses the NullHandler' do expect(handler).not_to receive(:call) - expect(described_class::NullHandler.instance).to receive(:call).and_return(true) + expect(described_class::Handlers::NullHandler.instance).to receive(:call).and_return(true) watchdog.call end @@ -216,56 +216,4 @@ RSpec.describe 
Gitlab::Memory::Watchdog, :aggregate_failures, feature_category: end end end - - context 'handlers' do - context 'NullHandler' do - subject(:handler) { described_class::NullHandler.instance } - - describe '#call' do - it 'does nothing' do - expect(handler.call).to be(false) - end - end - end - - context 'TermProcessHandler' do - subject(:handler) { described_class::TermProcessHandler.new(42) } - - describe '#call' do - before do - allow(Process).to receive(:kill) - end - - it 'sends SIGTERM to the current process' do - expect(Process).to receive(:kill).with(:TERM, 42) - - expect(handler.call).to be(true) - end - end - end - - context 'PumaHandler' do - # rubocop: disable RSpec/VerifiedDoubles - # In tests, the Puma constant is not loaded so we cannot make this an instance_double. - let(:puma_worker_handle_class) { double('Puma::Cluster::WorkerHandle') } - let(:puma_worker_handle) { double('worker') } - # rubocop: enable RSpec/VerifiedDoubles - - subject(:handler) { described_class::PumaHandler.new({}) } - - before do - stub_const('::Puma::Cluster::WorkerHandle', puma_worker_handle_class) - allow(puma_worker_handle_class).to receive(:new).and_return(puma_worker_handle) - allow(puma_worker_handle).to receive(:term) - end - - describe '#call' do - it 'invokes orderly termination via Puma API' do - expect(puma_worker_handle).to receive(:term) - - expect(handler.call).to be(true) - end - end - end - end end diff --git a/spec/lib/gitlab/metrics/environment_spec.rb b/spec/lib/gitlab/metrics/environment_spec.rb new file mode 100644 index 00000000000..e94162e625e --- /dev/null +++ b/spec/lib/gitlab/metrics/environment_spec.rb @@ -0,0 +1,32 @@ +# frozen_string_literal: true +require 'fast_spec_helper' +require 'rspec-parameterized' + +require_relative '../../../support/helpers/stub_env' + +RSpec.describe Gitlab::Metrics::Environment, feature_category: :error_budgets do + include StubENV + + describe '.web? .api? .git?' 
do + using RSpec::Parameterized::TableSyntax + + where(:env_var, :git, :api, :web) do + 'web' | false | false | true + 'api' | false | true | false + 'git' | true | false | false + 'websockets' | false | false | false + nil | true | true | true + '' | true | true | true + end + + with_them do + it 'each method returns as expected' do + stub_env('GITLAB_METRICS_INITIALIZE', env_var) + + expect(described_class.git?).to eq(git) + expect(described_class.web?).to eq(web) + expect(described_class.api?).to eq(api) + end + end + end +end diff --git a/spec/lib/gitlab/metrics/global_search_slis_spec.rb b/spec/lib/gitlab/metrics/global_search_slis_spec.rb index 1aa2c4398a7..5248cd08770 100644 --- a/spec/lib/gitlab/metrics/global_search_slis_spec.rb +++ b/spec/lib/gitlab/metrics/global_search_slis_spec.rb @@ -6,10 +6,22 @@ RSpec.describe Gitlab::Metrics::GlobalSearchSlis do using RSpec::Parameterized::TableSyntax describe '#initialize_slis!' do + let(:api_endpoint_labels) do + [a_hash_including(endpoint_id: 'GET /api/:version/search')] + end + + let(:web_endpoint_labels) do + [a_hash_including(endpoint_id: "SearchController#show")] + end + + let(:all_endpoint_labels) do + api_endpoint_labels + web_endpoint_labels + end + it 'initializes Apdex SLIs for global_search' do expect(Gitlab::Metrics::Sli::Apdex).to receive(:initialize_sli).with( :global_search, - a_kind_of(Array) + array_including(all_endpoint_labels) ) described_class.initialize_slis! @@ -18,11 +30,60 @@ RSpec.describe Gitlab::Metrics::GlobalSearchSlis do it 'initializes ErrorRate SLIs for global_search' do expect(Gitlab::Metrics::Sli::ErrorRate).to receive(:initialize_sli).with( :global_search, - a_kind_of(Array) + array_including(all_endpoint_labels) ) described_class.initialize_slis! 
end + + context "when initializing for limited types" do + where(:api, :web) do + [true, false].repeated_permutation(2).to_a + end + + with_them do + it 'only initializes for the relevant endpoints', :aggregate_failures do + allow(Gitlab::Metrics::Environment).to receive(:api?).and_return(api) + allow(Gitlab::Metrics::Environment).to receive(:web?).and_return(web) + allow(Gitlab::Metrics::Sli::Apdex).to receive(:initialize_sli) + allow(Gitlab::Metrics::Sli::ErrorRate).to receive(:initialize_sli) + + described_class.initialize_slis! + + if api + expect(Gitlab::Metrics::Sli::Apdex).to( + have_received(:initialize_sli).with(:global_search, array_including(*api_endpoint_labels)) + ) + expect(Gitlab::Metrics::Sli::ErrorRate).to( + have_received(:initialize_sli).with(:global_search, array_including(*api_endpoint_labels)) + ) + else + expect(Gitlab::Metrics::Sli::Apdex).not_to( + have_received(:initialize_sli).with(:global_search, array_including(*api_endpoint_labels)) + ) + expect(Gitlab::Metrics::Sli::ErrorRate).not_to( + have_received(:initialize_sli).with(:global_search, array_including(*api_endpoint_labels)) + ) + end + + if web + expect(Gitlab::Metrics::Sli::Apdex).to( + have_received(:initialize_sli).with(:global_search, array_including(*web_endpoint_labels)) + ) + expect(Gitlab::Metrics::Sli::ErrorRate).to( + have_received(:initialize_sli).with(:global_search, array_including(*web_endpoint_labels)) + ) + else + expect(Gitlab::Metrics::Sli::Apdex).not_to( + have_received(:initialize_sli).with(:global_search, array_including(*web_endpoint_labels)) + ) + expect(Gitlab::Metrics::Sli::ErrorRate).not_to( + have_received(:initialize_sli).with(:global_search, array_including(*web_endpoint_labels)) + ) + end + end + end + end end describe '#record_apdex' do diff --git a/spec/lib/gitlab/metrics/rails_slis_spec.rb b/spec/lib/gitlab/metrics/rails_slis_spec.rb index 9da102fb8b8..32d3b7581f1 100644 --- a/spec/lib/gitlab/metrics/rails_slis_spec.rb +++ 
b/spec/lib/gitlab/metrics/rails_slis_spec.rb @@ -1,27 +1,15 @@ # frozen_string_literal: true require 'spec_helper' -RSpec.describe Gitlab::Metrics::RailsSlis do - # Limit what routes we'll initialize so we don't have to load the entire thing +RSpec.describe Gitlab::Metrics::RailsSlis, feature_category: :error_budgets do before do - api_route = API::API.routes.find do |route| - API::Base.endpoint_id_for_route(route) == "GET /api/:version/version" - end - - allow(Gitlab::RequestEndpoints).to receive(:all_api_endpoints).and_return([api_route]) - allow(Gitlab::RequestEndpoints).to receive(:all_controller_actions).and_return([[ProjectsController, 'index']]) allow(Gitlab::Graphql::KnownOperations).to receive(:default).and_return(Gitlab::Graphql::KnownOperations.new(%w(foo bar))) end describe '.initialize_request_slis!' do - let(:possible_labels) do + let(:web_possible_labels) do [ { - endpoint_id: "GET /api/:version/version", - feature_category: :not_owned, - request_urgency: :default - }, - { endpoint_id: "ProjectsController#index", feature_category: :projects, request_urgency: :default @@ -29,6 +17,28 @@ RSpec.describe Gitlab::Metrics::RailsSlis do ] end + # using the actual `#known_git_endpoints` here makes sure that we keep the + # list up to date as endpoints get removed + let(:git_possible_labels) do + described_class.__send__(:known_git_endpoints).map do |endpoint_id| + a_hash_including({ + endpoint_id: endpoint_id + }) + end + end + + let(:api_possible_labels) do + [{ + endpoint_id: "GET /api/:version/version", + feature_category: :not_owned, + request_urgency: :default + }] + end + + let(:possible_request_labels) do + web_possible_labels + git_possible_labels + api_possible_labels + end + let(:possible_graphql_labels) do ['graphql:foo', 'graphql:bar', 'graphql:unknown'].map do |endpoint_id| { @@ -40,21 +50,57 @@ RSpec.describe Gitlab::Metrics::RailsSlis do end it "initializes the SLI for all possible endpoints if they weren't", :aggregate_failures do - 
expect(Gitlab::Metrics::Sli::Apdex).to receive(:initialize_sli).with(:rails_request, array_including(*possible_labels)).and_call_original + expect(Gitlab::Metrics::Sli::Apdex).to receive(:initialize_sli).with(:rails_request, array_including(*possible_request_labels)).and_call_original expect(Gitlab::Metrics::Sli::Apdex).to receive(:initialize_sli).with(:graphql_query, array_including(*possible_graphql_labels)).and_call_original - expect(Gitlab::Metrics::Sli::ErrorRate).to receive(:initialize_sli).with(:rails_request, array_including(*possible_labels)).and_call_original + expect(Gitlab::Metrics::Sli::ErrorRate).to receive(:initialize_sli).with(:rails_request, array_including(*possible_request_labels)).and_call_original described_class.initialize_request_slis! end - it "initializes the SLI for all possible endpoints if they weren't given error rate feature flag is disabled", :aggregate_failures do - stub_feature_flags(gitlab_metrics_error_rate_sli: false) + context "when initializing for limited types" do + using RSpec::Parameterized::TableSyntax - expect(Gitlab::Metrics::Sli::Apdex).to receive(:initialize_sli).with(:rails_request, array_including(*possible_labels)).and_call_original - expect(Gitlab::Metrics::Sli::Apdex).to receive(:initialize_sli).with(:graphql_query, array_including(*possible_graphql_labels)).and_call_original - expect(Gitlab::Metrics::Sli::ErrorRate).not_to receive(:initialize_sli) + where(:git, :api, :web) do + [true, false].repeated_permutation(3).to_a + end - described_class.initialize_request_slis! 
+ with_them do + it 'initializes only with the expected labels', :aggregate_failures do + allow(Gitlab::Metrics::Environment).to receive(:git?).and_return(git) + allow(Gitlab::Metrics::Environment).to receive(:api?).and_return(api) + allow(Gitlab::Metrics::Environment).to receive(:web?).and_return(web) + allow(Gitlab::Metrics::Sli::Apdex).to receive(:initialize_sli) + allow(Gitlab::Metrics::Sli::ErrorRate).to receive(:initialize_sli) + + described_class.initialize_request_slis! + + if git + expect(Gitlab::Metrics::Sli::Apdex).to have_received(:initialize_sli).with(:rails_request, array_including(*git_possible_labels)) + expect(Gitlab::Metrics::Sli::ErrorRate).to have_received(:initialize_sli).with(:rails_request, array_including(*git_possible_labels)) + else + expect(Gitlab::Metrics::Sli::Apdex).not_to have_received(:initialize_sli).with(:rails_request, array_including(*git_possible_labels)) + expect(Gitlab::Metrics::Sli::ErrorRate).not_to have_received(:initialize_sli).with(:rails_request, array_including(*git_possible_labels)) + end + + if api + expect(Gitlab::Metrics::Sli::Apdex).to have_received(:initialize_sli).with(:rails_request, array_including(*api_possible_labels)) + expect(Gitlab::Metrics::Sli::ErrorRate).to have_received(:initialize_sli).with(:rails_request, array_including(*api_possible_labels)) + expect(Gitlab::Metrics::Sli::Apdex).to have_received(:initialize_sli).with(:graphql_query, array_including(*possible_graphql_labels)) + else + expect(Gitlab::Metrics::Sli::Apdex).not_to have_received(:initialize_sli).with(:rails_request, array_including(*api_possible_labels)) + expect(Gitlab::Metrics::Sli::ErrorRate).not_to have_received(:initialize_sli).with(:rails_request, array_including(*api_possible_labels)) + expect(Gitlab::Metrics::Sli::Apdex).to have_received(:initialize_sli).with(:graphql_query, []) + end + + if web + expect(Gitlab::Metrics::Sli::Apdex).to have_received(:initialize_sli).with(:rails_request, array_including(*web_possible_labels)) + 
expect(Gitlab::Metrics::Sli::ErrorRate).to have_received(:initialize_sli).with(:rails_request, array_including(*web_possible_labels)) + else + expect(Gitlab::Metrics::Sli::Apdex).not_to have_received(:initialize_sli).with(:rails_request, array_including(*web_possible_labels)) + expect(Gitlab::Metrics::Sli::ErrorRate).not_to have_received(:initialize_sli).with(:rails_request, array_including(*web_possible_labels)) + end + end + end end end diff --git a/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb b/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb index 61c690b85e9..97b70b5a7fc 100644 --- a/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb +++ b/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb @@ -53,18 +53,6 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware, :aggregate_failures, fea subject.call(env) end - it 'does not track error rate when feature flag is disabled' do - stub_feature_flags(gitlab_metrics_error_rate_sli: false) - - expect(described_class).to receive_message_chain(:http_requests_total, :increment).with(method: 'get', status: '200', feature_category: 'unknown') - expect(described_class).to receive_message_chain(:http_request_duration_seconds, :observe).with({ method: 'get' }, a_positive_execution_time) - expect(Gitlab::Metrics::RailsSlis.request_apdex).to receive(:increment) - .with(labels: { feature_category: 'unknown', endpoint_id: 'unknown', request_urgency: :default }, success: true) - expect(Gitlab::Metrics::RailsSlis.request_error_rate).not_to receive(:increment) - - subject.call(env) - end - context 'request is a health check endpoint' do ['/-/liveness', '/-/liveness/', '/-/%6D%65%74%72%69%63%73'].each do |path| context "when path is #{path}" do @@ -116,17 +104,6 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware, :aggregate_failures, fea subject.call(env) end - - it 'does not track error rate when feature flag is disabled' do - stub_feature_flags(gitlab_metrics_error_rate_sli: false) - - 
expect(described_class).to receive_message_chain(:http_requests_total, :increment).with(method: 'get', status: '500', feature_category: 'unknown') - expect(described_class).not_to receive(:http_request_duration_seconds) - expect(Gitlab::Metrics::RailsSlis).not_to receive(:request_apdex) - expect(Gitlab::Metrics::RailsSlis.request_error_rate).not_to receive(:increment) - - subject.call(env) - end end context '@app.call throws exception' do @@ -415,6 +392,35 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware, :aggregate_failures, fea end end + context 'A request with urgency set on the env (from ETag-caching)' do + let(:env) do + { described_class::REQUEST_URGENCY_KEY => Gitlab::EndpointAttributes::Config::REQUEST_URGENCIES[:medium], + 'REQUEST_METHOD' => 'GET' } + end + + it 'records the request with the correct urgency' do + allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(100, 100.1) + expect(Gitlab::Metrics::RailsSlis.request_apdex).to receive(:increment).with( + labels: { + feature_category: 'unknown', + endpoint_id: 'unknown', + request_urgency: :medium + }, + success: true + ) + expect(Gitlab::Metrics::RailsSlis.request_error_rate).to receive(:increment).with( + labels: { + feature_category: 'unknown', + endpoint_id: 'unknown', + request_urgency: :medium + }, + error: false + ) + + subject.call(env) + end + end + context 'An unknown request' do let(:env) do { 'REQUEST_METHOD' => 'GET' } diff --git a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb index 4569f3134ae..7ce5cbec18d 100644 --- a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb +++ b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb @@ -14,7 +14,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do describe '.load_balancing_metric_counter_keys' do context 'multiple databases' do before do - skip_if_multiple_databases_not_setup + skip_if_multiple_databases_not_setup(:ci) end 
it 'has expected keys' do @@ -91,7 +91,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do describe '.load_balancing_metric_duration_keys' do context 'multiple databases' do before do - skip_if_multiple_databases_not_setup + skip_if_multiple_databases_not_setup(:ci) end it 'has expected keys' do diff --git a/spec/lib/gitlab/metrics/subscribers/ldap_spec.rb b/spec/lib/gitlab/metrics/subscribers/ldap_spec.rb index b81000be62a..fb822c8d779 100644 --- a/spec/lib/gitlab/metrics/subscribers/ldap_spec.rb +++ b/spec/lib/gitlab/metrics/subscribers/ldap_spec.rb @@ -101,11 +101,11 @@ RSpec.describe Gitlab::Metrics::Subscribers::Ldap, :request_store, feature_categ it "tracks LDAP request duration" do expect(transaction).to receive(:observe) - .with(:gitlab_net_ldap_duration_seconds, 0.321, { name: "open" }) + .with(:gitlab_net_ldap_duration_seconds, 0.000321, { name: "open" }) expect(transaction).to receive(:observe) - .with(:gitlab_net_ldap_duration_seconds, 0.12, { name: "search" }) + .with(:gitlab_net_ldap_duration_seconds, 0.00012, { name: "search" }) expect(transaction).to receive(:observe) - .with(:gitlab_net_ldap_duration_seconds, 5.3, { name: "search" }) + .with(:gitlab_net_ldap_duration_seconds, 0.0053, { name: "search" }) subscriber.observe_event(event_1) subscriber.observe_event(event_2) @@ -118,7 +118,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::Ldap, :request_store, feature_categ subscriber.observe_event(event_3) expect(Gitlab::SafeRequestStore[:net_ldap_count]).to eq(3) - expect(Gitlab::SafeRequestStore[:net_ldap_duration_s]).to eq(5.741) # 0.321 + 0.12 + 5.3 + expect(Gitlab::SafeRequestStore[:net_ldap_duration_s]).to eq(0.005741) # (0.321 + 0.12 + 5.3) / 1000 end end end diff --git a/spec/lib/gitlab/metrics/subscribers/load_balancing_spec.rb b/spec/lib/gitlab/metrics/subscribers/load_balancing_spec.rb index 7f7efaffd9e..b401b7cc996 100644 --- a/spec/lib/gitlab/metrics/subscribers/load_balancing_spec.rb +++ 
b/spec/lib/gitlab/metrics/subscribers/load_balancing_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Metrics::Subscribers::LoadBalancing, :request_store do +RSpec.describe Gitlab::Metrics::Subscribers::LoadBalancing, :request_store, feature_category: :pods do let(:subscriber) { described_class.new } describe '#caught_up_replica_pick' do diff --git a/spec/lib/gitlab/middleware/go_spec.rb b/spec/lib/gitlab/middleware/go_spec.rb index bed43c04460..aaa274e252d 100644 --- a/spec/lib/gitlab/middleware/go_spec.rb +++ b/spec/lib/gitlab/middleware/go_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Middleware::Go do +RSpec.describe Gitlab::Middleware::Go, feature_category: :source_code_management do let(:app) { double(:app) } let(:middleware) { described_class.new(app) } let(:env) do diff --git a/spec/lib/gitlab/nav/top_nav_menu_item_spec.rb b/spec/lib/gitlab/nav/top_nav_menu_item_spec.rb index d1d6ac80c40..6632a8106ca 100644 --- a/spec/lib/gitlab/nav/top_nav_menu_item_spec.rb +++ b/spec/lib/gitlab/nav/top_nav_menu_item_spec.rb @@ -2,7 +2,7 @@ require 'fast_spec_helper' -RSpec.describe ::Gitlab::Nav::TopNavMenuItem do +RSpec.describe ::Gitlab::Nav::TopNavMenuItem, feature_category: :navigation do describe '.build' do it 'builds a hash from the given args' do item = { diff --git a/spec/lib/gitlab/octokit/middleware_spec.rb b/spec/lib/gitlab/octokit/middleware_spec.rb index 7bce0788327..f7063f2c4f2 100644 --- a/spec/lib/gitlab/octokit/middleware_spec.rb +++ b/spec/lib/gitlab/octokit/middleware_spec.rb @@ -2,11 +2,11 @@ require 'spec_helper' -RSpec.describe Gitlab::Octokit::Middleware do +RSpec.describe Gitlab::Octokit::Middleware, feature_category: :importers do let(:app) { double(:app) } let(:middleware) { described_class.new(app) } - shared_examples 'Public URL' do + shared_examples 'Allowed URL' do it 'does not raise an error' do expect(app).to receive(:call).with(env) @@ -14,7 +14,7 @@ RSpec.describe Gitlab::Octokit::Middleware 
do end end - shared_examples 'Local URL' do + shared_examples 'Blocked URL' do it 'raises an error' do expect { middleware.call(env) }.to raise_error(Gitlab::UrlBlocker::BlockedUrlError) end @@ -24,7 +24,24 @@ RSpec.describe Gitlab::Octokit::Middleware do context 'when the URL is a public URL' do let(:env) { { url: 'https://public-url.com' } } - it_behaves_like 'Public URL' + it_behaves_like 'Allowed URL' + + context 'with failed address check' do + before do + stub_env('RSPEC_ALLOW_INVALID_URLS', 'false') + allow(Addrinfo).to receive(:getaddrinfo).and_raise(SocketError) + end + + it_behaves_like 'Blocked URL' + + context 'with disabled dns rebinding check' do + before do + stub_application_setting(dns_rebinding_protection_enabled: false) + end + + it_behaves_like 'Allowed URL' + end + end end context 'when the URL is a localhost address' do @@ -35,7 +52,7 @@ RSpec.describe Gitlab::Octokit::Middleware do stub_application_setting(allow_local_requests_from_web_hooks_and_services: false) end - it_behaves_like 'Local URL' + it_behaves_like 'Blocked URL' end context 'when localhost requests are allowed' do @@ -43,7 +60,7 @@ RSpec.describe Gitlab::Octokit::Middleware do stub_application_setting(allow_local_requests_from_web_hooks_and_services: true) end - it_behaves_like 'Public URL' + it_behaves_like 'Allowed URL' end end @@ -55,7 +72,7 @@ RSpec.describe Gitlab::Octokit::Middleware do stub_application_setting(allow_local_requests_from_web_hooks_and_services: false) end - it_behaves_like 'Local URL' + it_behaves_like 'Blocked URL' end context 'when local network requests are allowed' do @@ -63,7 +80,7 @@ RSpec.describe Gitlab::Octokit::Middleware do stub_application_setting(allow_local_requests_from_web_hooks_and_services: true) end - it_behaves_like 'Public URL' + it_behaves_like 'Allowed URL' end end diff --git a/spec/lib/gitlab/omniauth_initializer_spec.rb b/spec/lib/gitlab/omniauth_initializer_spec.rb index 563c97fa2cb..daef280dbaa 100644 --- 
a/spec/lib/gitlab/omniauth_initializer_spec.rb +++ b/spec/lib/gitlab/omniauth_initializer_spec.rb @@ -189,7 +189,7 @@ RSpec.describe Gitlab::OmniauthInitializer do it 'passes "args" hash as symbolized hash argument' do hash_config = { 'name' => 'hash', 'args' => { 'custom' => 'format' } } - expect(devise_config).to receive(:omniauth).with(:hash, custom: 'format') + expect(devise_config).to receive(:omniauth).with(:hash, { custom: 'format' }) subject.execute([hash_config]) end @@ -197,7 +197,7 @@ RSpec.describe Gitlab::OmniauthInitializer do it 'normalizes a String strategy_class' do hash_config = { 'name' => 'hash', 'args' => { strategy_class: 'OmniAuth::Strategies::OAuth2Generic' } } - expect(devise_config).to receive(:omniauth).with(:hash, strategy_class: OmniAuth::Strategies::OAuth2Generic) + expect(devise_config).to receive(:omniauth).with(:hash, { strategy_class: OmniAuth::Strategies::OAuth2Generic }) subject.execute([hash_config]) end @@ -205,7 +205,7 @@ RSpec.describe Gitlab::OmniauthInitializer do it 'allows a class to be specified in strategy_class' do hash_config = { 'name' => 'hash', 'args' => { strategy_class: OmniAuth::Strategies::OAuth2Generic } } - expect(devise_config).to receive(:omniauth).with(:hash, strategy_class: OmniAuth::Strategies::OAuth2Generic) + expect(devise_config).to receive(:omniauth).with(:hash, { strategy_class: OmniAuth::Strategies::OAuth2Generic }) subject.execute([hash_config]) end @@ -216,26 +216,10 @@ RSpec.describe Gitlab::OmniauthInitializer do expect { subject.execute([hash_config]) }.to raise_error(NameError) end - it 'configures fail_with_empty_uid for shibboleth' do - shibboleth_config = { 'name' => 'shibboleth', 'args' => {} } - - expect(devise_config).to receive(:omniauth).with(:shibboleth, fail_with_empty_uid: true) - - subject.execute([shibboleth_config]) - end - - it 'configures remote_sign_out_handler proc for authentiq' do - authentiq_config = { 'name' => 'authentiq', 'args' => {} } - - expect(devise_config).to 
receive(:omniauth).with(:authentiq, remote_sign_out_handler: an_instance_of(Proc)) - - subject.execute([authentiq_config]) - end - it 'configures on_single_sign_out proc for cas3' do cas3_config = { 'name' => 'cas3', 'args' => {} } - expect(devise_config).to receive(:omniauth).with(:cas3, on_single_sign_out: an_instance_of(Proc)) + expect(devise_config).to receive(:omniauth).with(:cas3, { on_single_sign_out: an_instance_of(Proc) }) subject.execute([cas3_config]) end @@ -247,11 +231,11 @@ RSpec.describe Gitlab::OmniauthInitializer do } expect(devise_config).to receive(:omniauth).with( - :google_oauth2, - access_type: "offline", - approval_prompt: "", - client_options: { connection_opts: { request: { timeout: Gitlab::OmniauthInitializer::OAUTH2_TIMEOUT_SECONDS } } } - ) + :google_oauth2, { + access_type: "offline", + approval_prompt: "", + client_options: { connection_opts: { request: { timeout: Gitlab::OmniauthInitializer::OAUTH2_TIMEOUT_SECONDS } } } + }) subject.execute([google_config]) end @@ -263,10 +247,10 @@ RSpec.describe Gitlab::OmniauthInitializer do } expect(devise_config).to receive(:omniauth).with( - :gitlab, - client_options: { site: conf.dig('args', 'client_options', 'site') }, - authorize_params: { gl_auth_type: 'login' } - ) + :gitlab, { + client_options: { site: conf.dig('args', 'client_options', 'site') }, + authorize_params: { gl_auth_type: 'login' } + }) subject.execute([conf]) end @@ -275,9 +259,9 @@ RSpec.describe Gitlab::OmniauthInitializer do conf = { 'name' => 'gitlab' } expect(devise_config).to receive(:omniauth).with( - :gitlab, - authorize_params: { gl_auth_type: 'login' } - ) + :gitlab, { + authorize_params: { gl_auth_type: 'login' } + }) subject.execute([conf]) end @@ -288,7 +272,7 @@ RSpec.describe Gitlab::OmniauthInitializer do expect(devise_config).to receive(:omniauth).with( :gitlab, 'a', - authorize_params: { gl_auth_type: 'login' } + { authorize_params: { gl_auth_type: 'login' } } ) subject.execute([conf]) @@ -303,7 +287,7 @@ 
RSpec.describe Gitlab::OmniauthInitializer do expect(devise_config).to receive(:omniauth).with( :gitlab, - authorize_params: { gl_auth_type: 'login' } + { authorize_params: { gl_auth_type: 'login' } } ) subject.execute([conf]) diff --git a/spec/lib/gitlab/other_markup_spec.rb b/spec/lib/gitlab/other_markup_spec.rb index 6b24c8a8710..6b4b0e8fda6 100644 --- a/spec/lib/gitlab/other_markup_spec.rb +++ b/spec/lib/gitlab/other_markup_spec.rb @@ -47,7 +47,7 @@ RSpec.describe Gitlab::OtherMarkup do end end - def render(*args) - described_class.render(*args) + def render(...) + described_class.render(...) end end diff --git a/spec/lib/gitlab/pages/cache_control_spec.rb b/spec/lib/gitlab/pages/cache_control_spec.rb index dd15aa87441..72240f52580 100644 --- a/spec/lib/gitlab/pages/cache_control_spec.rb +++ b/spec/lib/gitlab/pages/cache_control_spec.rb @@ -13,15 +13,23 @@ RSpec.describe Gitlab::Pages::CacheControl, feature_category: :pages do end it 'clears the cache' do + cached_keys = [ + "pages_domain_for_#{type}_1_settings-hash", + "pages_domain_for_#{type}_1" + ] + + expect(::Gitlab::AppLogger) + .to receive(:info) + .with( + message: 'clear pages cache', + pages_keys: cached_keys, + pages_type: type, + pages_id: 1 + ) + expect(Rails.cache) .to receive(:delete_multi) - .with( - array_including( - [ - "pages_domain_for_#{type}_1", - "pages_domain_for_#{type}_1_settings-hash" - ] - )) + .with(cached_keys) subject.clear_cache end @@ -31,13 +39,13 @@ RSpec.describe Gitlab::Pages::CacheControl, feature_category: :pages do describe '.for_namespace' do subject(:cache_control) { described_class.for_namespace(1) } - it_behaves_like 'cache_control', 'namespace' + it_behaves_like 'cache_control', :namespace end describe '.for_domain' do subject(:cache_control) { described_class.for_domain(1) } - it_behaves_like 'cache_control', 'domain' + it_behaves_like 'cache_control', :domain end describe '#cache_key' do diff --git a/spec/lib/gitlab/pagination/offset_pagination_spec.rb 
b/spec/lib/gitlab/pagination/offset_pagination_spec.rb index b1c4ffd6c29..dc32f471756 100644 --- a/spec/lib/gitlab/pagination/offset_pagination_spec.rb +++ b/spec/lib/gitlab/pagination/offset_pagination_spec.rb @@ -308,8 +308,8 @@ RSpec.describe Gitlab::Pagination::OffsetPagination do expect(subject).to receive(:header).with(*args, &block) end - def expect_no_header(*args, &block) - expect(subject).not_to receive(:header).with(*args) + def expect_no_header(...) + expect(subject).not_to receive(:header).with(...) end def expect_message(method) diff --git a/spec/lib/gitlab/quick_actions/command_definition_spec.rb b/spec/lib/gitlab/quick_actions/command_definition_spec.rb index 8362c07baca..53c2db8a826 100644 --- a/spec/lib/gitlab/quick_actions/command_definition_spec.rb +++ b/spec/lib/gitlab/quick_actions/command_definition_spec.rb @@ -3,7 +3,9 @@ require 'spec_helper' RSpec.describe Gitlab::QuickActions::CommandDefinition do - subject { described_class.new(:command) } + let(:mock_command) { :command } + + subject { described_class.new(mock_command) } describe "#all_names" do context "when the command has aliases" do @@ -74,7 +76,7 @@ RSpec.describe Gitlab::QuickActions::CommandDefinition do context "when the command has types" do before do - subject.types = [Issue, Commit] + subject.types = [Issue, Commit, WorkItem] end context "when the command target type is allowed" do @@ -82,6 +84,26 @@ RSpec.describe Gitlab::QuickActions::CommandDefinition do opts[:quick_action_target] = Issue.new expect(subject.available?(opts)).to be true end + + context 'when the command target type is Work Item' do + context 'when the command is not allowed' do + it "returns false" do + opts[:quick_action_target] = build(:work_item) + expect(subject.available?(opts)).to be false + end + end + + context 'when the command is allowed' do + it "returns true" do + allow_next_instance_of(WorkItem) do |work_item| + allow(work_item).to 
receive(:supported_quick_action_commands).and_return([mock_command]) + end + + opts[:quick_action_target] = build(:work_item) + expect(subject.available?(opts)).to be true + end + end + end end context "when the command target type is not allowed" do @@ -99,6 +121,9 @@ RSpec.describe Gitlab::QuickActions::CommandDefinition do opts[:quick_action_target] = MergeRequest.new expect(subject.available?(opts)).to be true + + opts[:quick_action_target] = build(:work_item) + expect(subject.available?(opts)).to be true end end end diff --git a/spec/lib/gitlab/redis/cache_spec.rb b/spec/lib/gitlab/redis/cache_spec.rb index 82ff8a26199..64615c4d9ad 100644 --- a/spec/lib/gitlab/redis/cache_spec.rb +++ b/spec/lib/gitlab/redis/cache_spec.rb @@ -26,5 +26,22 @@ RSpec.describe Gitlab::Redis::Cache do expect(described_class.active_support_config[:expires_in]).to eq(1.day) end + + context 'when encountering an error' do + let(:cache) { ActiveSupport::Cache::RedisCacheStore.new(**described_class.active_support_config) } + + subject { cache.read('x') } + + before do + described_class.with do |redis| + allow(redis).to receive(:get).and_raise(::Redis::CommandError) + end + end + + it 'logs error' do + expect(::Gitlab::ErrorTracking).to receive(:log_exception) + subject + end + end end end diff --git a/spec/lib/gitlab/redis/cluster_rate_limiting_spec.rb b/spec/lib/gitlab/redis/cluster_rate_limiting_spec.rb new file mode 100644 index 00000000000..3eba3233f08 --- /dev/null +++ b/spec/lib/gitlab/redis/cluster_rate_limiting_spec.rb @@ -0,0 +1,7 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Redis::ClusterRateLimiting, feature_category: :redis do + include_examples "redis_new_instance_shared_examples", 'cluster_rate_limiting', Gitlab::Redis::Cache +end diff --git a/spec/lib/gitlab/redis/db_load_balancing_spec.rb b/spec/lib/gitlab/redis/db_load_balancing_spec.rb new file mode 100644 index 00000000000..d633413ddec --- /dev/null +++ 
b/spec/lib/gitlab/redis/db_load_balancing_spec.rb @@ -0,0 +1,52 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Redis::DbLoadBalancing, feature_category: :scalability do + include_examples "redis_new_instance_shared_examples", 'db_load_balancing', Gitlab::Redis::SharedState + include_examples "redis_shared_examples" + + describe '#pool' do + let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" } + let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" } + + subject { described_class.pool } + + before do + allow(described_class).to receive(:config_file_name).and_return(config_new_format_host) + + # Override rails root to avoid having our fixtures overwritten by `redis.yml` if it exists + allow(Gitlab::Redis::SharedState).to receive(:rails_root).and_return(mktmpdir) + allow(Gitlab::Redis::SharedState).to receive(:config_file_name).and_return(config_new_format_socket) + end + + around do |example| + clear_pool + example.run + ensure + clear_pool + end + + it 'instantiates an instance of MultiStore' do + subject.with do |redis_instance| + expect(redis_instance).to be_instance_of(::Gitlab::Redis::MultiStore) + + expect(redis_instance.primary_store.connection[:id]).to eq("redis://test-host:6379/99") + expect(redis_instance.secondary_store.connection[:id]).to eq("unix:///path/to/redis.sock/0") + + expect(redis_instance.instance_name).to eq('DbLoadBalancing') + end + end + + it_behaves_like 'multi store feature flags', :use_primary_and_secondary_stores_for_db_load_balancing, + :use_primary_store_as_default_for_db_load_balancing + end + + describe '#raw_config_hash' do + it 'has a legacy default URL' do + expect(subject).to receive(:fetch_config).and_return(false) + + expect(subject.send(:raw_config_hash)).to eq(url: 'redis://localhost:6382') + end + end +end diff --git a/spec/lib/gitlab/redis/duplicate_jobs_spec.rb b/spec/lib/gitlab/redis/duplicate_jobs_spec.rb deleted file mode 
100644 index 4d46a567032..00000000000 --- a/spec/lib/gitlab/redis/duplicate_jobs_spec.rb +++ /dev/null @@ -1,84 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Redis::DuplicateJobs do - # Note: this is a pseudo-store in front of `SharedState`, meant only as a tool - # to move away from `Sidekiq.redis` for duplicate job data. Thus, we use the - # same store configuration as the former. - let(:instance_specific_config_file) { "config/redis.shared_state.yml" } - let(:environment_config_file_name) { "GITLAB_REDIS_SHARED_STATE_CONFIG_FILE" } - - include_examples "redis_shared_examples" - - describe '#pool' do - subject { described_class.pool } - - around do |example| - clear_pool - example.run - ensure - clear_pool - end - - context 'store connection settings' do - let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" } - let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" } - - before do - allow(Gitlab::Redis::SharedState).to receive(:config_file_name).and_return(config_new_format_host) - allow(Gitlab::Redis::Queues).to receive(:config_file_name).and_return(config_new_format_socket) - end - - it 'instantiates an instance of MultiStore' do - subject.with do |redis_instance| - expect(redis_instance).to be_instance_of(::Gitlab::Redis::MultiStore) - - expect(redis_instance.primary_store.connection[:id]).to eq("redis://test-host:6379/99") - expect(redis_instance.primary_store.connection[:namespace]).to be_nil - expect(redis_instance.secondary_store.connection[:id]).to eq("unix:///path/to/redis.sock/0") - expect(redis_instance.secondary_store.connection[:namespace]).to eq("resque:gitlab") - - expect(redis_instance.instance_name).to eq('DuplicateJobs') - end - end - end - - # Make sure they current namespace is respected for the secondary store but omitted from the primary - context 'key namespaces' do - let(:key) { 'key' } - let(:value) { '123' } - - it 'writes keys to 
SharedState with no prefix, and to Queues with the "resque:gitlab:" prefix' do - subject.with do |redis_instance| - redis_instance.set(key, value) - end - - Gitlab::Redis::SharedState.with do |redis_instance| - expect(redis_instance.get(key)).to eq(value) - end - - Gitlab::Redis::Queues.with do |redis_instance| - expect(redis_instance.get("resque:gitlab:#{key}")).to eq(value) - end - end - end - - it_behaves_like 'multi store feature flags', :use_primary_and_secondary_stores_for_duplicate_jobs, - :use_primary_store_as_default_for_duplicate_jobs - end - - describe '#raw_config_hash' do - it 'has a legacy default URL' do - expect(subject).to receive(:fetch_config) { false } - - expect(subject.send(:raw_config_hash)).to eq(url: 'redis://localhost:6382') - end - end - - describe '#store_name' do - it 'returns the name of the SharedState store' do - expect(described_class.store_name).to eq('SharedState') - end - end -end diff --git a/spec/lib/gitlab/redis/multi_store_spec.rb b/spec/lib/gitlab/redis/multi_store_spec.rb index f198ba90d0a..423a7e80ead 100644 --- a/spec/lib/gitlab/redis/multi_store_spec.rb +++ b/spec/lib/gitlab/redis/multi_store_spec.rb @@ -210,7 +210,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do end end - RSpec.shared_examples_for 'fallback read from the secondary store' do + RSpec.shared_examples_for 'fallback read from the non-default store' do let(:counter) { Gitlab::Metrics::NullMetric.instance } before do @@ -218,7 +218,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do end it 'fallback and execute on secondary instance' do - expect(secondary_store).to receive(name).with(*expected_args).and_call_original + expect(multi_store.fallback_store).to receive(name).with(*expected_args).and_call_original subject end @@ -242,7 +242,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do context 'when fallback read from the secondary instance raises an exception' do before do - 
allow(secondary_store).to receive(name).with(*expected_args).and_raise(StandardError) + allow(multi_store.fallback_store).to receive(name).with(*expected_args).and_raise(StandardError) end it 'fails with exception' do @@ -283,10 +283,12 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do subject end - it 'does not execute on the secondary store' do - expect(secondary_store).not_to receive(name) + unless params[:block] + it 'does not execute on the secondary store' do + expect(secondary_store).not_to receive(name) - subject + subject + end end include_examples 'reads correct value' @@ -294,7 +296,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do context 'when reading from primary instance is raising an exception' do before do - allow(primary_store).to receive(name).with(*expected_args).and_raise(StandardError) + allow(multi_store.default_store).to receive(name).with(*expected_args).and_raise(StandardError) allow(Gitlab::ErrorTracking).to receive(:log_exception) end @@ -305,16 +307,16 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do subject end - include_examples 'fallback read from the secondary store' + include_examples 'fallback read from the non-default store' end - context 'when reading from empty primary instance' do + context 'when reading from empty default instance' do before do - # this ensures a cache miss without having to stub primary store - primary_store.flushdb + # this ensures a cache miss without having to stub the default store + multi_store.default_store.flushdb end - include_examples 'fallback read from the secondary store' + include_examples 'fallback read from the non-default store' end context 'when the command is executed within pipelined block' do @@ -344,8 +346,16 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do end context 'when block is provided' do - it 'yields to the block' do + it 'both stores yields to the block' do expect(primary_store).to 
receive(name).and_yield(value) + expect(secondary_store).to receive(name).and_yield(value) + + subject + end + + it 'both stores to execute' do + expect(primary_store).to receive(name).with(*expected_args).and_call_original + expect(secondary_store).to receive(name).with(*expected_args).and_call_original subject end @@ -412,7 +422,6 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do multi_store.mget(values) do |v| multi_store.sadd(skey, v) multi_store.scard(skey) - v # mget receiving block returns the last line of the block for cache-hit check end end @@ -422,9 +431,9 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do expect(primary_store).to receive(:send).with(:sadd, skey, %w[1 2 3]).and_call_original expect(primary_store).to receive(:send).with(:scard, skey).and_call_original - expect(secondary_store).not_to receive(:send).with(:mget, values).and_call_original - expect(secondary_store).not_to receive(:send).with(:sadd, skey, %w[1 2 3]).and_call_original - expect(secondary_store).not_to receive(:send).with(:scard, skey).and_call_original + expect(secondary_store).to receive(:send).with(:mget, values).and_call_original + expect(secondary_store).to receive(:send).with(:sadd, skey, %w[10 20 30]).and_call_original + expect(secondary_store).to receive(:send).with(:scard, skey).and_call_original subject end @@ -451,7 +460,15 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do end context 'when primary instance is default store' do - it_behaves_like 'primary instance executes block' + it 'ensures only primary instance is executing the block' do + expect(secondary_store).not_to receive(:send) + + expect(primary_store).to receive(:send).with(:mget, values).and_call_original + expect(primary_store).to receive(:send).with(:sadd, skey, %w[1 2 3]).and_call_original + expect(primary_store).to receive(:send).with(:scard, skey).and_call_original + + subject + end end context 'when secondary instance is default 
store' do @@ -464,9 +481,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do expect(secondary_store).to receive(:send).with(:sadd, skey, %w[10 20 30]).and_call_original expect(secondary_store).to receive(:send).with(:scard, skey).and_call_original - expect(primary_store).not_to receive(:send).with(:mget, values).and_call_original - expect(primary_store).not_to receive(:send).with(:sadd, skey, %w[10 20 30]).and_call_original - expect(primary_store).not_to receive(:send).with(:scard, skey).and_call_original + expect(primary_store).not_to receive(:send) subject end @@ -700,7 +715,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do it 'runs block on correct Redis instance' do if both_stores expect(primary_store).to receive(name).with(*expected_args).and_call_original - expect(secondary_store).not_to receive(name) + expect(secondary_store).to receive(name).with(*expected_args).and_call_original expect(primary_store).to receive(:incr).with(rvalue) expect(secondary_store).to receive(:incr).with(rvalue) diff --git a/spec/lib/gitlab/redis/rate_limiting_spec.rb b/spec/lib/gitlab/redis/rate_limiting_spec.rb index e79c070df93..d82228426f0 100644 --- a/spec/lib/gitlab/redis/rate_limiting_spec.rb +++ b/spec/lib/gitlab/redis/rate_limiting_spec.rb @@ -4,4 +4,21 @@ require 'spec_helper' RSpec.describe Gitlab::Redis::RateLimiting do include_examples "redis_new_instance_shared_examples", 'rate_limiting', Gitlab::Redis::Cache + + describe '.cache_store' do + context 'when encountering an error' do + subject { described_class.cache_store.read('x') } + + before do + described_class.with do |redis| + allow(redis).to receive(:get).and_raise(::Redis::CommandError) + end + end + + it 'logs error' do + expect(::Gitlab::ErrorTracking).to receive(:log_exception) + subject + end + end + end end diff --git a/spec/lib/gitlab/redis/repository_cache_spec.rb b/spec/lib/gitlab/redis/repository_cache_spec.rb index b11e9ebf1f3..2c167a6eb62 100644 --- 
a/spec/lib/gitlab/redis/repository_cache_spec.rb +++ b/spec/lib/gitlab/redis/repository_cache_spec.rb @@ -4,46 +4,33 @@ require 'spec_helper' RSpec.describe Gitlab::Redis::RepositoryCache, feature_category: :scalability do include_examples "redis_new_instance_shared_examples", 'repository_cache', Gitlab::Redis::Cache - include_examples "redis_shared_examples" - describe '#pool' do - let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" } - let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" } - - subject { described_class.pool } + describe '#raw_config_hash' do + it 'has a legacy default URL' do + expect(subject).to receive(:fetch_config).and_return(false) - before do - allow(described_class).to receive(:config_file_name).and_return(config_new_format_host) - allow(Gitlab::Redis::Cache).to receive(:config_file_name).and_return(config_new_format_socket) + expect(subject.send(:raw_config_hash)).to eq(url: 'redis://localhost:6380') end + end - around do |example| - clear_pool - example.run - ensure - clear_pool + describe '.cache_store' do + it 'has a default ttl of 8 hours' do + expect(described_class.cache_store.options[:expires_in]).to eq(8.hours) end - it 'instantiates an instance of MultiStore' do - subject.with do |redis_instance| - expect(redis_instance).to be_instance_of(::Gitlab::Redis::MultiStore) + context 'when encountering an error' do + subject { described_class.cache_store.read('x') } - expect(redis_instance.primary_store.connection[:id]).to eq("redis://test-host:6379/99") - expect(redis_instance.secondary_store.connection[:id]).to eq("unix:///path/to/redis.sock/0") - - expect(redis_instance.instance_name).to eq('RepositoryCache') + before do + described_class.with do |redis| + allow(redis).to receive(:get).and_raise(::Redis::CommandError) + end end - end - it_behaves_like 'multi store feature flags', :use_primary_and_secondary_stores_for_repository_cache, - 
:use_primary_store_as_default_for_repository_cache - end - - describe '#raw_config_hash' do - it 'has a legacy default URL' do - expect(subject).to receive(:fetch_config).and_return(false) - - expect(subject.send(:raw_config_hash)).to eq(url: 'redis://localhost:6380') + it 'logs error' do + expect(::Gitlab::ErrorTracking).to receive(:log_exception) + subject + end end end end diff --git a/spec/lib/gitlab/redis/sidekiq_status_spec.rb b/spec/lib/gitlab/redis/sidekiq_status_spec.rb index e7cf229b494..bbfec13e6c8 100644 --- a/spec/lib/gitlab/redis/sidekiq_status_spec.rb +++ b/spec/lib/gitlab/redis/sidekiq_status_spec.rb @@ -14,10 +14,15 @@ RSpec.describe Gitlab::Redis::SidekiqStatus do describe '#pool' do let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" } let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" } + let(:rails_root) { mktmpdir } subject { described_class.pool } before do + # Override rails root to avoid having our fixtures overwritten by `redis.yml` if it exists + allow(Gitlab::Redis::SharedState).to receive(:rails_root).and_return(rails_root) + allow(Gitlab::Redis::Queues).to receive(:rails_root).and_return(rails_root) + allow(Gitlab::Redis::SharedState).to receive(:config_file_name).and_return(config_new_format_host) allow(Gitlab::Redis::Queues).to receive(:config_file_name).and_return(config_new_format_socket) end diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb index 9532a30144f..caca33704dd 100644 --- a/spec/lib/gitlab/regex_spec.rb +++ b/spec/lib/gitlab/regex_spec.rb @@ -21,6 +21,7 @@ RSpec.describe Gitlab::Regex, feature_category: :tooling do it_behaves_like 'project/group name chars regex' it { is_expected.not_to match('?gitlab') } it { is_expected.not_to match("Users's something") } + it { is_expected.not_to match('users/something') } end shared_examples_for 'project name regex' do @@ -28,6 +29,7 @@ RSpec.describe Gitlab::Regex, feature_category: :tooling do it 
{ is_expected.to match("Gitlab++") } it { is_expected.not_to match('?gitlab') } it { is_expected.not_to match("Users's something") } + it { is_expected.not_to match('users/something') } end describe '.project_name_regex' do @@ -72,8 +74,47 @@ RSpec.describe Gitlab::Regex, feature_category: :tooling do it { is_expected.to eq("can contain only letters, digits, emojis, '_', '.', dash, space, parenthesis. It must start with letter, digit, emoji or '_'.") } end - describe '.bulk_import_namespace_path_regex' do - subject { described_class.bulk_import_namespace_path_regex } + describe '.bulk_import_destination_namespace_path_regex_message' do + subject { described_class.bulk_import_destination_namespace_path_regex_message } + + it { + is_expected + .to eq("cannot start with a non-alphanumeric character except for periods or underscores, " \ + "can contain only alphanumeric characters, forward slashes, periods, and underscores, " \ + "cannot end with a period or forward slash, and has a relative path structure " \ + "with no http protocol chars or leading or trailing forward slashes") + } + end + + describe '.bulk_import_destination_namespace_path_regex' do + subject { described_class.bulk_import_destination_namespace_path_regex } + + it { is_expected.not_to match('?gitlab') } + it { is_expected.not_to match("Users's something") } + it { is_expected.not_to match('/source') } + it { is_expected.not_to match('http:') } + it { is_expected.not_to match('https:') } + it { is_expected.not_to match('example.com/?stuff=true') } + it { is_expected.not_to match('example.com:5000/?stuff=true') } + it { is_expected.not_to match('http://gitlab.example/gitlab-org/manage/import/gitlab-migration-test') } + it { is_expected.not_to match('_good_for_me!') } + it { is_expected.not_to match('good_for+you') } + it { is_expected.not_to match('source/') } + it { is_expected.not_to match('.source/full./path') } + + it { is_expected.to match('source') } + it { is_expected.to match('.source') } + it 
{ is_expected.to match('_source') } + it { is_expected.to match('source/full') } + it { is_expected.to match('source/full/path') } + it { is_expected.to match('.source/.full/.path') } + it { is_expected.to match('domain_namespace') } + it { is_expected.to match('gitlab-migration-test') } + it { is_expected.to match('') } # it is possible to pass an empty string for destination_namespace in bulk_import POST request + end + + describe '.bulk_import_source_full_path_regex' do + subject { described_class.bulk_import_source_full_path_regex } it { is_expected.not_to match('?gitlab') } it { is_expected.not_to match("Users's something") } @@ -87,6 +128,7 @@ RSpec.describe Gitlab::Regex, feature_category: :tooling do it { is_expected.not_to match('good_for+you') } it { is_expected.not_to match('source/') } it { is_expected.not_to match('.source/full./path') } + it { is_expected.not_to match('') } it { is_expected.to match('source') } it { is_expected.to match('.source') } @@ -608,6 +650,7 @@ RSpec.describe Gitlab::Regex, feature_category: :tooling do it { is_expected.to match('1.0') } it { is_expected.to match('1.0~alpha1') } it { is_expected.to match('2:4.9.5+dfsg-5+deb10u1') } + it { is_expected.to match('0.0.0-806aa143-f0bf-4f27-be65-8e4fcb745f37') } end context 'dpkg errors' do @@ -661,6 +704,22 @@ RSpec.describe Gitlab::Regex, feature_category: :tooling do end end + describe '.debian_direct_upload_filename_regex' do + subject { described_class.debian_direct_upload_filename_regex } + + it { is_expected.to match('libsample0_1.2.3~alpha2_amd64.deb') } + it { is_expected.to match('sample-dev_1.2.3~binary_amd64.deb') } + it { is_expected.to match('sample-udeb_1.2.3~alpha2_amd64.udeb') } + + it { is_expected.not_to match('sample_1.2.3~alpha2_amd64.buildinfo') } + it { is_expected.not_to match('sample_1.2.3~alpha2_amd64.changes') } + it { is_expected.not_to match('sample_1.2.3~alpha2.dsc') } + it { is_expected.not_to match('sample_1.2.3~alpha2.tar.xz') } + + # ensure right 
anchor + it { is_expected.not_to match('libsample0_1.2.3~alpha2_amd64.debu') } + end + describe '.helm_channel_regex' do subject { described_class.helm_channel_regex } @@ -977,21 +1036,6 @@ RSpec.describe Gitlab::Regex, feature_category: :tooling do it { is_expected.not_to match('%2e%2e%2f1.2.3') } end - describe '.saved_reply_name_regex' do - subject { described_class.saved_reply_name_regex } - - it { is_expected.to match('test') } - it { is_expected.to match('test123') } - it { is_expected.to match('test-test') } - it { is_expected.to match('test-test_0123') } - it { is_expected.not_to match('test test') } - it { is_expected.not_to match('test-') } - it { is_expected.not_to match('/z/test_') } - it { is_expected.not_to match('.xtest_') } - it { is_expected.not_to match('.xt.est_') } - it { is_expected.not_to match('0test1') } - end - describe '.sha256_regex' do subject { described_class.sha256_regex } @@ -1045,4 +1089,93 @@ RSpec.describe Gitlab::Regex, feature_category: :tooling do it { is_expected.not_to match('random string') } it { is_expected.not_to match('12321342545356434523412341245452345623453542345234523453245') } end + + describe 'code, html blocks, or html comment blocks regex' do + context 'code blocks' do + subject { described_class::MARKDOWN_CODE_BLOCK_REGEX } + + let(:expected) { %(```code\nsome code\n\n>>>\nthat includes a multiline-blockquote\n>>>\n```) } + let(:markdown) do + <<~MARKDOWN + Regular text + + ```code + some code + + >>> + that includes a multiline-blockquote + >>> + ``` + MARKDOWN + end + + it { is_expected.to match(%(```ruby\nsomething\n```)) } + it { is_expected.not_to match(%(must start in first column ```ruby\nsomething\n```)) } + it { is_expected.not_to match(%(```ruby must be multi-line ```)) } + it { expect(subject.match(markdown)[:code]).to eq expected } + end + + context 'HTML blocks' do + subject { described_class::MARKDOWN_HTML_BLOCK_REGEX } + + let(:expected) { %(<section>\n<p>paragraph</p>\n\n>>>\nthat includes a 
multiline-blockquote\n>>>\n</section>) } + let(:markdown) do + <<~MARKDOWN + Regular text + + <section> + <p>paragraph</p> + + >>> + that includes a multiline-blockquote + >>> + </section> + MARKDOWN + end + + it { is_expected.to match(%(<section>\nsomething\n</section>)) } + it { is_expected.not_to match(%(must start in first column <section>\nsomething\n</section>)) } + it { is_expected.not_to match(%(<section>must be multi-line</section>)) } + it { expect(subject.match(markdown)[:html]).to eq expected } + end + + context 'HTML comment lines' do + subject { described_class::MARKDOWN_HTML_COMMENT_LINE_REGEX } + + let(:expected) { %(<!-- an HTML comment -->) } + let(:markdown) do + <<~MARKDOWN + Regular text + + <!-- an HTML comment --> + + more text + MARKDOWN + end + + it { is_expected.to match(%(<!-- single line comment -->)) } + it { is_expected.not_to match(%(<!--\nblock comment\n-->)) } + it { is_expected.not_to match(%(must start in first column <!-- comment -->)) } + it { expect(subject.match(markdown)[:html_comment_line]).to eq expected } + end + + context 'HTML comment blocks' do + subject { described_class::MARKDOWN_HTML_COMMENT_BLOCK_REGEX } + + let(:expected) { %(<!-- the start of an HTML comment\n- [ ] list item commented out\n-->) } + let(:markdown) do + <<~MARKDOWN + Regular text + + <!-- the start of an HTML comment + - [ ] list item commented out + --> + MARKDOWN + end + + it { is_expected.to match(%(<!--\ncomment\n-->)) } + it { is_expected.not_to match(%(must start in first column <!--\ncomment\n-->)) } + it { expect(subject.match(markdown)[:html_comment_block]).to eq expected } + end + end end diff --git a/spec/lib/gitlab/repository_cache/preloader_spec.rb b/spec/lib/gitlab/repository_cache/preloader_spec.rb index 71244dd41ed..e6fb0da6412 100644 --- a/spec/lib/gitlab/repository_cache/preloader_spec.rb +++ b/spec/lib/gitlab/repository_cache/preloader_spec.rb @@ -6,76 +6,51 @@ RSpec.describe Gitlab::RepositoryCache::Preloader, 
:use_clean_rails_redis_cachin feature_category: :source_code_management do let(:projects) { create_list(:project, 2, :repository) } let(:repositories) { projects.map(&:repository) } + let(:cache) { Gitlab::RepositoryCache.store } - before do - stub_feature_flags(use_primary_store_as_default_for_repository_cache: false) - end - - shared_examples 'preload' do - describe '#preload' do - context 'when the values are already cached' do - before do - # Warm the cache but use a different model so they are not memoized - repos = Project.id_in(projects).order(:id).map(&:repository) - - allow(repos[0]).to receive(:readme_path_gitaly).and_return('README.txt') - allow(repos[1]).to receive(:readme_path_gitaly).and_return('README.md') - - repos.map(&:exists?) - repos.map(&:readme_path) - end - - it 'prevents individual cache reads for cached methods' do - expect(cache).to receive(:read_multi).once.and_call_original - - described_class.new(repositories).preload( - %i[exists? readme_path] - ) - - expect(cache).not_to receive(:read) - expect(cache).not_to receive(:write) + describe '#preload' do + context 'when the values are already cached' do + before do + # Warm the cache but use a different model so they are not memoized + repos = Project.id_in(projects).order(:id).map(&:repository) - expect(repositories[0].exists?).to eq(true) - expect(repositories[0].readme_path).to eq('README.txt') + allow(repos[0].head_tree).to receive(:readme_path).and_return('README.txt') + allow(repos[1].head_tree).to receive(:readme_path).and_return('README.md') - expect(repositories[1].exists?).to eq(true) - expect(repositories[1].readme_path).to eq('README.md') - end + repos.map(&:exists?) + repos.map(&:readme_path) end - context 'when values are not cached' do - it 'reads and writes from cache individually' do - described_class.new(repositories).preload( - %i[exists? has_visible_content?] 
- ) + it 'prevents individual cache reads for cached methods' do + expect(cache).to receive(:read_multi).once.and_call_original - expect(cache).to receive(:read).exactly(4).times - expect(cache).to receive(:write).exactly(4).times + described_class.new(repositories).preload( + %i[exists? readme_path] + ) - repositories.each(&:exists?) - repositories.each(&:has_visible_content?) - end - end - end - end + expect(cache).not_to receive(:read) + expect(cache).not_to receive(:write) - context 'when use_primary_and_secondary_stores_for_repository_cache feature flag is enabled' do - let(:cache) { Gitlab::RepositoryCache.store } + expect(repositories[0].exists?).to eq(true) + expect(repositories[0].readme_path).to eq('README.txt') - before do - stub_feature_flags(use_primary_and_secondary_stores_for_repository_cache: true) + expect(repositories[1].exists?).to eq(true) + expect(repositories[1].readme_path).to eq('README.md') + end end - it_behaves_like 'preload' - end + context 'when values are not cached' do + it 'reads and writes from cache individually' do + described_class.new(repositories).preload( + %i[exists? has_visible_content?] + ) - context 'when use_primary_and_secondary_stores_for_repository_cache feature flag is disabled' do - let(:cache) { Rails.cache } + expect(cache).to receive(:read).exactly(4).times + expect(cache).to receive(:write).exactly(4).times - before do - stub_feature_flags(use_primary_and_secondary_stores_for_repository_cache: false) + repositories.each(&:exists?) + repositories.each(&:has_visible_content?) + end end - - it_behaves_like 'preload' end end diff --git a/spec/lib/gitlab/repository_hash_cache_spec.rb b/spec/lib/gitlab/repository_hash_cache_spec.rb index d41bf45f72e..6b52c315a70 100644 --- a/spec/lib/gitlab/repository_hash_cache_spec.rb +++ b/spec/lib/gitlab/repository_hash_cache_spec.rb @@ -69,35 +69,20 @@ RSpec.describe Gitlab::RepositoryHashCache, :clean_gitlab_redis_cache do end end - shared_examples "key?" do - describe "#key?" 
do - subject { cache.key?(:example, "test") } + describe "#key?" do + subject { cache.key?(:example, "test") } - context "key exists" do - before do - cache.write(:example, test_hash) - end - - it { is_expected.to be(true) } + context "key exists" do + before do + cache.write(:example, test_hash) end - context "key doesn't exist" do - it { is_expected.to be(false) } - end + it { is_expected.to be(true) } end - end - - context "when both multistore FF is enabled" do - it_behaves_like "key?" - end - context "when both multistore FF is disabled" do - before do - stub_feature_flags(use_primary_and_secondary_stores_for_repository_cache: false) - stub_feature_flags(use_primary_store_as_default_for_repository_cache: false) + context "key doesn't exist" do + it { is_expected.to be(false) } end - - it_behaves_like "key?" end describe "#read_members" do diff --git a/spec/lib/gitlab/search/found_blob_spec.rb b/spec/lib/gitlab/search/found_blob_spec.rb index c41a051bc42..8efbe053155 100644 --- a/spec/lib/gitlab/search/found_blob_spec.rb +++ b/spec/lib/gitlab/search/found_blob_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Search::FoundBlob do +RSpec.describe Gitlab::Search::FoundBlob, feature_category: :global_search do let(:project) { create(:project, :public, :repository) } describe 'parsing content results' do @@ -17,6 +17,7 @@ RSpec.describe Gitlab::Search::FoundBlob do expect(subject.path).to eq('CHANGELOG') expect(subject.basename).to eq('CHANGELOG') expect(subject.ref).to eq('master') + expect(subject.matched_lines_count).to be_nil expect(subject.startline).to eq(188) expect(subject.data.lines[2]).to eq(" - Feature: Replace teams with group membership\n") end diff --git a/spec/lib/gitlab/sidekiq_death_handler_spec.rb b/spec/lib/gitlab/sidekiq_death_handler_spec.rb index 434642bf3ef..9f90e985207 100644 --- a/spec/lib/gitlab/sidekiq_death_handler_spec.rb +++ b/spec/lib/gitlab/sidekiq_death_handler_spec.rb @@ -12,7 +12,7 @@ RSpec.describe 
Gitlab::SidekiqDeathHandler, :clean_gitlab_redis_queues do urgency :low worker_has_external_dependencies! worker_resource_boundary :cpu - feature_category :users + feature_category :user_profile end end @@ -25,7 +25,7 @@ RSpec.describe Gitlab::SidekiqDeathHandler, :clean_gitlab_redis_queues do .to receive(:increment) .with({ queue: 'test_queue', worker: 'TestWorker', urgency: 'low', external_dependencies: 'yes', - feature_category: 'users', boundary: 'cpu' }) + feature_category: 'user_profile', boundary: 'cpu' }) described_class.handler({ 'class' => 'TestWorker', 'queue' => 'test_queue' }, nil) end diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb index b6748d49739..6a515a2b8a5 100644 --- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb +++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb @@ -77,7 +77,11 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi end end - shared_examples 'with Redis cookies' do + context 'with Redis cookies' do + def with_redis(&block) + Sidekiq.redis(&block) + end + let(:cookie_key) { "#{idempotency_key}:cookie:v2" } let(:cookie) { get_redis_msgpack(cookie_key) } @@ -385,41 +389,6 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi end end - context 'with multi-store feature flags turned on' do - def with_redis(&block) - Gitlab::Redis::DuplicateJobs.with(&block) - end - - it 'use Gitlab::Redis::DuplicateJobs.with' do - expect(Gitlab::Redis::DuplicateJobs).to receive(:with).and_call_original - expect(Sidekiq).not_to receive(:redis) - - duplicate_job.check! 
- end - - it_behaves_like 'with Redis cookies' - end - - context 'when both multi-store feature flags are off' do - def with_redis(&block) - Sidekiq.redis(&block) - end - - before do - stub_feature_flags(use_primary_and_secondary_stores_for_duplicate_jobs: false) - stub_feature_flags(use_primary_store_as_default_for_duplicate_jobs: false) - end - - it 'use Sidekiq.redis' do - expect(Sidekiq).to receive(:redis).and_call_original - expect(Gitlab::Redis::DuplicateJobs).not_to receive(:with) - - duplicate_job.check! - end - - it_behaves_like 'with Redis cookies' - end - describe '#scheduled?' do it 'returns false for non-scheduled jobs' do expect(duplicate_job.scheduled?).to be(false) diff --git a/spec/lib/gitlab/slash_commands/command_spec.rb b/spec/lib/gitlab/slash_commands/command_spec.rb index f4664bcfef9..7e3ff46fae5 100644 --- a/spec/lib/gitlab/slash_commands/command_spec.rb +++ b/spec/lib/gitlab/slash_commands/command_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::SlashCommands::Command do +RSpec.describe Gitlab::SlashCommands::Command, feature_category: :integrations do let(:project) { create(:project, :repository) } let(:user) { create(:user) } let(:chat_name) { double(:chat_name, user: user) } @@ -28,7 +28,7 @@ RSpec.describe Gitlab::SlashCommands::Command do it 'displays the help message' do expect(subject[:response_type]).to be(:ephemeral) expect(subject[:text]).to start_with('The specified command is not valid') - expect(subject[:text]).to match('/gitlab issue show') + expect(subject[:text]).to include('/gitlab [project name or alias] issue show <id>') end end diff --git a/spec/lib/gitlab/slash_commands/presenters/issue_move_spec.rb b/spec/lib/gitlab/slash_commands/presenters/issue_move_spec.rb index 7d36e67ddbf..15df2cea909 100644 --- a/spec/lib/gitlab/slash_commands/presenters/issue_move_spec.rb +++ b/spec/lib/gitlab/slash_commands/presenters/issue_move_spec.rb @@ -8,7 +8,7 @@ RSpec.describe 
Gitlab::SlashCommands::Presenters::IssueMove do let_it_be(:other_project) { create(:project) } let_it_be(:old_issue, reload: true) { create(:issue, project: project) } - let(:new_issue) { Issues::MoveService.new(project: project, current_user: user).execute(old_issue, other_project) } + let(:new_issue) { Issues::MoveService.new(container: project, current_user: user).execute(old_issue, other_project) } let(:attachment) { subject[:attachments].first } subject { described_class.new(new_issue).present(old_issue) } diff --git a/spec/lib/gitlab/slug/path_spec.rb b/spec/lib/gitlab/slug/path_spec.rb new file mode 100644 index 00000000000..9a7067e40a2 --- /dev/null +++ b/spec/lib/gitlab/slug/path_spec.rb @@ -0,0 +1,33 @@ +# frozen_string_literal: true + +require 'fast_spec_helper' + +RSpec.describe Gitlab::Slug::Path, feature_category: :not_owned do + describe '#generate' do + { + 'name': 'name', + 'james.atom@bond.com': 'james', + '--foobar--': 'foobar--', + '--foo_bar--': 'foo_bar--', + '--foo$^&_bar--': 'foo_bar--', + 'john@doe.com': 'john', + '-john+gitlab-ETC%.git@gmail.com': 'johngitlab-ETC', + 'this.is.git.atom.': 'this.is', + '#$%^.': 'blank', + '---.git#$.atom%@atom^.': 'blank', # use default when all characters are filtered out + '--gitlab--hey.git#$.atom%@atom^.': 'gitlab--hey' + }.each do |input, output| + it "yields a slug #{output} when given #{input}" do + slug = described_class.new(input).generate + + expect(slug).to match(/\A#{output}\z/) + end + end + end + + describe '#to_s' do + it 'presents with a cleaned slug' do + expect(described_class.new('---show-me-what-you.got.git').to_s).to match(/\Ashow-me-what-you.got\z/) + end + end +end diff --git a/spec/lib/gitlab/time_tracking_formatter_spec.rb b/spec/lib/gitlab/time_tracking_formatter_spec.rb index ab0611e6b6a..4203a76cbfb 100644 --- a/spec/lib/gitlab/time_tracking_formatter_spec.rb +++ b/spec/lib/gitlab/time_tracking_formatter_spec.rb @@ -4,7 +4,9 @@ require 'spec_helper' RSpec.describe 
Gitlab::TimeTrackingFormatter do describe '#parse' do - subject { described_class.parse(duration_string) } + let(:keep_zero) { false } + + subject { described_class.parse(duration_string, keep_zero: keep_zero) } context 'positive durations' do let(:duration_string) { '3h 20m' } @@ -25,6 +27,24 @@ RSpec.describe Gitlab::TimeTrackingFormatter do expect(subject).to eq(576_000) end end + + context 'when the duration is zero' do + let(:duration_string) { '0h' } + + context 'when keep_zero is false' do + it 'returns nil' do + expect(subject).to be_nil + end + end + + context 'when keep_zero is true' do + let(:keep_zero) { true } + + it 'returns zero' do + expect(subject).to eq(0) + end + end + end end describe '#output' do diff --git a/spec/lib/gitlab/url_blocker_spec.rb b/spec/lib/gitlab/url_blocker_spec.rb index 05f7af7606d..0d037984799 100644 --- a/spec/lib/gitlab/url_blocker_spec.rb +++ b/spec/lib/gitlab/url_blocker_spec.rb @@ -174,6 +174,17 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do expect { subject }.to raise_error(described_class::BlockedUrlError) end + + context 'with HTTP_PROXY' do + before do + allow(Gitlab).to receive(:http_proxy_env?).and_return(true) + end + + it_behaves_like 'validates URI and hostname' do + let(:expected_uri) { import_url } + let(:expected_hostname) { nil } + end + end end context 'when domain is too long' do diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_ci_internal_pipelines_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_ci_internal_pipelines_metric_spec.rb new file mode 100644 index 00000000000..afd8fccd56c --- /dev/null +++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_ci_internal_pipelines_metric_spec.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountCiInternalPipelinesMetric, +feature_category: :service_ping do + let_it_be(:ci_pipeline_1) { create(:ci_pipeline, source: 
:external) } + let_it_be(:ci_pipeline_2) { create(:ci_pipeline, source: :push) } + + let(:expected_value) { 1 } + let(:expected_query) do + 'SELECT COUNT("ci_pipelines"."id") FROM "ci_pipelines" ' \ + 'WHERE ("ci_pipelines"."source" IN (1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13, 14, 15) ' \ + 'OR "ci_pipelines"."source" IS NULL)' + end + + it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', data_source: 'database' } + + context 'on Gitlab.com' do + before do + allow(Gitlab).to receive(:com?).and_return(true) + end + + let(:expected_value) { -1 } + + it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', data_source: 'database' } + end +end diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_issues_created_manually_from_alerts_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_issues_created_manually_from_alerts_metric_spec.rb new file mode 100644 index 00000000000..86f54c48666 --- /dev/null +++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_issues_created_manually_from_alerts_metric_spec.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountIssuesCreatedManuallyFromAlertsMetric, +feature_category: :service_ping do + let_it_be(:issue) { create(:issue) } + let_it_be(:issue_with_alert) { create(:issue, :with_alert) } + + let(:expected_value) { 1 } + let(:expected_query) do + 'SELECT COUNT("issues"."id") FROM "issues" ' \ + 'INNER JOIN "alert_management_alerts" ON "alert_management_alerts"."issue_id" = "issues"."id" ' \ + 'WHERE "issues"."author_id" != 99' + end + + it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', data_source: 'database' } + + context 'on Gitlab.com' do + before do + allow(Gitlab).to receive(:com?).and_return(true) + end + + let(:expected_value) { -1 } + + it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', data_source: 'database' } + 
end +end diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_ml_candidates_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_ml_candidates_metric_spec.rb new file mode 100644 index 00000000000..7d7788737df --- /dev/null +++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_ml_candidates_metric_spec.rb @@ -0,0 +1,12 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountMlCandidatesMetric, feature_category: :mlops do + let_it_be(:candidate) { create(:ml_candidates) } + + let(:expected_value) { 1 } + let(:expected_query) { 'SELECT COUNT("ml_candidates"."id") FROM "ml_candidates"' } + + it_behaves_like 'a correct instrumented metric value and query', { time_frame: 'all' } +end diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_ml_experiments_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_ml_experiments_metric_spec.rb new file mode 100644 index 00000000000..887496ce39f --- /dev/null +++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_ml_experiments_metric_spec.rb @@ -0,0 +1,12 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountMlExperimentsMetric, feature_category: :mlops do + let_it_be(:candidate) { create(:ml_experiments) } + + let(:expected_value) { 1 } + let(:expected_query) { 'SELECT COUNT("ml_experiments"."id") FROM "ml_experiments"' } + + it_behaves_like 'a correct instrumented metric value and query', { time_frame: 'all' } +end diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_projects_with_ml_candidates_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_projects_with_ml_candidates_metric_spec.rb new file mode 100644 index 00000000000..e5026ab6358 --- /dev/null +++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_projects_with_ml_candidates_metric_spec.rb @@ -0,0 +1,18 @@ +# frozen_string_literal: 
true + +require 'spec_helper' + +RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountProjectsWithMlCandidatesMetric, + feature_category: :mlops do + let_it_be(:project_without_candidates) { create(:project, :repository) } + let_it_be(:candidate) { create(:ml_candidates) } + let_it_be(:another_candidate) { create(:ml_candidates, experiment: candidate.experiment) } + + let(:expected_value) { 1 } + let(:expected_query) do + 'SELECT COUNT(DISTINCT "ml_experiments"."ml_experiments.project_id") FROM "ml_experiments" WHERE ' \ + '(EXISTS (SELECT 1 FROM "ml_candidates" WHERE ("ml_experiments"."id" = "ml_candidates"."experiment_id")))' + end + + it_behaves_like 'a correct instrumented metric value and query', { time_frame: 'all' } +end diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_projects_with_ml_experiments_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_projects_with_ml_experiments_metric_spec.rb new file mode 100644 index 00000000000..829245f785b --- /dev/null +++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_projects_with_ml_experiments_metric_spec.rb @@ -0,0 +1,15 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountProjectsWithMlExperimentsMetric, + feature_category: :mlops do + let_it_be(:project_without_experiment) { create(:project, :repository) } + let_it_be(:experiment) { create(:ml_experiments) } + let_it_be(:another_experiment) { create(:ml_experiments, project: experiment.project) } + + let(:expected_value) { 1 } + let(:expected_query) { 'SELECT COUNT(DISTINCT "ml_experiments"."project_id") FROM "ml_experiments"' } + + it_behaves_like 'a correct instrumented metric value and query', { time_frame: 'all' } +end diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_projects_with_monitor_enabled_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_projects_with_monitor_enabled_metric_spec.rb new file mode 
100644 index 00000000000..d917dccd2b0 --- /dev/null +++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_projects_with_monitor_enabled_metric_spec.rb @@ -0,0 +1,22 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountProjectsWithMonitorEnabledMetric, + feature_category: :metrics do + let_it_be(:projects) { create_list(:project, 3) } + + let(:expected_value) { 2 } + let(:expected_query) do + 'SELECT COUNT("project_features"."id") FROM "project_features" WHERE "project_features"."monitor_access_level" != 0' + end + + before_all do + # Monitor feature cannot have public visibility level. Therefore `ProjectFeature::PUBLIC` is missing here + projects[0].project_feature.update!(monitor_access_level: ProjectFeature::DISABLED) + projects[1].project_feature.update!(monitor_access_level: ProjectFeature::PRIVATE) + projects[2].project_feature.update!(monitor_access_level: ProjectFeature::ENABLED) + end + + it_behaves_like 'a correct instrumented metric value and query', { time_frame: 'all' } +end diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_users_with_ml_candidates_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_users_with_ml_candidates_metric_spec.rb new file mode 100644 index 00000000000..b25d61d0bd2 --- /dev/null +++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_users_with_ml_candidates_metric_spec.rb @@ -0,0 +1,14 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountUsersWithMlCandidatesMetric, feature_category: :mlops do + let_it_be(:user_without_candidates) { create(:user) } + let_it_be(:candidate) { create(:ml_candidates) } + let_it_be(:another_candidate) { create(:ml_candidates, user: candidate.user) } + + let(:expected_value) { 1 } + let(:expected_query) { 'SELECT COUNT(DISTINCT "ml_candidates"."user_id") FROM "ml_candidates"' } + + it_behaves_like 'a correct 
instrumented metric value and query', { time_frame: 'all' } +end diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/incoming_email_encrypted_secrets_enabled_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/incoming_email_encrypted_secrets_enabled_metric_spec.rb new file mode 100644 index 00000000000..ed35b2c8cde --- /dev/null +++ b/spec/lib/gitlab/usage/metrics/instrumentations/incoming_email_encrypted_secrets_enabled_metric_spec.rb @@ -0,0 +1,10 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Usage::Metrics::Instrumentations::IncomingEmailEncryptedSecretsEnabledMetric, +feature_category: :service_ping do + it_behaves_like 'a correct instrumented metric value', { time_frame: 'none', data_source: 'ruby' } do + let(:expected_value) { ::Gitlab::IncomingEmail.encrypted_secrets.active? } + end +end diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/jira_active_integrations_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/jira_active_integrations_metric_spec.rb new file mode 100644 index 00000000000..104fd18ba2d --- /dev/null +++ b/spec/lib/gitlab/usage/metrics/instrumentations/jira_active_integrations_metric_spec.rb @@ -0,0 +1,39 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Usage::Metrics::Instrumentations::JiraActiveIntegrationsMetric, + feature_category: :integrations do + let(:options) { { deployment_type: 'cloud', series: 0 } } + let(:integration_attributes) { { active: true, deployment_type: 'cloud' } } + let(:expected_value) { 3 } + let(:expected_query) do + 'SELECT COUNT("integrations"."id") FROM "integrations" ' \ + 'INNER JOIN "jira_tracker_data" ON "jira_tracker_data"."integration_id" = "integrations"."id" ' \ + 'WHERE "integrations"."type_new" = \'Integrations::Jira\' AND "integrations"."active" = TRUE ' \ + 'AND "jira_tracker_data"."deployment_type" = 2' + end + + before do + create_list :jira_integration, 3, integration_attributes + + 
create :jira_integration, integration_attributes.merge(active: false) + create :jira_integration, integration_attributes.merge(deployment_type: 'server') + end + + it_behaves_like 'a correct instrumented metric value and query', + { options: { deployment_type: 'cloud' }, time_frame: 'all' } + + it "raises an exception if option is not present" do + expect do + described_class.new(options: options.except(:deployment_type), time_frame: 'all') + end.to raise_error(ArgumentError, %r{deployment_type .* must be one of}) + end + + it "raises an exception if option has invalid value" do + expect do + options[:deployment_type] = 'cloood' + described_class.new(options: options, time_frame: 'all') + end.to raise_error(ArgumentError, %r{deployment_type .* must be one of}) + end +end diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/service_desk_email_encrypted_secrets_enabled_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/service_desk_email_encrypted_secrets_enabled_metric_spec.rb new file mode 100644 index 00000000000..d602eae3159 --- /dev/null +++ b/spec/lib/gitlab/usage/metrics/instrumentations/service_desk_email_encrypted_secrets_enabled_metric_spec.rb @@ -0,0 +1,10 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Usage::Metrics::Instrumentations::ServiceDeskEmailEncryptedSecretsEnabledMetric, +feature_category: :service_ping do + it_behaves_like 'a correct instrumented metric value', { time_frame: 'none', data_source: 'ruby' } do + let(:expected_value) { ::Gitlab::ServiceDeskEmail.encrypted_secrets.active? 
} + end +end diff --git a/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb b/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb index 83a4ea8e948..4f647c2700a 100644 --- a/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb +++ b/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::Generator do +RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::Generator, feature_category: :service_ping do include UsageDataHelpers before do @@ -43,9 +43,9 @@ RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::Generator do context 'joined relations' do context 'counted attribute comes from source relation' do it_behaves_like 'name suggestion' do - # corresponding metric is collected with count(Issue.with_alert_management_alerts.not_authored_by(::User.alert_bot), start: issue_minimum_id, finish: issue_maximum_id) - let(:key_path) { 'counts.issues_created_manually_from_alerts' } - let(:name_suggestion) { /count_<adjective describing: '\(issues\.author_id != \d+\)'>_issues_<with>_alert_management_alerts/ } + # corresponding metric is collected with distinct_count(Release.with_milestones, :author_id) + let(:key_path) { 'usage_activity_by_stage.release.releases_with_milestones' } + let(:name_suggestion) { /count_distinct_author_id_from_releases_<with>_milestone_releases/ } end end end diff --git a/spec/lib/gitlab/usage/service_ping_report_spec.rb b/spec/lib/gitlab/usage/service_ping_report_spec.rb index 7a37a31b195..730c05b7dcb 100644 --- a/spec/lib/gitlab/usage/service_ping_report_spec.rb +++ b/spec/lib/gitlab/usage/service_ping_report_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::Usage::ServicePingReport, :use_clean_rails_memory_store_caching do +RSpec.describe Gitlab::Usage::ServicePingReport, :use_clean_rails_memory_store_caching, feature_category: :service_ping do include UsageDataHelpers 
let(:usage_data) { { uuid: "1111", counts: { issue: 0 } }.deep_stringify_keys } @@ -171,14 +171,25 @@ RSpec.describe Gitlab::Usage::ServicePingReport, :use_clean_rails_memory_store_c let(:metric_definitions) { ::Gitlab::Usage::MetricDefinition.definitions } it 'generates queries that match collected data', :aggregate_failures do - message = "Expected %{query} result to match %{value} for %{key_path} metric" + message = "Expected %{query} result to match %{value} for %{key_path} metric (got %{payload_value} instead)" metrics_queries_with_values.each do |key_path, query, value| - value = type_cast_to_defined_type(value, metric_definitions[key_path.join('.')]) + metric_definition = metric_definitions[key_path.join('.')] + + # Skip broken metrics since they are usually overriden to return -1 + next if metric_definition&.attributes&.fetch(:status) == 'broken' + + value = type_cast_to_defined_type(value, metric_definition) + payload_value = service_ping_payload.dig(*key_path) expect(value).to( - eq(service_ping_payload.dig(*key_path)), - message % { query: query, value: (value || 'NULL'), key_path: key_path.join('.') } + eq(payload_value), + message % { + query: query, + value: (value || 'NULL'), + payload_value: payload_value, + key_path: key_path.join('.') + } ) end end diff --git a/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb index f1115a8813d..11c6ea2fc9d 100644 --- a/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb +++ b/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb @@ -30,7 +30,6 @@ RSpec.describe Gitlab::UsageDataCounters::CiTemplateUniqueCounter do end it_behaves_like 'Snowplow event tracking with RedisHLL context' do - let(:feature_flag_name) { :route_hll_to_snowplow } let(:category) { described_class.to_s } let(:action) { 'ci_templates_unique' } let(:namespace) { project.namespace } diff --git 
a/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb index bfbabd858f0..f8a4603c1f8 100644 --- a/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb +++ b/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb @@ -65,20 +65,4 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red end end end - - it 'can return the count of actions per user deduplicated' do - described_class.track_web_ide_edit_action(author: user1, project: project) - described_class.track_live_preview_edit_action(author: user1, project: project) - described_class.track_snippet_editor_edit_action(author: user1, project: project) - described_class.track_sfe_edit_action(author: user1, project: project) - described_class.track_web_ide_edit_action(author: user2, time: time - 2.days, project: project) - described_class.track_web_ide_edit_action(author: user3, time: time - 3.days, project: project) - described_class.track_live_preview_edit_action(author: user2, time: time - 2.days, project: project) - described_class.track_live_preview_edit_action(author: user3, time: time - 3.days, project: project) - described_class.track_snippet_editor_edit_action(author: user3, time: time - 3.days, project: project) - described_class.track_sfe_edit_action(author: user3, time: time - 3.days, project: project) - - expect(described_class.count_edit_using_editor(date_from: time, date_to: Date.today)).to eq(1) - expect(described_class.count_edit_using_editor(date_from: time - 5.days, date_to: Date.tomorrow)).to eq(3) - end end diff --git a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb index 1d980c48c72..f955fd265e5 100644 --- a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb +++ b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb @@ -64,7 +64,6 @@ RSpec.describe 
Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s "name" => "ce_event", "redis_slot" => "analytics", "category" => "analytics", - "expiry" => 84, "aggregation" => "weekly" } end @@ -106,13 +105,13 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s let(:known_events) do [ - { name: weekly_event, redis_slot: "analytics", category: analytics_category, expiry: 84, aggregation: "weekly", feature_flag: feature }, - { name: daily_event, redis_slot: "analytics", category: analytics_category, expiry: 84, aggregation: "daily" }, + { name: weekly_event, redis_slot: "analytics", category: analytics_category, aggregation: "weekly", feature_flag: feature }, + { name: daily_event, redis_slot: "analytics", category: analytics_category, aggregation: "daily" }, { name: category_productivity_event, redis_slot: "analytics", category: productivity_category, aggregation: "weekly" }, { name: compliance_slot_event, redis_slot: "compliance", category: compliance_category, aggregation: "weekly" }, { name: no_slot, category: global_category, aggregation: "daily" }, { name: different_aggregation, category: global_category, aggregation: "monthly" }, - { name: context_event, category: other_category, expiry: 6, aggregation: 'weekly' } + { name: context_event, category: other_category, aggregation: 'weekly' } ].map(&:with_indifferent_access) end @@ -196,7 +195,8 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s it 'tracks events with multiple values' do values = [entity1, entity2] - expect(Gitlab::Redis::HLL).to receive(:add).with(key: /g_{analytics}_contribution/, value: values, expiry: 84.days) + expect(Gitlab::Redis::HLL).to receive(:add).with(key: /g_{analytics}_contribution/, value: values, + expiry: described_class::DEFAULT_WEEKLY_KEY_EXPIRY_LENGTH) described_class.track_event(:g_analytics_contribution, values: values) end @@ -233,20 +233,7 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, 
:clean_gitlab_redis_s end context 'for weekly events' do - it 'sets the keys in Redis to expire automatically after the given expiry time' do - described_class.track_event("g_analytics_contribution", values: entity1) - - Gitlab::Redis::SharedState.with do |redis| - keys = redis.scan_each(match: "g_{analytics}_contribution-*").to_a - expect(keys).not_to be_empty - - keys.each do |key| - expect(redis.ttl(key)).to be_within(5.seconds).of(12.weeks) - end - end - end - - it 'sets the keys in Redis to expire automatically after 6 weeks by default' do + it 'sets the keys in Redis to expire' do described_class.track_event("g_compliance_dashboard", values: entity1) Gitlab::Redis::SharedState.with do |redis| @@ -254,27 +241,14 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s expect(keys).not_to be_empty keys.each do |key| - expect(redis.ttl(key)).to be_within(5.seconds).of(6.weeks) + expect(redis.ttl(key)).to be_within(5.seconds).of(described_class::DEFAULT_WEEKLY_KEY_EXPIRY_LENGTH) end end end end context 'for daily events' do - it 'sets the keys in Redis to expire after the given expiry time' do - described_class.track_event("g_analytics_search", values: entity1) - - Gitlab::Redis::SharedState.with do |redis| - keys = redis.scan_each(match: "*-g_{analytics}_search").to_a - expect(keys).not_to be_empty - - keys.each do |key| - expect(redis.ttl(key)).to be_within(5.seconds).of(84.days) - end - end - end - - it 'sets the keys in Redis to expire after 29 days by default' do + it 'sets the keys in Redis to expire' do described_class.track_event("no_slot", values: entity1) Gitlab::Redis::SharedState.with do |redis| @@ -282,7 +256,7 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s expect(keys).not_to be_empty keys.each do |key| - expect(redis.ttl(key)).to be_within(5.seconds).of(29.days) + expect(redis.ttl(key)).to be_within(5.seconds).of(described_class::DEFAULT_DAILY_KEY_EXPIRY_LENGTH) end end end @@ -302,7 
+276,9 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s it 'tracks events with multiple values' do values = [entity1, entity2] - expect(Gitlab::Redis::HLL).to receive(:add).with(key: /g_{analytics}_contribution/, value: values, expiry: 84.days) + expect(Gitlab::Redis::HLL).to receive(:add).with(key: /g_{analytics}_contribution/, + value: values, + expiry: described_class::DEFAULT_WEEKLY_KEY_EXPIRY_LENGTH) described_class.track_event_in_context(:g_analytics_contribution, values: values, context: default_context) end diff --git a/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb index 032a5e78385..33e0d446fca 100644 --- a/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb +++ b/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb @@ -284,6 +284,16 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git end end + context 'for Issue design comment removed actions' do + it_behaves_like 'daily tracked issuable snowplow and service ping events with project' do + let(:action) { described_class::ISSUE_DESIGN_COMMENT_REMOVED } + + def track_action(params) + described_class.track_issue_design_comment_removed_action(**params) + end + end + end + it 'can return the count of actions per user deduplicated', :aggregate_failures do described_class.track_issue_title_changed_action(author: user1, project: project) described_class.track_issue_description_changed_action(author: user1, project: project) diff --git a/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb index 9a1ffd8d01d..42aa84c2c3e 100644 --- a/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb +++ 
b/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb @@ -70,8 +70,8 @@ RSpec.describe Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter, :cl let(:namespace) { project.namespace.reload } let(:user) { project.creator } let(:feature_flag_name) { :route_hll_to_snowplow_phase2 } - let(:label) { 'redis_hll_counters.code_review.i_code_review_create_mr_monthly' } - let(:property) { described_class::MR_CREATE_ACTION } + let(:label) { 'redis_hll_counters.code_review.i_code_review_user_create_mr_monthly' } + let(:property) { described_class::MR_USER_CREATE_ACTION } end end diff --git a/spec/lib/gitlab/usage_data_counters/web_ide_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/web_ide_counter_spec.rb index b0e5bd18b66..d79fa66b983 100644 --- a/spec/lib/gitlab/usage_data_counters/web_ide_counter_spec.rb +++ b/spec/lib/gitlab/usage_data_counters/web_ide_counter_spec.rb @@ -34,46 +34,17 @@ RSpec.describe Gitlab::UsageDataCounters::WebIdeCounter, :clean_gitlab_redis_sha it_behaves_like 'counter examples', 'pipelines' end - describe 'previews counter' do - let(:setting_enabled) { true } - - before do - stub_application_setting(web_ide_clientside_preview_enabled: setting_enabled) - end - - context 'when web ide clientside preview is enabled' do - it_behaves_like 'counter examples', 'previews' - end - - context 'when web ide clientside preview is not enabled' do - let(:setting_enabled) { false } - - it 'does not increment the counter' do - redis_key = 'WEB_IDE_PREVIEWS_COUNT' - expect(described_class.total_count(redis_key)).to eq(0) - - 2.times { described_class.increment_previews_count } - - expect(described_class.total_count(redis_key)).to eq(0) - end - end - end - describe '.totals' do commits = 5 merge_requests = 3 views = 2 - previews = 4 terminals = 1 pipelines = 2 before do - stub_application_setting(web_ide_clientside_preview_enabled: true) - commits.times { described_class.increment_commits_count } merge_requests.times { 
described_class.increment_merge_requests_count } views.times { described_class.increment_views_count } - previews.times { described_class.increment_previews_count } terminals.times { described_class.increment_terminals_count } pipelines.times { described_class.increment_pipelines_count } end @@ -83,7 +54,6 @@ RSpec.describe Gitlab::UsageDataCounters::WebIdeCounter, :clean_gitlab_redis_sha web_ide_commits: commits, web_ide_views: views, web_ide_merge_requests: merge_requests, - web_ide_previews: previews, web_ide_terminals: terminals ) end diff --git a/spec/lib/gitlab/usage_data_metrics_spec.rb b/spec/lib/gitlab/usage_data_metrics_spec.rb index 34f8e5b2a2f..6391b003096 100644 --- a/spec/lib/gitlab/usage_data_metrics_spec.rb +++ b/spec/lib/gitlab/usage_data_metrics_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::UsageDataMetrics, :with_license do +RSpec.describe Gitlab::UsageDataMetrics, :with_license, feature_category: :service_ping do describe '.uncached_data' do subject { described_class.uncached_data } diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb index 592ac280d32..5325ef5b5dd 100644 --- a/spec/lib/gitlab/usage_data_spec.rb +++ b/spec/lib/gitlab/usage_data_spec.rb @@ -557,9 +557,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic expect(count_data[:issues_using_zoom_quick_actions]).to eq(3) expect(count_data[:issues_with_embedded_grafana_charts_approx]).to eq(2) expect(count_data[:incident_issues]).to eq(4) - expect(count_data[:issues_created_gitlab_alerts]).to eq(1) expect(count_data[:issues_created_from_alerts]).to eq(3) - expect(count_data[:issues_created_manually_from_alerts]).to eq(1) expect(count_data[:alert_bot_incident_issues]).to eq(4) expect(count_data[:clusters_enabled]).to eq(6) expect(count_data[:project_clusters_enabled]).to eq(4) @@ -739,7 +737,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic 
expect(subject[:container_registry_enabled]).to eq(Gitlab.config.registry.enabled) expect(subject[:dependency_proxy_enabled]).to eq(Gitlab.config.dependency_proxy.enabled) expect(subject[:gitlab_shared_runners_enabled]).to eq(Gitlab.config.gitlab_ci.shared_runners_enabled) - expect(subject[:web_ide_clientside_preview_enabled]).to eq(Gitlab::CurrentSettings.web_ide_clientside_preview_enabled?) expect(subject[:grafana_link_enabled]).to eq(Gitlab::CurrentSettings.grafana_enabled?) expect(subject[:gitpod_enabled]).to eq(Gitlab::CurrentSettings.gitpod_enabled?) end @@ -1102,8 +1099,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic { action_monthly_active_users_web_ide_edit: 2, action_monthly_active_users_sfe_edit: 2, - action_monthly_active_users_snippet_editor_edit: 2, - action_monthly_active_users_ide_edit: 3 + action_monthly_active_users_snippet_editor_edit: 2 } ) end @@ -1138,20 +1134,4 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic expect(result).to be_nil end end - - context 'on Gitlab.com' do - before do - allow(Gitlab).to receive(:com?).and_return(true) - end - - describe '.system_usage_data' do - subject { described_class.system_usage_data } - - it 'returns fallback value for disabled metrics' do - expect(subject[:counts][:ci_internal_pipelines]).to eq(Gitlab::Utils::UsageData::FALLBACK) - expect(subject[:counts][:issues_created_gitlab_alerts]).to eq(Gitlab::Utils::UsageData::FALLBACK) - expect(subject[:counts][:issues_created_manually_from_alerts]).to eq(Gitlab::Utils::UsageData::FALLBACK) - end - end - end end diff --git a/spec/lib/gitlab/utils/email_spec.rb b/spec/lib/gitlab/utils/email_spec.rb new file mode 100644 index 00000000000..d7a881d8655 --- /dev/null +++ b/spec/lib/gitlab/utils/email_spec.rb @@ -0,0 +1,42 @@ +# frozen_string_literal: true + +require 'fast_spec_helper' +require 'rspec-parameterized' + +RSpec.describe Gitlab::Utils::Email, feature_category: :service_desk do + 
using RSpec::Parameterized::TableSyntax + + describe '.obfuscated_email' do + where(:input, :output) do + 'alex@gitlab.com' | 'al**@g*****.com' + 'alex@gl.co.uk' | 'al**@g****.uk' + 'a@b.c' | 'a@b.c' + 'q@example.com' | 'q@e******.com' + 'q@w.' | 'q@w.' + 'a@b' | 'a@b' + 'no mail' | 'no mail' + end + + with_them do + it { expect(described_class.obfuscated_email(input)).to eq(output) } + end + + context 'when deform is active' do + where(:input, :output) do + 'alex@gitlab.com' | 'al*****@g*****.c**' + 'alex@gl.co.uk' | 'al*****@g*****.u**' + 'a@b.c' | 'aa*****@b*****.c**' + 'qqwweerrttyy@example.com' | 'qq*****@e*****.c**' + 'getsuperfancysupport@paywhatyouwant.accounting' | 'ge*****@p*****.a**' + 'q@example.com' | 'qq*****@e*****.c**' + 'q@w.' | 'q@w.' + 'a@b' | 'a@b' + 'no mail' | 'no mail' + end + + with_them do + it { expect(described_class.obfuscated_email(input, deform: true)).to eq(output) } + end + end + end +end diff --git a/spec/lib/gitlab/utils_spec.rb b/spec/lib/gitlab/utils_spec.rb index 80b2ec63af9..102d608072b 100644 --- a/spec/lib/gitlab/utils_spec.rb +++ b/spec/lib/gitlab/utils_spec.rb @@ -7,7 +7,8 @@ RSpec.describe Gitlab::Utils do delegate :to_boolean, :boolean_to_yes_no, :slugify, :which, :ensure_array_from_string, :to_exclusive_sentence, :bytes_to_megabytes, - :append_path, :check_path_traversal!, :allowlisted?, :check_allowed_absolute_path!, :decode_path, :ms_to_round_sec, :check_allowed_absolute_path_and_path_traversal!, to: :described_class + :append_path, :remove_leading_slashes, :check_path_traversal!, :allowlisted?, :check_allowed_absolute_path!, + :decode_path, :ms_to_round_sec, :check_allowed_absolute_path_and_path_traversal!, to: :described_class describe '.check_path_traversal!' 
do it 'detects path traversal in string without any separators' do @@ -378,6 +379,23 @@ RSpec.describe Gitlab::Utils do end end + describe '.remove_leading_slashes' do + where(:str, :result) do + '/foo/bar' | 'foo/bar' + '//foo/bar' | 'foo/bar' + '/foo/bar/' | 'foo/bar/' + 'foo/bar' | 'foo/bar' + '' | '' + nil | '' + end + + with_them do + it 'removes leading slashes' do + expect(remove_leading_slashes(str)).to eq(result) + end + end + end + describe '.ensure_utf8_size' do context 'string is has less bytes than expected' do it 'backfills string with null characters' do diff --git a/spec/lib/initializer_connections_spec.rb b/spec/lib/initializer_connections_spec.rb index 4ca283c4f22..e69aa0aa821 100644 --- a/spec/lib/initializer_connections_spec.rb +++ b/spec/lib/initializer_connections_spec.rb @@ -3,15 +3,20 @@ require 'spec_helper' RSpec.describe InitializerConnections do - describe '.with_disabled_database_connections', :reestablished_active_record_base do + describe '.raise_if_new_database_connection', :reestablished_active_record_base do + before do + ActiveRecord::Base.connection_handler.clear_active_connections! + ActiveRecord::Base.connection_handler.flush_idle_connections! 
+ end + def block_with_database_call - described_class.with_disabled_database_connections do + described_class.raise_if_new_database_connection do Project.first end end def block_with_error - described_class.with_disabled_database_connections do + described_class.raise_if_new_database_connection do raise "oops, an error" end end @@ -20,6 +25,12 @@ RSpec.describe InitializerConnections do expect { block_with_database_call }.to raise_error(/Database connection should not be called during initializer/) end + it 'prevents any database connection re-use within the block' do + Project.connection # establish a connection first, it will be used inside the block + + expect { block_with_database_call }.to raise_error(/Database connection should not be called during initializer/) + end + it 'does not prevent database connection if SKIP_RAISE_ON_INITIALIZE_CONNECTIONS is set' do stub_env('SKIP_RAISE_ON_INITIALIZE_CONNECTIONS', '1') @@ -33,31 +44,34 @@ RSpec.describe InitializerConnections do end it 'restores original connection handler' do - # rubocop:disable Database/MultipleDatabases original_handler = ActiveRecord::Base.connection_handler expect { block_with_database_call }.to raise_error(/Database connection should not be called during initializer/) expect(ActiveRecord::Base.connection_handler).to eq(original_handler) - # rubocop:enabled Database/MultipleDatabases end it 'restores original connection handler even there is an error' do - # rubocop:disable Database/MultipleDatabases original_handler = ActiveRecord::Base.connection_handler expect { block_with_error }.to raise_error(/an error/) expect(ActiveRecord::Base.connection_handler).to eq(original_handler) - # rubocop:enabled Database/MultipleDatabases end - it 'raises if any new connection_pools are established in the block' do + it 'does not raise if connection_pool is retrieved in the block' do + # connection_pool, connection_db_config doesn't connect to database, so it's OK + expect do + 
described_class.raise_if_new_database_connection do + ApplicationRecord.connection_pool + end + end.not_to raise_error + expect do - described_class.with_disabled_database_connections do - ApplicationRecord.connects_to database: { writing: :main, reading: :main } + described_class.raise_if_new_database_connection do + Ci::ApplicationRecord.connection_pool end - end.to raise_error(/Unxpected connection_pools/) + end.not_to raise_error end end end diff --git a/spec/lib/marginalia_spec.rb b/spec/lib/marginalia_spec.rb index 5f405e71d79..43c3e4d67e5 100644 --- a/spec/lib/marginalia_spec.rb +++ b/spec/lib/marginalia_spec.rb @@ -70,7 +70,7 @@ RSpec.describe 'Marginalia spec' do end before do - skip_if_multiple_databases_not_setup + skip_if_multiple_databases_not_setup(:ci) end it 'generates a query that includes the component and value' do diff --git a/spec/lib/object_storage/direct_upload_spec.rb b/spec/lib/object_storage/direct_upload_spec.rb index 569e6a3a7c6..82eede96deb 100644 --- a/spec/lib/object_storage/direct_upload_spec.rb +++ b/spec/lib/object_storage/direct_upload_spec.rb @@ -272,7 +272,7 @@ RSpec.describe ObjectStorage::DirectUpload do it 'uses only strings in query parameters' do expect(direct_upload.send(:connection)).to receive(:signed_url).at_least(:once) do |params| if params[:query] - expect(params[:query].keys.all? 
{ |key| key.is_a?(String) }).to be_truthy + expect(params[:query].keys.all?(String)).to be_truthy end end diff --git a/spec/lib/peek/views/active_record_spec.rb b/spec/lib/peek/views/active_record_spec.rb index fc768bdcb82..aeaf28c7e7d 100644 --- a/spec/lib/peek/views/active_record_spec.rb +++ b/spec/lib/peek/views/active_record_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Peek::Views::ActiveRecord, :request_store do +RSpec.describe Peek::Views::ActiveRecord, :request_store, feature_category: :database do subject { Peek.views.find { |v| v.instance_of?(Peek::Views::ActiveRecord) } } let(:connection_replica) { double(:connection_replica) } diff --git a/spec/lib/release_highlights/validator/entry_spec.rb b/spec/lib/release_highlights/validator/entry_spec.rb index b8b745ac8cd..63b753bd871 100644 --- a/spec/lib/release_highlights/validator/entry_spec.rb +++ b/spec/lib/release_highlights/validator/entry_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe ReleaseHighlights::Validator::Entry do +RSpec.describe ReleaseHighlights::Validator::Entry, type: :model, feature_category: :onboarding do subject(:entry) { described_class.new(document.root.children.first) } let(:document) { YAML.parse(File.read(yaml_path)) } @@ -22,34 +22,26 @@ RSpec.describe ReleaseHighlights::Validator::Entry do context 'with an invalid entry' do let(:yaml_path) { 'spec/fixtures/whats_new/invalid.yml' } - it 'returns line numbers in errors' do - subject.valid? - - expect(entry.errors[:available_in].first).to match('(line 6)') - end + it { is_expected.to be_invalid } end context 'with a blank entry' do - it 'validate presence of name, description and stage' do - subject.valid? 
- - expect(subject.errors[:name]).not_to be_empty - expect(subject.errors[:description]).not_to be_empty - expect(subject.errors[:stage]).not_to be_empty - expect(subject.errors[:available_in]).not_to be_empty - end - - it 'validates boolean value of "self-managed" and "gitlab-com"' do - allow(entry).to receive(:value_for).with(:'self-managed').and_return('nope') - allow(entry).to receive(:value_for).with(:'gitlab-com').and_return('yerp') - - subject.valid? - - expect(subject.errors[:'self-managed']).to include(/must be a boolean/) - expect(subject.errors[:'gitlab-com']).to include(/must be a boolean/) - end - - it 'validates URI of "url" and "image_url"' do + it { is_expected.to validate_presence_of(:name).with_message(/can't be blank \(line [0-9]+\)/) } + it { is_expected.to validate_presence_of(:description).with_message(/can't be blank/) } + it { is_expected.to validate_presence_of(:stage).with_message(/can't be blank/) } + it { is_expected.to validate_presence_of(:self_managed).with_message(/must be a boolean/) } + it { is_expected.to validate_presence_of(:gitlab_com).with_message(/must be a boolean/) } + it { is_expected.to allow_value(nil).for(:image_url) } + + it { + is_expected.to validate_presence_of(:available_in) + .with_message(/must be one of \["Free", "Premium", "Ultimate"\]/) + } + + it { is_expected.to validate_presence_of(:published_at).with_message(/must be valid Date/) } + it { is_expected.to validate_numericality_of(:release).with_message(/is not a number/) } + + it 'validates URI of "documentation_link" and "image_url"' do stub_env('RSPEC_ALLOW_INVALID_URLS', 'false') allow(entry).to receive(:value_for).with(:image_url).and_return('https://foobar.x/images/ci/gitlab-ci-cd-logo_2x.png') allow(entry).to receive(:value_for).with(:documentation_link).and_return('') @@ -60,16 +52,8 @@ RSpec.describe ReleaseHighlights::Validator::Entry do expect(subject.errors[:image_url]).to include(/is blocked: Host cannot be resolved or invalid/) end - it 
'validates release is numerical' do - allow(entry).to receive(:value_for).with(:release).and_return('one') - - subject.valid? - - expect(subject.errors[:release]).to include(/is not a number/) - end - it 'validates published_at is a date' do - allow(entry).to receive(:value_for).with(:published_at).and_return('christmas day') + allow(entry).to receive(:published_at).and_return('christmas day') subject.valid? @@ -77,7 +61,7 @@ RSpec.describe ReleaseHighlights::Validator::Entry do end it 'validates available_in are included in list' do - allow(entry).to receive(:value_for).with(:available_in).and_return(['ALL']) + allow(entry).to receive(:available_in).and_return(['ALL']) subject.valid? diff --git a/spec/lib/release_highlights/validator_spec.rb b/spec/lib/release_highlights/validator_spec.rb index dd1b3aa4803..7cfeffb095a 100644 --- a/spec/lib/release_highlights/validator_spec.rb +++ b/spec/lib/release_highlights/validator_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe ReleaseHighlights::Validator do +RSpec.describe ReleaseHighlights::Validator, feature_category: :experimentation_adoption do let(:validator) { described_class.new(file: yaml_path) } let(:yaml_path) { 'spec/fixtures/whats_new/valid.yml' } let(:invalid_yaml_path) { 'spec/fixtures/whats_new/invalid.yml' } diff --git a/spec/lib/service_ping/build_payload_spec.rb b/spec/lib/service_ping/build_payload_spec.rb index b10c9fd5bc0..6c37168f5a0 100644 --- a/spec/lib/service_ping/build_payload_spec.rb +++ b/spec/lib/service_ping/build_payload_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe ServicePing::BuildPayload do +RSpec.describe ServicePing::BuildPayload, feature_category: :service_ping do describe '#execute', :without_license do subject(:service_ping_payload) { described_class.new.execute } diff --git a/spec/lib/sidebars/projects/menus/learn_gitlab_menu_spec.rb b/spec/lib/sidebars/projects/menus/learn_gitlab_menu_spec.rb deleted file mode 100644 index 4ae29f28f3a..00000000000 --- 
a/spec/lib/sidebars/projects/menus/learn_gitlab_menu_spec.rb +++ /dev/null @@ -1,79 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Sidebars::Projects::Menus::LearnGitlabMenu do - let_it_be(:project) { build(:project) } - let_it_be(:learn_gitlab_enabled) { true } - - let(:context) do - Sidebars::Projects::Context.new( - current_user: nil, - container: project, - learn_gitlab_enabled: learn_gitlab_enabled - ) - end - - subject { described_class.new(context) } - - it 'does not contain any sub menu' do - expect(subject.has_items?).to be false - end - - describe '#nav_link_html_options' do - let_it_be(:data_tracking) do - { - class: 'home', - data: { - track_label: 'learn_gitlab' - } - } - end - - specify do - expect(subject.nav_link_html_options).to eq(data_tracking) - end - end - - describe '#render?' do - context 'when learn gitlab experiment is enabled' do - it 'returns true' do - expect(subject.render?).to eq true - end - end - - context 'when learn gitlab experiment is disabled' do - let(:learn_gitlab_enabled) { false } - - it 'returns false' do - expect(subject.render?).to eq false - end - end - end - - describe '#has_pill?' 
do - context 'when learn gitlab experiment is enabled' do - it 'returns true' do - expect(subject.has_pill?).to eq true - end - end - - context 'when learn gitlab experiment is disabled' do - let(:learn_gitlab_enabled) { false } - - it 'returns false' do - expect(subject.has_pill?).to eq false - end - end - end - - describe '#pill_count' do - it 'returns pill count' do - expect_next_instance_of(Onboarding::Completion) do |onboarding| - expect(onboarding).to receive(:percentage).and_return(20) - end - - expect(subject.pill_count).to eq '20%' - end - end -end diff --git a/spec/lib/sidebars/projects/menus/repository_menu_spec.rb b/spec/lib/sidebars/projects/menus/repository_menu_spec.rb index e7aa2b7edca..40ca2107698 100644 --- a/spec/lib/sidebars/projects/menus/repository_menu_spec.rb +++ b/spec/lib/sidebars/projects/menus/repository_menu_spec.rb @@ -48,20 +48,13 @@ RSpec.describe Sidebars::Projects::Menus::RepositoryMenu, feature_category: :sou ref_type: ref_type) end - where(:feature_flag_enabled, :ref_type, :link) do - true | nil | lazy { "#{route}?ref_type=heads" } - true | 'heads' | lazy { "#{route}?ref_type=heads" } - true | 'tags' | lazy { "#{route}?ref_type=tags" } - false | nil | lazy { route } - false | 'heads' | lazy { route } - false | 'tags' | lazy { route } + where(:ref_type, :link) do + nil | lazy { "#{route}?ref_type=heads" } + 'heads' | lazy { "#{route}?ref_type=heads" } + 'tags' | lazy { "#{route}?ref_type=tags" } end with_them do - before do - stub_feature_flags(use_ref_type_parameter: feature_flag_enabled) - end - it 'has a link with the fully qualifed ref route' do expect(subject).to eq(link) end diff --git a/spec/lib/sidebars/your_work/menus/merge_requests_menu_spec.rb b/spec/lib/sidebars/your_work/menus/merge_requests_menu_spec.rb index b3251a54178..8941c11006e 100644 --- a/spec/lib/sidebars/your_work/menus/merge_requests_menu_spec.rb +++ b/spec/lib/sidebars/your_work/menus/merge_requests_menu_spec.rb @@ -3,10 +3,55 @@ require 'spec_helper' 
RSpec.describe Sidebars::YourWork::Menus::MergeRequestsMenu, feature_category: :navigation do - let(:user) { create(:user) } + let_it_be(:user) { create(:user) } + let(:context) { Sidebars::Context.new(current_user: user, container: nil) } subject { described_class.new(context) } include_examples 'menu item shows pill based on count', :assigned_open_merge_requests_count + + describe 'submenu items' do + using RSpec::Parameterized::TableSyntax + + where(:order, :title, :key) do + 0 | 'Assigned' | :assigned + 1 | 'Review requests' | :review_requested + end + + with_them do + let(:item) { subject.renderable_items[order] } + + it 'renders items in the right order' do + expect(item.title).to eq title + end + + context 'when there are no MR counts' do + before do + allow(user).to receive(:assigned_open_merge_requests_count).and_return(0) + allow(user).to receive(:review_requested_open_merge_requests_count).and_return(0) + end + + it 'shows a pill even though count is zero' do + expect(item.has_pill?).to eq true + expect(item.pill_count).to eq 0 + end + end + + context 'when there are MR counts' do + let(:non_zero_counts) { { assigned: 2, review_requested: 3 } } + + before do + allow(user).to receive(:assigned_open_merge_requests_count).and_return(non_zero_counts[:assigned]) + allow(user).to receive(:review_requested_open_merge_requests_count) + .and_return(non_zero_counts[:review_requested]) + end + + it 'shows a pill with the correct count' do + expect(item.has_pill?).to eq true + expect(item.pill_count).to eq non_zero_counts[key] + end + end + end + end end diff --git a/spec/lib/slack_markdown_sanitizer_spec.rb b/spec/lib/slack_markdown_sanitizer_spec.rb new file mode 100644 index 00000000000..f4042439213 --- /dev/null +++ b/spec/lib/slack_markdown_sanitizer_spec.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe SlackMarkdownSanitizer, feature_category: :integrations do + describe '.sanitize' do + using 
RSpec::Parameterized::TableSyntax + + where(:input, :output) do + nil | nil + '' | '' + '[label](url)' | 'label(url)' + '<url|label>' | 'urllabel' + '<a href="url">label</a>' | 'a href="url"label/a' + end + + with_them do + it 'returns the expected output' do + expect(described_class.sanitize(input)).to eq(output) + end + end + end +end diff --git a/spec/lib/tasks/gitlab/metrics_exporter_task_spec.rb b/spec/lib/tasks/gitlab/metrics_exporter_task_spec.rb deleted file mode 100644 index 4e17e91f019..00000000000 --- a/spec/lib/tasks/gitlab/metrics_exporter_task_spec.rb +++ /dev/null @@ -1,81 +0,0 @@ -# frozen_string_literal: true - -require 'rake_helper' -require_relative '../../../support/helpers/next_instance_of' - -RSpec.describe 'gitlab:metrics_exporter:install' do - before do - Rake.application.rake_require 'tasks/gitlab/metrics_exporter' - end - - subject(:task) do - Rake::Task['gitlab:metrics_exporter:install'] - end - - context 'when no target directory is specified' do - it 'aborts with an error message' do - expect do - expect { task.execute }.to output(/Please specify the directory/).to_stdout - end.to raise_error(SystemExit) - end - end - - context 'when target directory is specified' do - let(:args) { Rake::TaskArguments.new(%w(dir), %w(path/to/exporter)) } - let(:context) { TOPLEVEL_BINDING.eval('self') } - let(:expected_clone_params) do - { - repo: 'https://gitlab.com/gitlab-org/gitlab-metrics-exporter.git', - version: an_instance_of(String), - target_dir: 'path/to/exporter' - } - end - - context 'when dependencies are missing' do - it 'aborts with an error message' do - expect(Gitlab::Utils).to receive(:which).with('gmake').ordered - expect(Gitlab::Utils).to receive(:which).with('make').ordered - - expect do - expect { task.execute(args) }.to output(/Couldn't find a 'make' binary/).to_stdout - end.to raise_error(SystemExit) - end - end - - it 'installs the exporter with gmake' do - expect(Gitlab::Utils).to 
receive(:which).with('gmake').and_return('path/to/gmake').ordered - expect(context).to receive(:checkout_or_clone_version).with(hash_including(expected_clone_params)).ordered - expect(Dir).to receive(:chdir).with('path/to/exporter').and_yield.ordered - expect(context).to receive(:run_command!).with(['path/to/gmake']).ordered - - task.execute(args) - end - - it 'installs the exporter with make' do - expect(Gitlab::Utils).to receive(:which).with('gmake').ordered - expect(Gitlab::Utils).to receive(:which).with('make').and_return('path/to/make').ordered - expect(context).to receive(:checkout_or_clone_version).with(hash_including(expected_clone_params)).ordered - expect(Dir).to receive(:chdir).with('path/to/exporter').and_yield.ordered - expect(context).to receive(:run_command!).with(['path/to/make']).ordered - - task.execute(args) - end - - context 'when overriding version via environment variable' do - before do - stub_env('GITLAB_METRICS_EXPORTER_VERSION', '1.0') - end - - it 'clones from repository with that version instead' do - expect(Gitlab::Utils).to receive(:which).with('gmake').and_return('path/to/gmake').ordered - expect(context).to receive(:checkout_or_clone_version).with( - hash_including(expected_clone_params.merge(version: '1.0')) - ).ordered - expect(Dir).to receive(:chdir).with('path/to/exporter').and_yield.ordered - expect(context).to receive(:run_command!).with(['path/to/gmake']).ordered - - task.execute(args) - end - end - end -end |