summaryrefslogtreecommitdiff
path: root/spec/lib
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2022-01-20 09:16:11 +0000
committerGitLab Bot <gitlab-bot@gitlab.com>2022-01-20 09:16:11 +0000
commitedaa33dee2ff2f7ea3fac488d41558eb5f86d68c (patch)
tree11f143effbfeba52329fb7afbd05e6e2a3790241 /spec/lib
parentd8a5691316400a0f7ec4f83832698f1988eb27c1 (diff)
downloadgitlab-ce-edaa33dee2ff2f7ea3fac488d41558eb5f86d68c.tar.gz
Add latest changes from gitlab-org/gitlab@14-7-stable-eev14.7.0-rc42
Diffstat (limited to 'spec/lib')
-rw-r--r--spec/lib/api/entities/ci/pipeline_spec.rb21
-rw-r--r--spec/lib/api/entities/merge_request_basic_spec.rb3
-rw-r--r--spec/lib/api/helpers/rate_limiter_spec.rb73
-rw-r--r--spec/lib/backup/artifacts_spec.rb2
-rw-r--r--spec/lib/backup/files_spec.rb4
-rw-r--r--spec/lib/backup/gitaly_backup_spec.rb32
-rw-r--r--spec/lib/backup/gitaly_rpc_backup_spec.rb10
-rw-r--r--spec/lib/backup/lfs_spec.rb27
-rw-r--r--spec/lib/backup/manager_spec.rb6
-rw-r--r--spec/lib/backup/object_backup_spec.rb36
-rw-r--r--spec/lib/backup/repositories_spec.rb12
-rw-r--r--spec/lib/backup/repository_backup_error_spec.rb42
-rw-r--r--spec/lib/backup/uploads_spec.rb3
-rw-r--r--spec/lib/banzai/filter/footnote_filter_spec.rb46
-rw-r--r--spec/lib/banzai/filter/markdown_filter_spec.rb153
-rw-r--r--spec/lib/banzai/filter/plantuml_filter_spec.rb72
-rw-r--r--spec/lib/banzai/filter/references/issue_reference_filter_spec.rb2
-rw-r--r--spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb5
-rw-r--r--spec/lib/banzai/filter/sanitization_filter_spec.rb47
-rw-r--r--spec/lib/banzai/filter/syntax_highlight_filter_spec.rb238
-rw-r--r--spec/lib/banzai/pipeline/full_pipeline_spec.rb41
-rw-r--r--spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb156
-rw-r--r--spec/lib/banzai/reference_parser/merge_request_parser_spec.rb8
-rw-r--r--spec/lib/bulk_imports/common/extractors/ndjson_extractor_spec.rb6
-rw-r--r--spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb31
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb27
-rw-r--r--spec/lib/error_tracking/collector/payload_validator_spec.rb32
-rw-r--r--spec/lib/feature_spec.rb32
-rw-r--r--spec/lib/gitlab/asciidoc_spec.rb1371
-rw-r--r--spec/lib/gitlab/auth/auth_finders_spec.rb30
-rw-r--r--spec/lib/gitlab/auth/ldap/config_spec.rb30
-rw-r--r--spec/lib/gitlab/auth_spec.rb30
-rw-r--r--spec/lib/gitlab/background_migration/backfill_artifact_expiry_date_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/backfill_ci_namespace_mirrors_spec.rb45
-rw-r--r--spec/lib/gitlab/background_migration/backfill_ci_project_mirrors_spec.rb46
-rw-r--r--spec/lib/gitlab/background_migration/backfill_incident_issue_escalation_statuses_spec.rb27
-rw-r--r--spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/backfill_project_updated_at_after_repository_storage_move_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/base_job_spec.rb16
-rw-r--r--spec/lib/gitlab/background_migration/cleanup_concurrent_schema_change_spec.rb28
-rw-r--r--spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/encrypt_static_object_token_spec.rb56
-rw-r--r--spec/lib/gitlab/background_migration/fix_vulnerability_occurrences_with_hashes_as_raw_metadata_spec.rb232
-rw-r--r--spec/lib/gitlab/background_migration/job_coordinator_spec.rb45
-rw-r--r--spec/lib/gitlab/background_migration/migrate_legacy_artifacts_spec.rb158
-rw-r--r--spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/populate_finding_uuid_for_vulnerability_feedback_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/populate_issue_email_participants_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb468
-rw-r--r--spec/lib/gitlab/background_migration/remove_duplicate_services_spec.rb121
-rw-r--r--spec/lib/gitlab/background_migration/remove_vulnerability_finding_links_spec.rb4
-rw-r--r--spec/lib/gitlab/background_migration/wrongfully_confirmed_email_unconfirmer_spec.rb2
-rw-r--r--spec/lib/gitlab/checks/changes_access_spec.rb80
-rw-r--r--spec/lib/gitlab/ci/build/status/reason_spec.rb75
-rw-r--r--spec/lib/gitlab/ci/config/entry/root_spec.rb46
-rw-r--r--spec/lib/gitlab/ci/jwt_v2_spec.rb34
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb14
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/create_spec.rb13
-rw-r--r--spec/lib/gitlab/ci/pipeline/logger_spec.rb84
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/build_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/stage_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/status/build/waiting_for_approval_spec.rb49
-rw-r--r--spec/lib/gitlab/ci/tags/bulk_insert_spec.rb47
-rw-r--r--spec/lib/gitlab/ci/trace/remote_checksum_spec.rb8
-rw-r--r--spec/lib/gitlab/ci/variables/builder_spec.rb196
-rw-r--r--spec/lib/gitlab/ci/yaml_processor_spec.rb36
-rw-r--r--spec/lib/gitlab/color_schemes_spec.rb2
-rw-r--r--spec/lib/gitlab/config/entry/configurable_spec.rb9
-rw-r--r--spec/lib/gitlab/config/entry/factory_spec.rb11
-rw-r--r--spec/lib/gitlab/content_security_policy/config_loader_spec.rb6
-rw-r--r--spec/lib/gitlab/data_builder/archive_trace_spec.rb19
-rw-r--r--spec/lib/gitlab/data_builder/deployment_spec.rb1
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_migration_spec.rb27
-rw-r--r--spec/lib/gitlab/database/background_migration_job_spec.rb2
-rw-r--r--spec/lib/gitlab/database/batch_count_spec.rb76
-rw-r--r--spec/lib/gitlab/database/bulk_update_spec.rb2
-rw-r--r--spec/lib/gitlab/database/loose_index_scan_distinct_count_spec.rb71
-rw-r--r--spec/lib/gitlab/database/migration_helpers_spec.rb112
-rw-r--r--spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb626
-rw-r--r--spec/lib/gitlab/database/migrations/runner_spec.rb2
-rw-r--r--spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb81
-rw-r--r--spec/lib/gitlab/database/partitioning/partition_manager_spec.rb3
-rw-r--r--spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb7
-rw-r--r--spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb43
-rw-r--r--spec/lib/gitlab/database/reflection_spec.rb60
-rw-r--r--spec/lib/gitlab/database/reindexing/coordinator_spec.rb76
-rw-r--r--spec/lib/gitlab/email/failure_handler_spec.rb69
-rw-r--r--spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb9
-rw-r--r--spec/lib/gitlab/event_store/event_spec.rb64
-rw-r--r--spec/lib/gitlab/event_store/store_spec.rb262
-rw-r--r--spec/lib/gitlab/exceptions_app_spec.rb68
-rw-r--r--spec/lib/gitlab/gfm/reference_rewriter_spec.rb2
-rw-r--r--spec/lib/gitlab/git_access_spec.rb8
-rw-r--r--spec/lib/gitlab/gpg/commit_spec.rb24
-rw-r--r--spec/lib/gitlab/http_spec.rb34
-rw-r--r--spec/lib/gitlab/import/set_async_jid_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml4
-rw-r--r--spec/lib/gitlab/import_export/avatar_saver_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/base/relation_factory_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/design_repo_restorer_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/group/relation_tree_restorer_spec.rb41
-rw-r--r--spec/lib/gitlab/import_export/project/relation_factory_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/project/relation_tree_restorer_spec.rb41
-rw-r--r--spec/lib/gitlab/import_export/safe_model_attributes.yml1
-rw-r--r--spec/lib/gitlab/import_export/uploads_saver_spec.rb4
-rw-r--r--spec/lib/gitlab/integrations/sti_type_spec.rb12
-rw-r--r--spec/lib/gitlab/jwt_authenticatable_spec.rb163
-rw-r--r--spec/lib/gitlab/lets_encrypt/client_spec.rb2
-rw-r--r--spec/lib/gitlab/lfs/client_spec.rb87
-rw-r--r--spec/lib/gitlab/logger_spec.rb94
-rw-r--r--spec/lib/gitlab/mail_room/authenticator_spec.rb188
-rw-r--r--spec/lib/gitlab/mail_room/mail_room_spec.rb63
-rw-r--r--spec/lib/gitlab/merge_requests/commit_message_generator_spec.rb322
-rw-r--r--spec/lib/gitlab/metrics/exporter/base_exporter_spec.rb74
-rw-r--r--spec/lib/gitlab/metrics/exporter/gc_request_middleware_spec.rb21
-rw-r--r--spec/lib/gitlab/metrics/exporter/health_checks_middleware_spec.rb52
-rw-r--r--spec/lib/gitlab/metrics/exporter/metrics_middleware_spec.rb39
-rw-r--r--spec/lib/gitlab/metrics/exporter/sidekiq_exporter_spec.rb53
-rw-r--r--spec/lib/gitlab/metrics/exporter/web_exporter_spec.rb6
-rw-r--r--spec/lib/gitlab/metrics/samplers/action_cable_sampler_spec.rb2
-rw-r--r--spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb4
-rw-r--r--spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb2
-rw-r--r--spec/lib/gitlab/middleware/go_spec.rb2
-rw-r--r--spec/lib/gitlab/middleware/webhook_recursion_detection_spec.rb42
-rw-r--r--spec/lib/gitlab/pagination/keyset/in_operator_optimization/order_by_column_data_spec.rb35
-rw-r--r--spec/lib/gitlab/pagination/keyset/in_operator_optimization/query_builder_spec.rb73
-rw-r--r--spec/lib/gitlab/pagination/keyset/in_operator_optimization/strategies/order_values_loader_strategy_spec.rb37
-rw-r--r--spec/lib/gitlab/redis/multi_store_spec.rb676
-rw-r--r--spec/lib/gitlab/redis/sessions_spec.rb73
-rw-r--r--spec/lib/gitlab/regex_spec.rb2
-rw-r--r--spec/lib/gitlab/search/params_spec.rb8
-rw-r--r--spec/lib/gitlab/shard_health_cache_spec.rb6
-rw-r--r--spec/lib/gitlab/sherlock/collection_spec.rb84
-rw-r--r--spec/lib/gitlab/sherlock/file_sample_spec.rb56
-rw-r--r--spec/lib/gitlab/sherlock/line_profiler_spec.rb75
-rw-r--r--spec/lib/gitlab/sherlock/line_sample_spec.rb35
-rw-r--r--spec/lib/gitlab/sherlock/location_spec.rb42
-rw-r--r--spec/lib/gitlab/sherlock/middleware_spec.rb81
-rw-r--r--spec/lib/gitlab/sherlock/query_spec.rb115
-rw-r--r--spec/lib/gitlab/sherlock/transaction_spec.rb238
-rw-r--r--spec/lib/gitlab/sidekiq_status/client_middleware_spec.rb10
-rw-r--r--spec/lib/gitlab/sidekiq_status_spec.rb40
-rw-r--r--spec/lib/gitlab/sourcegraph_spec.rb6
-rw-r--r--spec/lib/gitlab/ssh_public_key_spec.rb41
-rw-r--r--spec/lib/gitlab/themes_spec.rb2
-rw-r--r--spec/lib/gitlab/tracking/standard_context_spec.rb4
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb4
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/generic_metric_spec.rb40
-rw-r--r--spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb3
-rw-r--r--spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb8
-rw-r--r--spec/lib/gitlab/usage_data_queries_spec.rb4
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb77
-rw-r--r--spec/lib/gitlab/utils/usage_data_spec.rb55
-rw-r--r--spec/lib/gitlab/web_hooks/recursion_detection_spec.rb221
-rw-r--r--spec/lib/gitlab_edition_spec.rb160
-rw-r--r--spec/lib/gitlab_spec.rb131
-rw-r--r--spec/lib/sidebars/groups/menus/settings_menu_spec.rb6
-rw-r--r--spec/lib/sidebars/projects/panel_spec.rb3
-rw-r--r--spec/lib/version_check_spec.rb6
162 files changed, 5695 insertions, 4449 deletions
diff --git a/spec/lib/api/entities/ci/pipeline_spec.rb b/spec/lib/api/entities/ci/pipeline_spec.rb
index 6a658cc3e18..2b8e59b68c6 100644
--- a/spec/lib/api/entities/ci/pipeline_spec.rb
+++ b/spec/lib/api/entities/ci/pipeline_spec.rb
@@ -3,14 +3,31 @@
require 'spec_helper'
RSpec.describe API::Entities::Ci::Pipeline do
- let_it_be(:pipeline) { create(:ci_empty_pipeline) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:pipeline) { create(:ci_empty_pipeline, user: user) }
let_it_be(:job) { create(:ci_build, name: "rspec", coverage: 30.212, pipeline: pipeline) }
let(:entity) { described_class.new(pipeline) }
subject { entity.as_json }
- it 'returns the coverage as a string' do
+ exposed_fields = %i[before_sha tag yaml_errors created_at updated_at started_at finished_at committed_at duration queued_duration]
+
+ exposed_fields.each do |field|
+ it "exposes pipeline #{field}" do
+ expect(subject[field]).to eq(pipeline.public_send(field))
+ end
+ end
+
+ it 'exposes pipeline user basic information' do
+ expect(subject[:user].keys).to include(:avatar_url, :web_url)
+ end
+
+ it 'exposes pipeline detailed status' do
+ expect(subject[:detailed_status].keys).to include(:icon, :favicon)
+ end
+
+ it 'exposes pipeline coverage as a string' do
expect(subject[:coverage]).to eq '30.21'
end
end
diff --git a/spec/lib/api/entities/merge_request_basic_spec.rb b/spec/lib/api/entities/merge_request_basic_spec.rb
index b9d6ab7a652..40f259b86e2 100644
--- a/spec/lib/api/entities/merge_request_basic_spec.rb
+++ b/spec/lib/api/entities/merge_request_basic_spec.rb
@@ -21,7 +21,8 @@ RSpec.describe ::API::Entities::MergeRequestBasic do
it 'includes basic fields' do
is_expected.to include(
draft: merge_request.draft?,
- work_in_progress: merge_request.draft?
+ work_in_progress: merge_request.draft?,
+ merge_user: nil
)
end
diff --git a/spec/lib/api/helpers/rate_limiter_spec.rb b/spec/lib/api/helpers/rate_limiter_spec.rb
new file mode 100644
index 00000000000..2fed1cf3604
--- /dev/null
+++ b/spec/lib/api/helpers/rate_limiter_spec.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Helpers::RateLimiter do
+ let(:key) { :some_key }
+ let(:scope) { [:some, :scope] }
+ let(:request) { instance_double('Rack::Request') }
+ let(:user) { build_stubbed(:user) }
+
+ let(:api_class) do
+ Class.new do
+ include API::Helpers::RateLimiter
+
+ attr_reader :request, :current_user
+
+ def initialize(request, current_user)
+ @request = request
+ @current_user = current_user
+ end
+
+ def render_api_error!(**args)
+ end
+ end
+ end
+
+ subject { api_class.new(request, user) }
+
+ before do
+ allow(::Gitlab::ApplicationRateLimiter).to receive(:throttled?)
+ allow(::Gitlab::ApplicationRateLimiter).to receive(:log_request)
+ end
+
+ describe '#check_rate_limit!' do
+ it 'calls ApplicationRateLimiter#throttled? with the right arguments' do
+ expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(key, scope: scope).and_return(false)
+ expect(subject).not_to receive(:render_api_error!)
+
+ subject.check_rate_limit!(key, scope: scope)
+ end
+
+ it 'renders api error and logs request if throttled' do
+ expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(key, scope: scope).and_return(true)
+ expect(::Gitlab::ApplicationRateLimiter).to receive(:log_request).with(request, "#{key}_request_limit".to_sym, user)
+ expect(subject).to receive(:render_api_error!).with({ error: _('This endpoint has been requested too many times. Try again later.') }, 429)
+
+ subject.check_rate_limit!(key, scope: scope)
+ end
+
+ context 'when the bypass header is set' do
+ before do
+ allow(Gitlab::Throttle).to receive(:bypass_header).and_return('SOME_HEADER')
+ end
+
+ it 'skips rate limit if set to "1"' do
+ allow(request).to receive(:get_header).with(Gitlab::Throttle.bypass_header).and_return('1')
+
+ expect(::Gitlab::ApplicationRateLimiter).not_to receive(:throttled?)
+ expect(subject).not_to receive(:render_api_error!)
+
+ subject.check_rate_limit!(key, scope: scope)
+ end
+
+ it 'does not skip rate limit if set to something else than "1"' do
+ allow(request).to receive(:get_header).with(Gitlab::Throttle.bypass_header).and_return('0')
+
+ expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled?)
+
+ subject.check_rate_limit!(key, scope: scope)
+ end
+ end
+ end
+end
diff --git a/spec/lib/backup/artifacts_spec.rb b/spec/lib/backup/artifacts_spec.rb
index 5a965030b01..102d787a5e1 100644
--- a/spec/lib/backup/artifacts_spec.rb
+++ b/spec/lib/backup/artifacts_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Backup::Artifacts do
Dir.mktmpdir do |tmpdir|
allow(JobArtifactUploader).to receive(:root) { "#{tmpdir}" }
- expect(backup.app_files_dir).to eq("#{tmpdir}")
+ expect(backup.app_files_dir).to eq("#{File.realpath(tmpdir)}")
end
end
end
diff --git a/spec/lib/backup/files_spec.rb b/spec/lib/backup/files_spec.rb
index 92de191da2d..6bff0919293 100644
--- a/spec/lib/backup/files_spec.rb
+++ b/spec/lib/backup/files_spec.rb
@@ -134,7 +134,7 @@ RSpec.describe Backup::Files do
expect do
subject.dump
- end.to raise_error(/Backup operation failed:/)
+ end.to raise_error(/Failed to create compressed file/)
end
describe 'with STRATEGY=copy' do
@@ -170,7 +170,7 @@ RSpec.describe Backup::Files do
expect do
subject.dump
end.to output(/rsync failed/).to_stdout
- .and raise_error(/Backup failed/)
+ .and raise_error(/Failed to create compressed file/)
end
end
end
diff --git a/spec/lib/backup/gitaly_backup_spec.rb b/spec/lib/backup/gitaly_backup_spec.rb
index 2ccde517533..cd0d984fbdb 100644
--- a/spec/lib/backup/gitaly_backup_spec.rb
+++ b/spec/lib/backup/gitaly_backup_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe Backup::GitalyBackup do
- let(:parallel) { nil }
- let(:parallel_storage) { nil }
+ let(:max_parallelism) { nil }
+ let(:storage_parallelism) { nil }
let(:progress) do
Tempfile.new('progress').tap do |progress|
@@ -23,7 +23,7 @@ RSpec.describe Backup::GitalyBackup do
progress.close
end
- subject { described_class.new(progress, parallel: parallel, parallel_storage: parallel_storage) }
+ subject { described_class.new(progress, max_parallelism: max_parallelism, storage_parallelism: storage_parallelism) }
context 'unknown' do
it 'fails to start unknown' do
@@ -48,7 +48,7 @@ RSpec.describe Backup::GitalyBackup do
subject.enqueue(project, Gitlab::GlRepository::DESIGN)
subject.enqueue(personal_snippet, Gitlab::GlRepository::SNIPPET)
subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET)
- subject.wait
+ subject.finish!
expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.bundle'))
expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.wiki.bundle'))
@@ -58,24 +58,24 @@ RSpec.describe Backup::GitalyBackup do
end
context 'parallel option set' do
- let(:parallel) { 3 }
+ let(:max_parallelism) { 3 }
it 'passes parallel option through' do
expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything, '-parallel', '3').and_call_original
subject.start(:create)
- subject.wait
+ subject.finish!
end
end
context 'parallel_storage option set' do
- let(:parallel_storage) { 3 }
+ let(:storage_parallelism) { 3 }
it 'passes parallel option through' do
expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything, '-parallel-storage', '3').and_call_original
subject.start(:create)
- subject.wait
+ subject.finish!
end
end
@@ -83,7 +83,7 @@ RSpec.describe Backup::GitalyBackup do
expect(subject).to receive(:bin_path).and_return(Gitlab::Utils.which('false'))
subject.start(:create)
- expect { subject.wait }.to raise_error(::Backup::Error, 'gitaly-backup exit status 1')
+ expect { subject.finish! }.to raise_error(::Backup::Error, 'gitaly-backup exit status 1')
end
end
@@ -115,7 +115,7 @@ RSpec.describe Backup::GitalyBackup do
expect(Open3).to receive(:popen2).with(ssl_env, anything, 'create', '-path', anything).and_call_original
subject.start(:create)
- subject.wait
+ subject.finish!
end
end
end
@@ -145,7 +145,7 @@ RSpec.describe Backup::GitalyBackup do
subject.enqueue(project, Gitlab::GlRepository::DESIGN)
subject.enqueue(personal_snippet, Gitlab::GlRepository::SNIPPET)
subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET)
- subject.wait
+ subject.finish!
collect_commit_shas = -> (repo) { repo.commits('master', limit: 10).map(&:sha) }
@@ -157,24 +157,24 @@ RSpec.describe Backup::GitalyBackup do
end
context 'parallel option set' do
- let(:parallel) { 3 }
+ let(:max_parallelism) { 3 }
it 'passes parallel option through' do
expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything, '-parallel', '3').and_call_original
subject.start(:restore)
- subject.wait
+ subject.finish!
end
end
context 'parallel_storage option set' do
- let(:parallel_storage) { 3 }
+ let(:storage_parallelism) { 3 }
it 'passes parallel option through' do
expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything, '-parallel-storage', '3').and_call_original
subject.start(:restore)
- subject.wait
+ subject.finish!
end
end
@@ -182,7 +182,7 @@ RSpec.describe Backup::GitalyBackup do
expect(subject).to receive(:bin_path).and_return(Gitlab::Utils.which('false'))
subject.start(:restore)
- expect { subject.wait }.to raise_error(::Backup::Error, 'gitaly-backup exit status 1')
+ expect { subject.finish! }.to raise_error(::Backup::Error, 'gitaly-backup exit status 1')
end
end
end
diff --git a/spec/lib/backup/gitaly_rpc_backup_spec.rb b/spec/lib/backup/gitaly_rpc_backup_spec.rb
index fb442f4a86f..14f9d27ca6e 100644
--- a/spec/lib/backup/gitaly_rpc_backup_spec.rb
+++ b/spec/lib/backup/gitaly_rpc_backup_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe Backup::GitalyRpcBackup do
subject.enqueue(project, Gitlab::GlRepository::DESIGN)
subject.enqueue(personal_snippet, Gitlab::GlRepository::SNIPPET)
subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET)
- subject.wait
+ subject.finish!
expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.bundle'))
expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.wiki.bundle'))
@@ -52,7 +52,7 @@ RSpec.describe Backup::GitalyRpcBackup do
it 'logs an appropriate message', :aggregate_failures do
subject.start(:create)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
- subject.wait
+ subject.finish!
expect(progress).to have_received(:puts).with("[Failed] backing up #{project.full_path} (#{project.disk_path})")
expect(progress).to have_received(:puts).with("Error Fail in tests")
@@ -96,7 +96,7 @@ RSpec.describe Backup::GitalyRpcBackup do
subject.enqueue(project, Gitlab::GlRepository::DESIGN)
subject.enqueue(personal_snippet, Gitlab::GlRepository::SNIPPET)
subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET)
- subject.wait
+ subject.finish!
collect_commit_shas = -> (repo) { repo.commits('master', limit: 10).map(&:sha) }
@@ -129,7 +129,7 @@ RSpec.describe Backup::GitalyRpcBackup do
subject.enqueue(project, Gitlab::GlRepository::DESIGN)
subject.enqueue(personal_snippet, Gitlab::GlRepository::SNIPPET)
subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET)
- subject.wait
+ subject.finish!
end
context 'failure' do
@@ -143,7 +143,7 @@ RSpec.describe Backup::GitalyRpcBackup do
it 'logs an appropriate message', :aggregate_failures do
subject.start(:restore)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
- subject.wait
+ subject.finish!
expect(progress).to have_received(:puts).with("[Failed] restoring #{project.full_path} (#{project.disk_path})")
expect(progress).to have_received(:puts).with("Error Fail in tests")
diff --git a/spec/lib/backup/lfs_spec.rb b/spec/lib/backup/lfs_spec.rb
new file mode 100644
index 00000000000..fdc1c0c885d
--- /dev/null
+++ b/spec/lib/backup/lfs_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Backup::Lfs do
+ let(:progress) { StringIO.new }
+
+ subject(:backup) { described_class.new(progress) }
+
+ describe '#dump' do
+ before do
+ allow(File).to receive(:realpath).and_call_original
+ allow(File).to receive(:realpath).with('/var/lfs-objects').and_return('/var/lfs-objects')
+ allow(File).to receive(:realpath).with('/var/lfs-objects/..').and_return('/var')
+ allow(Settings.lfs).to receive(:storage_path).and_return('/var/lfs-objects')
+ end
+
+ it 'uses the correct lfs dir in tar command', :aggregate_failures do
+ expect(backup.app_files_dir).to eq('/var/lfs-objects')
+ expect(backup).to receive(:tar).and_return('blabla-tar')
+ expect(backup).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found -C /var/lfs-objects -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], ''])
+ expect(backup).to receive(:pipeline_succeeded?).and_return(true)
+
+ backup.dump
+ end
+ end
+end
diff --git a/spec/lib/backup/manager_spec.rb b/spec/lib/backup/manager_spec.rb
index 32eea82cfdf..31cc3012eb1 100644
--- a/spec/lib/backup/manager_spec.rb
+++ b/spec/lib/backup/manager_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe Backup::Manager do
end
describe '#pack' do
- let(:expected_backup_contents) { %w(repositories db uploads.tar.gz builds.tar.gz artifacts.tar.gz pages.tar.gz lfs.tar.gz backup_information.yml) }
+ let(:expected_backup_contents) { %w(repositories db uploads.tar.gz builds.tar.gz artifacts.tar.gz pages.tar.gz lfs.tar.gz terraform_state.tar.gz packages.tar.gz backup_information.yml) }
let(:tar_file) { '1546300800_2019_01_01_12.3_gitlab_backup.tar' }
let(:tar_system_options) { { out: [tar_file, 'w', Gitlab.config.backup.archive_permissions] } }
let(:tar_cmdline) { ['tar', '-cf', '-', *expected_backup_contents, tar_system_options] }
@@ -57,7 +57,7 @@ RSpec.describe Backup::Manager do
end
context 'when skipped is set in backup_information.yml' do
- let(:expected_backup_contents) { %w{db uploads.tar.gz builds.tar.gz artifacts.tar.gz pages.tar.gz lfs.tar.gz backup_information.yml} }
+ let(:expected_backup_contents) { %w{db uploads.tar.gz builds.tar.gz artifacts.tar.gz pages.tar.gz lfs.tar.gz terraform_state.tar.gz packages.tar.gz backup_information.yml} }
let(:backup_information) do
{
backup_created_at: Time.zone.parse('2019-01-01'),
@@ -74,7 +74,7 @@ RSpec.describe Backup::Manager do
end
context 'when a directory does not exist' do
- let(:expected_backup_contents) { %w{db uploads.tar.gz builds.tar.gz artifacts.tar.gz pages.tar.gz lfs.tar.gz backup_information.yml} }
+ let(:expected_backup_contents) { %w{db uploads.tar.gz builds.tar.gz artifacts.tar.gz pages.tar.gz lfs.tar.gz terraform_state.tar.gz packages.tar.gz backup_information.yml} }
before do
expect(Dir).to receive(:exist?).with(File.join(Gitlab.config.backup.path, 'repositories')).and_return(false)
diff --git a/spec/lib/backup/object_backup_spec.rb b/spec/lib/backup/object_backup_spec.rb
new file mode 100644
index 00000000000..6192b5c3482
--- /dev/null
+++ b/spec/lib/backup/object_backup_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.shared_examples 'backup object' do |setting|
+ let(:progress) { StringIO.new }
+ let(:backup_path) { "/var/#{setting}" }
+
+ subject(:backup) { described_class.new(progress) }
+
+ describe '#dump' do
+ before do
+ allow(File).to receive(:realpath).and_call_original
+ allow(File).to receive(:realpath).with(backup_path).and_return(backup_path)
+ allow(File).to receive(:realpath).with("#{backup_path}/..").and_return('/var')
+ allow(Settings.send(setting)).to receive(:storage_path).and_return(backup_path)
+ end
+
+ it 'uses the correct storage dir in tar command and excludes tmp', :aggregate_failures do
+ expect(backup.app_files_dir).to eq(backup_path)
+ expect(backup).to receive(:tar).and_return('blabla-tar')
+ expect(backup).to receive(:run_pipeline!).with([%W(blabla-tar --exclude=lost+found --exclude=./tmp -C #{backup_path} -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], ''])
+ expect(backup).to receive(:pipeline_succeeded?).and_return(true)
+
+ backup.dump
+ end
+ end
+end
+
+RSpec.describe Backup::Packages do
+ it_behaves_like 'backup object', 'packages'
+end
+
+RSpec.describe Backup::TerraformState do
+ it_behaves_like 'backup object', 'terraform_state'
+end
diff --git a/spec/lib/backup/repositories_spec.rb b/spec/lib/backup/repositories_spec.rb
index 85818038c9d..f3830da344b 100644
--- a/spec/lib/backup/repositories_spec.rb
+++ b/spec/lib/backup/repositories_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe Backup::Repositories do
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
expect(strategy).to have_received(:enqueue).with(project_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:enqueue).with(personal_snippet, Gitlab::GlRepository::SNIPPET)
- expect(strategy).to have_received(:wait)
+ expect(strategy).to have_received(:finish!)
end
end
@@ -49,7 +49,7 @@ RSpec.describe Backup::Repositories do
projects.each do |project|
expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
end
- expect(strategy).to receive(:wait)
+ expect(strategy).to receive(:finish!)
subject.dump(max_concurrency: 1, max_storage_concurrency: 1)
end
@@ -91,7 +91,7 @@ RSpec.describe Backup::Repositories do
projects.each do |project|
expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
end
- expect(strategy).to receive(:wait)
+ expect(strategy).to receive(:finish!)
subject.dump(max_concurrency: 2, max_storage_concurrency: 2)
end
@@ -114,7 +114,7 @@ RSpec.describe Backup::Repositories do
projects.each do |project|
expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
end
- expect(strategy).to receive(:wait)
+ expect(strategy).to receive(:finish!)
subject.dump(max_concurrency: 1, max_storage_concurrency: max_storage_concurrency)
end
@@ -128,7 +128,7 @@ RSpec.describe Backup::Repositories do
projects.each do |project|
expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
end
- expect(strategy).to receive(:wait)
+ expect(strategy).to receive(:finish!)
subject.dump(max_concurrency: 3, max_storage_concurrency: max_storage_concurrency)
end
@@ -184,7 +184,7 @@ RSpec.describe Backup::Repositories do
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
expect(strategy).to have_received(:enqueue).with(project_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:enqueue).with(personal_snippet, Gitlab::GlRepository::SNIPPET)
- expect(strategy).to have_received(:wait)
+ expect(strategy).to have_received(:finish!)
end
context 'restoring object pools' do
diff --git a/spec/lib/backup/repository_backup_error_spec.rb b/spec/lib/backup/repository_backup_error_spec.rb
deleted file mode 100644
index 44c75c1cf77..00000000000
--- a/spec/lib/backup/repository_backup_error_spec.rb
+++ /dev/null
@@ -1,42 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Backup::RepositoryBackupError do
- let_it_be(:snippet) { create(:snippet, content: 'foo', file_name: 'foo') }
- let_it_be(:project) { create(:project, :repository) }
- let_it_be(:wiki) { ProjectWiki.new(project, nil ) }
-
- let(:backup_repos_path) { '/tmp/backup/repositories' }
-
- shared_examples 'includes backup path' do
- it { is_expected.to respond_to :container }
- it { is_expected.to respond_to :backup_repos_path }
-
- it 'expects exception message to include repo backup path location' do
- expect(subject.message).to include("#{subject.backup_repos_path}")
- end
-
- it 'expects exception message to include container being back-up' do
- expect(subject.message).to include("#{subject.container.disk_path}")
- end
- end
-
- context 'with snippet repository' do
- subject { described_class.new(snippet, backup_repos_path) }
-
- it_behaves_like 'includes backup path'
- end
-
- context 'with project repository' do
- subject { described_class.new(project, backup_repos_path) }
-
- it_behaves_like 'includes backup path'
- end
-
- context 'with wiki repository' do
- subject { described_class.new(wiki, backup_repos_path) }
-
- it_behaves_like 'includes backup path'
- end
-end
diff --git a/spec/lib/backup/uploads_spec.rb b/spec/lib/backup/uploads_spec.rb
index a82cb764f4d..c173916fe91 100644
--- a/spec/lib/backup/uploads_spec.rb
+++ b/spec/lib/backup/uploads_spec.rb
@@ -14,13 +14,14 @@ RSpec.describe Backup::Uploads do
allow(Gitlab.config.uploads).to receive(:storage_path) { tmpdir }
- expect(backup.app_files_dir).to eq("#{tmpdir}/uploads")
+ expect(backup.app_files_dir).to eq("#{File.realpath(tmpdir)}/uploads")
end
end
end
describe '#dump' do
before do
+ allow(File).to receive(:realpath).and_call_original
allow(File).to receive(:realpath).with('/var/uploads').and_return('/var/uploads')
allow(File).to receive(:realpath).with('/var/uploads/..').and_return('/var')
allow(Gitlab.config.uploads).to receive(:storage_path) { '/var' }
diff --git a/spec/lib/banzai/filter/footnote_filter_spec.rb b/spec/lib/banzai/filter/footnote_filter_spec.rb
index d41f5e8633d..5ac7d3af733 100644
--- a/spec/lib/banzai/filter/footnote_filter_spec.rb
+++ b/spec/lib/banzai/filter/footnote_filter_spec.rb
@@ -56,52 +56,6 @@ RSpec.describe Banzai::Filter::FootnoteFilter do
it 'properly adds the necessary ids and classes' do
expect(doc.to_html).to eq filtered_footnote.strip
end
-
- context 'using ruby-based HTML renderer' do
- # first[^1] and second[^second]
- # [^1]: one
- # [^second]: two
- let(:footnote) do
- <<~EOF
- <p>first<sup><a href="#fn1" id="fnref1">1</a></sup> and second<sup><a href="#fn2" id="fnref2">2</a></sup></p>
- <p>same reference<sup><a href="#fn1" id="fnref1">1</a></sup></p>
- <ol>
- <li id="fn1">
- <p>one <a href="#fnref1">↩</a></p>
- </li>
- <li id="fn2">
- <p>two <a href="#fnref2">↩</a></p>
- </li>
- </ol>
- EOF
- end
-
- let(:filtered_footnote) do
- <<~EOF
- <p>first<sup class="footnote-ref"><a href="#fn1-#{identifier}" id="fnref1-#{identifier}">1</a></sup> and second<sup class="footnote-ref"><a href="#fn2-#{identifier}" id="fnref2-#{identifier}">2</a></sup></p>
- <p>same reference<sup class="footnote-ref"><a href="#fn1-#{identifier}" id="fnref1-#{identifier}">1</a></sup></p>
- <section class="footnotes"><ol>
- <li id="fn1-#{identifier}">
- <p>one <a href="#fnref1-#{identifier}" class="footnote-backref">↩</a></p>
- </li>
- <li id="fn2-#{identifier}">
- <p>two <a href="#fnref2-#{identifier}" class="footnote-backref">↩</a></p>
- </li>
- </ol></section>
- EOF
- end
-
- let(:doc) { filter(footnote) }
- let(:identifier) { link_node[:id].delete_prefix('fnref1-') }
-
- before do
- stub_feature_flags(use_cmark_renderer: false)
- end
-
- it 'properly adds the necessary ids and classes' do
- expect(doc.to_html).to eq filtered_footnote
- end
- end
end
context 'when detecting footnotes' do
diff --git a/spec/lib/banzai/filter/markdown_filter_spec.rb b/spec/lib/banzai/filter/markdown_filter_spec.rb
index 1c9b894e885..e3c8d121587 100644
--- a/spec/lib/banzai/filter/markdown_filter_spec.rb
+++ b/spec/lib/banzai/filter/markdown_filter_spec.rb
@@ -5,125 +5,90 @@ require 'spec_helper'
RSpec.describe Banzai::Filter::MarkdownFilter do
include FilterSpecHelper
- shared_examples_for 'renders correct markdown' do
- describe 'markdown engine from context' do
- it 'defaults to CommonMark' do
- expect_next_instance_of(Banzai::Filter::MarkdownEngines::CommonMark) do |instance|
- expect(instance).to receive(:render).and_return('test')
- end
-
- filter('test')
+ describe 'markdown engine from context' do
+ it 'defaults to CommonMark' do
+ expect_next_instance_of(Banzai::Filter::MarkdownEngines::CommonMark) do |instance|
+ expect(instance).to receive(:render).and_return('test')
end
- it 'uses CommonMark' do
- expect_next_instance_of(Banzai::Filter::MarkdownEngines::CommonMark) do |instance|
- expect(instance).to receive(:render).and_return('test')
- end
+ filter('test')
+ end
- filter('test', { markdown_engine: :common_mark })
+ it 'uses CommonMark' do
+ expect_next_instance_of(Banzai::Filter::MarkdownEngines::CommonMark) do |instance|
+ expect(instance).to receive(:render).and_return('test')
end
+
+ filter('test', { markdown_engine: :common_mark })
end
+ end
- describe 'code block' do
- context 'using CommonMark' do
- before do
- stub_const('Banzai::Filter::MarkdownFilter::DEFAULT_ENGINE', :common_mark)
- end
-
- it 'adds language to lang attribute when specified' do
- result = filter("```html\nsome code\n```", no_sourcepos: true)
-
- if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
- expect(result).to start_with('<pre lang="html"><code>')
- else
- expect(result).to start_with('<pre><code lang="html">')
- end
- end
-
- it 'does not add language to lang attribute when not specified' do
- result = filter("```\nsome code\n```", no_sourcepos: true)
-
- expect(result).to start_with('<pre><code>')
- end
-
- it 'works with utf8 chars in language' do
- result = filter("```æ—¥\nsome code\n```", no_sourcepos: true)
-
- if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
- expect(result).to start_with('<pre lang="æ—¥"><code>')
- else
- expect(result).to start_with('<pre><code lang="æ—¥">')
- end
- end
-
- it 'works with additional language parameters' do
- result = filter("```ruby:red gem foo\nsome code\n```", no_sourcepos: true)
-
- if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
- expect(result).to start_with('<pre lang="ruby:red" data-meta="gem foo"><code>')
- else
- expect(result).to start_with('<pre><code lang="ruby:red gem foo">')
- end
- end
+ describe 'code block' do
+ context 'using CommonMark' do
+ before do
+ stub_const('Banzai::Filter::MarkdownFilter::DEFAULT_ENGINE', :common_mark)
end
- end
- describe 'source line position' do
- context 'using CommonMark' do
- before do
- stub_const('Banzai::Filter::MarkdownFilter::DEFAULT_ENGINE', :common_mark)
- end
+ it 'adds language to lang attribute when specified' do
+ result = filter("```html\nsome code\n```", no_sourcepos: true)
- it 'defaults to add data-sourcepos' do
- result = filter('test')
+ expect(result).to start_with('<pre lang="html"><code>')
+ end
- expect(result).to eq '<p data-sourcepos="1:1-1:4">test</p>'
- end
+ it 'does not add language to lang attribute when not specified' do
+ result = filter("```\nsome code\n```", no_sourcepos: true)
- it 'disables data-sourcepos' do
- result = filter('test', no_sourcepos: true)
+ expect(result).to start_with('<pre><code>')
+ end
+
+ it 'works with utf8 chars in language' do
+ result = filter("```æ—¥\nsome code\n```", no_sourcepos: true)
- expect(result).to eq '<p>test</p>'
- end
+ expect(result).to start_with('<pre lang="æ—¥"><code>')
+ end
+
+ it 'works with additional language parameters' do
+ result = filter("```ruby:red gem foo\nsome code\n```", no_sourcepos: true)
+
+ expect(result).to start_with('<pre lang="ruby:red" data-meta="gem foo"><code>')
end
end
+ end
- describe 'footnotes in tables' do
- it 'processes footnotes in table cells' do
- text = <<-MD.strip_heredoc
- | Column1 |
- | --------- |
- | foot [^1] |
+ describe 'source line position' do
+ context 'using CommonMark' do
+ before do
+ stub_const('Banzai::Filter::MarkdownFilter::DEFAULT_ENGINE', :common_mark)
+ end
- [^1]: a footnote
- MD
+ it 'defaults to add data-sourcepos' do
+ result = filter('test')
- result = filter(text, no_sourcepos: true)
+ expect(result).to eq '<p data-sourcepos="1:1-1:4">test</p>'
+ end
- expect(result).to include('<td>foot <sup')
+ it 'disables data-sourcepos' do
+ result = filter('test', no_sourcepos: true)
- if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
- expect(result).to include('<section class="footnotes" data-footnotes>')
- else
- expect(result).to include('<section class="footnotes">')
- end
+ expect(result).to eq '<p>test</p>'
end
end
end
- context 'using ruby-based HTML renderer' do
- before do
- stub_feature_flags(use_cmark_renderer: false)
- end
+ describe 'footnotes in tables' do
+ it 'processes footnotes in table cells' do
+ text = <<-MD.strip_heredoc
+ | Column1 |
+ | --------- |
+ | foot [^1] |
- it_behaves_like 'renders correct markdown'
- end
+ [^1]: a footnote
+ MD
- context 'using c-based HTML renderer' do
- before do
- stub_feature_flags(use_cmark_renderer: true)
- end
+ result = filter(text, no_sourcepos: true)
- it_behaves_like 'renders correct markdown'
+ expect(result).to include('<td>foot <sup')
+ expect(result).to include('<section class="footnotes" data-footnotes>')
+ end
end
end
diff --git a/spec/lib/banzai/filter/plantuml_filter_spec.rb b/spec/lib/banzai/filter/plantuml_filter_spec.rb
index e1e02c09fbe..2d1a01116e0 100644
--- a/spec/lib/banzai/filter/plantuml_filter_spec.rb
+++ b/spec/lib/banzai/filter/plantuml_filter_spec.rb
@@ -5,67 +5,33 @@ require 'spec_helper'
RSpec.describe Banzai::Filter::PlantumlFilter do
include FilterSpecHelper
- shared_examples_for 'renders correct markdown' do
- it 'replaces plantuml pre tag with img tag' do
- stub_application_setting(plantuml_enabled: true, plantuml_url: "http://localhost:8080")
+ it 'replaces plantuml pre tag with img tag' do
+ stub_application_setting(plantuml_enabled: true, plantuml_url: "http://localhost:8080")
- input = if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
- '<pre lang="plantuml"><code>Bob -> Sara : Hello</code></pre>'
- else
- '<pre><code lang="plantuml">Bob -> Sara : Hello</code></pre>'
- end
+ input = '<pre lang="plantuml"><code>Bob -> Sara : Hello</code></pre>'
+ output = '<div class="imageblock"><div class="content"><img class="plantuml" src="http://localhost:8080/png/U9npoazIqBLJ24uiIbImKl18pSd91m0rkGMq"></div></div>'
+ doc = filter(input)
- output = '<div class="imageblock"><div class="content"><img class="plantuml" src="http://localhost:8080/png/U9npoazIqBLJ24uiIbImKl18pSd91m0rkGMq"></div></div>'
- doc = filter(input)
-
- expect(doc.to_s).to eq output
- end
-
- it 'does not replace plantuml pre tag with img tag if disabled' do
- stub_application_setting(plantuml_enabled: false)
-
- if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
- input = '<pre lang="plantuml"><code>Bob -> Sara : Hello</code></pre>'
- output = '<pre lang="plantuml"><code>Bob -&gt; Sara : Hello</code></pre>'
- else
- input = '<pre><code lang="plantuml">Bob -> Sara : Hello</code></pre>'
- output = '<pre><code lang="plantuml">Bob -&gt; Sara : Hello</code></pre>'
- end
-
- doc = filter(input)
-
- expect(doc.to_s).to eq output
- end
-
- it 'does not replace plantuml pre tag with img tag if url is invalid' do
- stub_application_setting(plantuml_enabled: true, plantuml_url: "invalid")
+ expect(doc.to_s).to eq output
+ end
- input = if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
- '<pre lang="plantuml"><code>Bob -> Sara : Hello</code></pre>'
- else
- '<pre><code lang="plantuml">Bob -> Sara : Hello</code></pre>'
- end
+ it 'does not replace plantuml pre tag with img tag if disabled' do
+ stub_application_setting(plantuml_enabled: false)
- output = '<div class="listingblock"><div class="content"><pre class="plantuml plantuml-error"> Error: cannot connect to PlantUML server at "invalid"</pre></div></div>'
- doc = filter(input)
+ input = '<pre lang="plantuml"><code>Bob -> Sara : Hello</code></pre>'
+ output = '<pre lang="plantuml"><code>Bob -&gt; Sara : Hello</code></pre>'
+ doc = filter(input)
- expect(doc.to_s).to eq output
- end
+ expect(doc.to_s).to eq output
end
- context 'using ruby-based HTML renderer' do
- before do
- stub_feature_flags(use_cmark_renderer: false)
- end
-
- it_behaves_like 'renders correct markdown'
- end
+ it 'does not replace plantuml pre tag with img tag if url is invalid' do
+ stub_application_setting(plantuml_enabled: true, plantuml_url: "invalid")
- context 'using c-based HTML renderer' do
- before do
- stub_feature_flags(use_cmark_renderer: true)
- end
+ input = '<pre lang="plantuml"><code>Bob -> Sara : Hello</code></pre>'
+ output = '<div class="listingblock"><div class="content"><pre class="plantuml plantuml-error"> Error: cannot connect to PlantUML server at "invalid"</pre></div></div>'
+ doc = filter(input)
- it_behaves_like 'renders correct markdown'
+ expect(doc.to_s).to eq output
end
end
diff --git a/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb b/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb
index 14c1542b724..b3523a25116 100644
--- a/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb
@@ -122,6 +122,7 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter do
expect(link).to have_attribute('data-reference-format')
expect(link.attr('data-reference-format')).to eq('+')
+ expect(link.attr('href')).to eq(issue_url)
end
it 'includes a data-reference-format attribute for URL references' do
@@ -130,6 +131,7 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter do
expect(link).to have_attribute('data-reference-format')
expect(link.attr('data-reference-format')).to eq('+')
+ expect(link.attr('href')).to eq(issue_url)
end
it 'supports an :only_path context' do
diff --git a/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb b/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb
index 3c488820853..e5809ac6949 100644
--- a/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb
@@ -51,6 +51,7 @@ RSpec.describe Banzai::Filter::References::MergeRequestReferenceFilter do
context 'internal reference' do
let(:reference) { merge.to_reference }
+ let(:merge_request_url) { urls.project_merge_request_url(project, merge) }
it 'links to a valid reference' do
doc = reference_filter("See #{reference}")
@@ -115,14 +116,16 @@ RSpec.describe Banzai::Filter::References::MergeRequestReferenceFilter do
expect(link).to have_attribute('data-reference-format')
expect(link.attr('data-reference-format')).to eq('+')
+ expect(link.attr('href')).to eq(merge_request_url)
end
it 'includes a data-reference-format attribute for URL references' do
- doc = reference_filter("Merge #{urls.project_merge_request_url(project, merge)}+")
+ doc = reference_filter("Merge #{merge_request_url}+")
link = doc.css('a').first
expect(link).to have_attribute('data-reference-format')
expect(link.attr('data-reference-format')).to eq('+')
+ expect(link.attr('href')).to eq(merge_request_url)
end
it 'supports an :only_path context' do
diff --git a/spec/lib/banzai/filter/sanitization_filter_spec.rb b/spec/lib/banzai/filter/sanitization_filter_spec.rb
index 24e787bddd5..039ca36af6e 100644
--- a/spec/lib/banzai/filter/sanitization_filter_spec.rb
+++ b/spec/lib/banzai/filter/sanitization_filter_spec.rb
@@ -177,53 +177,6 @@ RSpec.describe Banzai::Filter::SanitizationFilter do
expect(act.to_html).to eq exp
end
end
-
- context 'using ruby-based HTML renderer' do
- before do
- stub_feature_flags(use_cmark_renderer: false)
- end
-
- it 'allows correct footnote id property on links' do
- exp = %q(<a href="#fn1" id="fnref1">foo/bar.md</a>)
- act = filter(exp)
-
- expect(act.to_html).to eq exp
- end
-
- it 'allows correct footnote id property on li element' do
- exp = %q(<ol><li id="fn1">footnote</li></ol>)
- act = filter(exp)
-
- expect(act.to_html).to eq exp
- end
-
- it 'removes invalid id for footnote links' do
- exp = %q(<a href="#fn1">link</a>)
-
- %w[fnrefx test xfnref1].each do |id|
- act = filter(%(<a href="#fn1" id="#{id}">link</a>))
-
- expect(act.to_html).to eq exp
- end
- end
-
- it 'removes invalid id for footnote li' do
- exp = %q(<ol><li>footnote</li></ol>)
-
- %w[fnx test xfn1].each do |id|
- act = filter(%(<ol><li id="#{id}">footnote</li></ol>))
-
- expect(act.to_html).to eq exp
- end
- end
-
- it 'allows footnotes numbered higher than 9' do
- exp = %q(<a href="#fn15" id="fnref15">link</a><ol><li id="fn15">footnote</li></ol>)
- act = filter(exp)
-
- expect(act.to_html).to eq exp
- end
- end
end
end
end
diff --git a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
index ef46fd62486..aee4bd93207 100644
--- a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
+++ b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
@@ -19,202 +19,150 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do
end
end
- shared_examples_for 'renders correct markdown' do
- context "when no language is specified" do
- it "highlights as plaintext" do
- result = filter('<pre><code>def fun end</code></pre>')
+ context "when no language is specified" do
+ it "highlights as plaintext" do
+ result = filter('<pre><code>def fun end</code></pre>')
- expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">def fun end</span></code></pre><copy-code></copy-code></div>')
- end
-
- include_examples "XSS prevention", ""
+ expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">def fun end</span></code></pre><copy-code></copy-code></div>')
end
- context "when contains mermaid diagrams" do
- it "ignores mermaid blocks" do
- result = filter('<pre data-mermaid-style="display"><code>mermaid code</code></pre>')
+ include_examples "XSS prevention", ""
+ end
- expect(result.to_html).to eq('<pre data-mermaid-style="display"><code>mermaid code</code></pre>')
- end
+ context "when contains mermaid diagrams" do
+ it "ignores mermaid blocks" do
+ result = filter('<pre data-mermaid-style="display"><code>mermaid code</code></pre>')
+
+ expect(result.to_html).to eq('<pre data-mermaid-style="display"><code>mermaid code</code></pre>')
end
+ end
- context "when a valid language is specified" do
- it "highlights as that language" do
- result = if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
- filter('<pre lang="ruby"><code>def fun end</code></pre>')
- else
- filter('<pre><code lang="ruby">def fun end</code></pre>')
- end
+ context "when <pre> contains multiple <code> tags" do
+ it "ignores the block" do
+ result = filter('<pre><code>one</code> and <code>two</code></pre>')
- expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-ruby" lang="ruby" v-pre="true"><code><span id="LC1" class="line" lang="ruby"><span class="k">def</span> <span class="nf">fun</span> <span class="k">end</span></span></code></pre><copy-code></copy-code></div>')
- end
+ expect(result.to_html).to eq('<pre><code>one</code> and <code>two</code></pre>')
+ end
+ end
- include_examples "XSS prevention", "ruby"
+ context "when a valid language is specified" do
+ it "highlights as that language" do
+ result = filter('<pre lang="ruby"><code>def fun end</code></pre>')
+
+ expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-ruby" lang="ruby" v-pre="true"><code><span id="LC1" class="line" lang="ruby"><span class="k">def</span> <span class="nf">fun</span> <span class="k">end</span></span></code></pre><copy-code></copy-code></div>')
end
- context "when an invalid language is specified" do
- it "highlights as plaintext" do
- result = if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
- filter('<pre lang="gnuplot"><code>This is a test</code></pre>')
- else
- filter('<pre><code lang="gnuplot">This is a test</code></pre>')
- end
+ include_examples "XSS prevention", "ruby"
+ end
- expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">This is a test</span></code></pre><copy-code></copy-code></div>')
- end
+ context "when an invalid language is specified" do
+ it "highlights as plaintext" do
+ result = filter('<pre lang="gnuplot"><code>This is a test</code></pre>')
- include_examples "XSS prevention", "gnuplot"
+ expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">This is a test</span></code></pre><copy-code></copy-code></div>')
end
- context "languages that should be passed through" do
- let(:delimiter) { described_class::LANG_PARAMS_DELIMITER }
- let(:data_attr) { described_class::LANG_PARAMS_ATTR }
+ include_examples "XSS prevention", "gnuplot"
+ end
- %w(math mermaid plantuml suggestion).each do |lang|
- context "when #{lang} is specified" do
- it "highlights as plaintext but with the correct language attribute and class" do
- result = if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
- filter(%{<pre lang="#{lang}"><code>This is a test</code></pre>})
- else
- filter(%{<pre><code lang="#{lang}">This is a test</code></pre>})
- end
+ context "languages that should be passed through" do
+ let(:delimiter) { described_class::LANG_PARAMS_DELIMITER }
+ let(:data_attr) { described_class::LANG_PARAMS_ATTR }
- expect(result.to_html.delete("\n")).to eq(%{<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre><copy-code></copy-code></div>})
- end
+ %w(math mermaid plantuml suggestion).each do |lang|
+ context "when #{lang} is specified" do
+ it "highlights as plaintext but with the correct language attribute and class" do
+ result = filter(%{<pre lang="#{lang}"><code>This is a test</code></pre>})
- include_examples "XSS prevention", lang
+ expect(result.to_html.delete("\n")).to eq(%{<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre><copy-code></copy-code></div>})
end
- context "when #{lang} has extra params" do
- let(:lang_params) { 'foo-bar-kux' }
-
- let(:xss_lang) do
- if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
- "#{lang} data-meta=\"foo-bar-kux\"&lt;script&gt;alert(1)&lt;/script&gt;"
- else
- "#{lang}#{described_class::LANG_PARAMS_DELIMITER}&lt;script&gt;alert(1)&lt;/script&gt;"
- end
- end
-
- it "includes data-lang-params tag with extra information" do
- result = if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
- filter(%{<pre lang="#{lang}" data-meta="#{lang_params}"><code>This is a test</code></pre>})
- else
- filter(%{<pre><code lang="#{lang}#{delimiter}#{lang_params}">This is a test</code></pre>})
- end
-
- expect(result.to_html.delete("\n")).to eq(%{<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" #{data_attr}="#{lang_params}" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre><copy-code></copy-code></div>})
- end
-
- include_examples "XSS prevention", lang
-
- if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
- include_examples "XSS prevention",
- "#{lang} data-meta=\"foo-bar-kux\"&lt;script&gt;alert(1)&lt;/script&gt;"
- else
- include_examples "XSS prevention",
- "#{lang}#{described_class::LANG_PARAMS_DELIMITER}&lt;script&gt;alert(1)&lt;/script&gt;"
- end
-
- include_examples "XSS prevention",
- "#{lang} data-meta=\"foo-bar-kux\"<script>alert(1)</script>"
- end
+ include_examples "XSS prevention", lang
end
- context 'when multiple param delimiters are used' do
- let(:lang) { 'suggestion' }
- let(:lang_params) { '-1+10' }
+ context "when #{lang} has extra params" do
+ let(:lang_params) { 'foo-bar-kux' }
+ let(:xss_lang) { "#{lang} data-meta=\"foo-bar-kux\"&lt;script&gt;alert(1)&lt;/script&gt;" }
- let(:expected_result) do
- %{<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" #{data_attr}="#{lang_params} more-things" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre><copy-code></copy-code></div>}
- end
-
- context 'when delimiter is space' do
- it 'delimits on the first appearance' do
- if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
- result = filter(%{<pre lang="#{lang}" data-meta="#{lang_params} more-things"><code>This is a test</code></pre>})
+ it "includes data-lang-params tag with extra information" do
+ result = filter(%{<pre lang="#{lang}" data-meta="#{lang_params}"><code>This is a test</code></pre>})
- expect(result.to_html.delete("\n")).to eq(expected_result)
- else
- result = filter(%{<pre><code lang="#{lang}#{delimiter}#{lang_params}#{delimiter}more-things">This is a test</code></pre>})
-
- expect(result.to_html.delete("\n")).to eq(%{<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" #{data_attr}="#{lang_params}#{delimiter}more-things" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre><copy-code></copy-code></div>})
- end
- end
+ expect(result.to_html.delete("\n")).to eq(%{<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" #{data_attr}="#{lang_params}" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre><copy-code></copy-code></div>})
end
- context 'when delimiter is colon' do
- it 'delimits on the first appearance' do
- result = filter(%{<pre lang="#{lang}#{delimiter}#{lang_params} more-things"><code>This is a test</code></pre>})
+ include_examples "XSS prevention", lang
- if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
- expect(result.to_html.delete("\n")).to eq(expected_result)
- else
- expect(result.to_html.delete("\n")).to eq(%{<div class="gl-relative markdown-code-block js-markdown-code"><pre class=\"code highlight js-syntax-highlight language-plaintext\" lang=\"plaintext\" v-pre=\"true\"><code><span id=\"LC1\" class=\"line\" lang=\"plaintext\">This is a test</span></code></pre><copy-code></copy-code></div>})
- end
- end
- end
+ include_examples "XSS prevention",
+ "#{lang} data-meta=\"foo-bar-kux\"&lt;script&gt;alert(1)&lt;/script&gt;"
+
+ include_examples "XSS prevention",
+ "#{lang} data-meta=\"foo-bar-kux\"<script>alert(1)</script>"
end
end
- context "when sourcepos metadata is available" do
- it "includes it in the highlighted code block" do
- result = filter('<pre data-sourcepos="1:1-3:3"><code lang="plaintext">This is a test</code></pre>')
+ context 'when multiple param delimiters are used' do
+ let(:lang) { 'suggestion' }
+ let(:lang_params) { '-1+10' }
- expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">This is a test</span></code></pre><copy-code></copy-code></div>')
+ let(:expected_result) do
+ %{<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" #{data_attr}="#{lang_params} more-things" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre><copy-code></copy-code></div>}
end
- end
- context "when Rouge lexing fails" do
- before do
- allow_next_instance_of(Rouge::Lexers::Ruby) do |instance|
- allow(instance).to receive(:stream_tokens).and_raise(StandardError)
+ context 'when delimiter is space' do
+ it 'delimits on the first appearance' do
+ result = filter(%{<pre lang="#{lang}" data-meta="#{lang_params} more-things"><code>This is a test</code></pre>})
+
+ expect(result.to_html.delete("\n")).to eq(expected_result)
end
end
- it "highlights as plaintext" do
- result = if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
- filter('<pre lang="ruby"><code>This is a test</code></pre>')
- else
- filter('<pre><code lang="ruby">This is a test</code></pre>')
- end
+ context 'when delimiter is colon' do
+ it 'delimits on the first appearance' do
+ result = filter(%{<pre lang="#{lang}#{delimiter}#{lang_params} more-things"><code>This is a test</code></pre>})
- expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight" lang="" v-pre="true"><code><span id="LC1" class="line" lang="">This is a test</span></code></pre><copy-code></copy-code></div>')
+ expect(result.to_html.delete("\n")).to eq(expected_result)
+ end
end
-
- include_examples "XSS prevention", "ruby"
end
+ end
- context "when Rouge lexing fails after a retry" do
- before do
- allow_next_instance_of(Rouge::Lexers::PlainText) do |instance|
- allow(instance).to receive(:stream_tokens).and_raise(StandardError)
- end
- end
+ context "when sourcepos metadata is available" do
+ it "includes it in the highlighted code block" do
+ result = filter('<pre data-sourcepos="1:1-3:3"><code lang="plaintext">This is a test</code></pre>')
- it "does not add highlighting classes" do
- result = filter('<pre><code>This is a test</code></pre>')
+ expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">This is a test</span></code></pre><copy-code></copy-code></div>')
+ end
+ end
- expect(result.to_html).to eq('<pre><code>This is a test</code></pre>')
+ context "when Rouge lexing fails" do
+ before do
+ allow_next_instance_of(Rouge::Lexers::Ruby) do |instance|
+ allow(instance).to receive(:stream_tokens).and_raise(StandardError)
end
+ end
+
+ it "highlights as plaintext" do
+ result = filter('<pre lang="ruby"><code>This is a test</code></pre>')
- include_examples "XSS prevention", "ruby"
+ expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight" lang="" v-pre="true"><code><span id="LC1" class="line" lang="">This is a test</span></code></pre><copy-code></copy-code></div>')
end
+
+ include_examples "XSS prevention", "ruby"
end
- context 'using ruby-based HTML renderer' do
+ context "when Rouge lexing fails after a retry" do
before do
- stub_feature_flags(use_cmark_renderer: false)
+ allow_next_instance_of(Rouge::Lexers::PlainText) do |instance|
+ allow(instance).to receive(:stream_tokens).and_raise(StandardError)
+ end
end
- it_behaves_like 'renders correct markdown'
- end
+ it "does not add highlighting classes" do
+ result = filter('<pre><code>This is a test</code></pre>')
- context 'using c-based HTML renderer' do
- before do
- stub_feature_flags(use_cmark_renderer: true)
+ expect(result.to_html).to eq('<pre><code>This is a test</code></pre>')
end
- it_behaves_like 'renders correct markdown'
+ include_examples "XSS prevention", "ruby"
end
end
diff --git a/spec/lib/banzai/pipeline/full_pipeline_spec.rb b/spec/lib/banzai/pipeline/full_pipeline_spec.rb
index 620b7d97a5b..376edfb99fc 100644
--- a/spec/lib/banzai/pipeline/full_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/full_pipeline_spec.rb
@@ -65,47 +65,6 @@ RSpec.describe Banzai::Pipeline::FullPipeline do
expect(html.lines.map(&:strip).join("\n")).to eq filtered_footnote.strip
end
-
- context 'using ruby-based HTML renderer' do
- let(:html) { described_class.to_html(footnote_markdown, project: project) }
- let(:identifier) { html[/.*fnref1-(\d+).*/, 1] }
- let(:footnote_markdown) do
- <<~EOF
- first[^1] and second[^second] and twenty[^twenty]
- [^1]: one
- [^second]: two
- [^twenty]: twenty
- EOF
- end
-
- let(:filtered_footnote) do
- <<~EOF
- <p dir="auto">first<sup class="footnote-ref"><a href="#fn1-#{identifier}" id="fnref1-#{identifier}">1</a></sup> and second<sup class="footnote-ref"><a href="#fn2-#{identifier}" id="fnref2-#{identifier}">2</a></sup> and twenty<sup class="footnote-ref"><a href="#fn3-#{identifier}" id="fnref3-#{identifier}">3</a></sup></p>
-
- <section class="footnotes"><ol>
- <li id="fn1-#{identifier}">
- <p>one <a href="#fnref1-#{identifier}" class="footnote-backref"><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p>
- </li>
- <li id="fn2-#{identifier}">
- <p>two <a href="#fnref2-#{identifier}" class="footnote-backref"><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p>
- </li>
- <li id="fn3-#{identifier}">
- <p>twenty <a href="#fnref3-#{identifier}" class="footnote-backref"><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p>
- </li>
- </ol></section>
- EOF
- end
-
- before do
- stub_feature_flags(use_cmark_renderer: false)
- end
-
- it 'properly adds the necessary ids and classes' do
- stub_commonmark_sourcepos_disabled
-
- expect(html.lines.map(&:strip).join("\n")).to eq filtered_footnote
- end
- end
end
describe 'links are detected as malicious' do
diff --git a/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb b/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb
index c8cd9d4fcac..80392fe264f 100644
--- a/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb
@@ -5,117 +5,93 @@ require 'spec_helper'
RSpec.describe Banzai::Pipeline::PlainMarkdownPipeline do
using RSpec::Parameterized::TableSyntax
- shared_examples_for 'renders correct markdown' do
- describe 'CommonMark tests', :aggregate_failures do
- it 'converts all reference punctuation to literals' do
- reference_chars = Banzai::Filter::MarkdownPreEscapeFilter::REFERENCE_CHARACTERS
- markdown = reference_chars.split('').map {|char| char.prepend("\\") }.join
- punctuation = Banzai::Filter::MarkdownPreEscapeFilter::REFERENCE_CHARACTERS.split('')
- punctuation = punctuation.delete_if {|char| char == '&' }
- punctuation << '&amp;'
-
- result = described_class.call(markdown, project: project)
- output = result[:output].to_html
-
- punctuation.each { |char| expect(output).to include("<span>#{char}</span>") }
- expect(result[:escaped_literals]).to be_truthy
- end
+ describe 'backslash escapes', :aggregate_failures do
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:issue) { create(:issue, project: project) }
- it 'ensure we handle all the GitLab reference characters', :eager_load do
- reference_chars = ObjectSpace.each_object(Class).map do |klass|
- next unless klass.included_modules.include?(Referable)
- next unless klass.respond_to?(:reference_prefix)
- next unless klass.reference_prefix.length == 1
+ it 'converts all reference punctuation to literals' do
+ reference_chars = Banzai::Filter::MarkdownPreEscapeFilter::REFERENCE_CHARACTERS
+ markdown = reference_chars.split('').map {|char| char.prepend("\\") }.join
+ punctuation = Banzai::Filter::MarkdownPreEscapeFilter::REFERENCE_CHARACTERS.split('')
+ punctuation = punctuation.delete_if {|char| char == '&' }
+ punctuation << '&amp;'
- klass.reference_prefix
- end.compact
+ result = described_class.call(markdown, project: project)
+ output = result[:output].to_html
- reference_chars.all? do |char|
- Banzai::Filter::MarkdownPreEscapeFilter::REFERENCE_CHARACTERS.include?(char)
- end
- end
+ punctuation.each { |char| expect(output).to include("<span>#{char}</span>") }
+ expect(result[:escaped_literals]).to be_truthy
+ end
- it 'does not convert non-reference punctuation to spans' do
- markdown = %q(\"\'\*\+\,\-\.\/\:\;\<\=\>\?\[\]\_\`\{\|\}) + %q[\(\)\\\\]
+ it 'ensure we handle all the GitLab reference characters', :eager_load do
+ reference_chars = ObjectSpace.each_object(Class).map do |klass|
+ next unless klass.included_modules.include?(Referable)
+ next unless klass.respond_to?(:reference_prefix)
+ next unless klass.reference_prefix.length == 1
- result = described_class.call(markdown, project: project)
- output = result[:output].to_html
+ klass.reference_prefix
+ end.compact
- expect(output).not_to include('<span>')
- expect(result[:escaped_literals]).to be_falsey
+ reference_chars.all? do |char|
+ Banzai::Filter::MarkdownPreEscapeFilter::REFERENCE_CHARACTERS.include?(char)
end
+ end
- it 'does not convert other characters to literals' do
- markdown = %q(\→\A\a\ \3\φ\«)
- expected = '\→\A\a\ \3\φ\«'
-
- result = correct_html_included(markdown, expected)
- expect(result[:escaped_literals]).to be_falsey
- end
+ it 'does not convert non-reference punctuation to spans' do
+ markdown = %q(\"\'\*\+\,\-\.\/\:\;\<\=\>\?\[\]\_\`\{\|\}) + %q[\(\)\\\\]
- describe 'backslash escapes do not work in code blocks, code spans, autolinks, or raw HTML' do
- where(:markdown, :expected) do
- %q(`` \@\! ``) | %q(<code>\@\!</code>)
- %q( \@\!) | %Q(<code>\\@\\!\n</code>)
- %Q(~~~\n\\@\\!\n~~~) | %Q(<code>\\@\\!\n</code>)
- %q(<http://example.com?find=\@>) | %q(<a href="http://example.com?find=%5C@">http://example.com?find=\@</a>)
- %q[<a href="/bar\@)">] | %q[<a href="/bar%5C@)">]
- end
-
- with_them do
- it { correct_html_included(markdown, expected) }
- end
- end
+ result = described_class.call(markdown, project: project)
+ output = result[:output].to_html
- describe 'work in all other contexts, including URLs and link titles, link references, and info strings in fenced code blocks' do
- let(:markdown) { %Q(``` foo\\@bar\nfoo\n```) }
-
- it 'renders correct html' do
- if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
- correct_html_included(markdown, %Q(<pre data-sourcepos="1:1-3:3" lang="foo@bar"><code>foo\n</code></pre>))
- else
- correct_html_included(markdown, %Q(<code lang="foo@bar">foo\n</code>))
- end
- end
-
- where(:markdown, :expected) do
- %q![foo](/bar\@ "\@title")! | %q(<a href="/bar@" title="@title">foo</a>)
- %Q![foo]\n\n[foo]: /bar\\@ "\\@title"! | %q(<a href="/bar@" title="@title">foo</a>)
- end
-
- with_them do
- it { correct_html_included(markdown, expected) }
- end
- end
+ expect(output).not_to include('<span>')
+ expect(result[:escaped_literals]).to be_falsey
end
- end
-
- describe 'backslash escapes' do
- let_it_be(:project) { create(:project, :public) }
- let_it_be(:issue) { create(:issue, project: project) }
-
- def correct_html_included(markdown, expected)
- result = described_class.call(markdown, {})
- expect(result[:output].to_html).to include(expected)
+ it 'does not convert other characters to literals' do
+ markdown = %q(\→\A\a\ \3\φ\«)
+ expected = '\→\A\a\ \3\φ\«'
- result
+ result = correct_html_included(markdown, expected)
+ expect(result[:escaped_literals]).to be_falsey
end
- context 'using ruby-based HTML renderer' do
- before do
- stub_feature_flags(use_cmark_renderer: false)
+ describe 'backslash escapes do not work in code blocks, code spans, autolinks, or raw HTML' do
+ where(:markdown, :expected) do
+ %q(`` \@\! ``) | %q(<code>\@\!</code>)
+ %q( \@\!) | %Q(<code>\\@\\!\n</code>)
+ %Q(~~~\n\\@\\!\n~~~) | %Q(<code>\\@\\!\n</code>)
+ %q(<http://example.com?find=\@>) | %q(<a href="http://example.com?find=%5C@">http://example.com?find=\@</a>)
+ %q[<a href="/bar\@)">] | %q[<a href="/bar%5C@)">]
end
- it_behaves_like 'renders correct markdown'
+ with_them do
+ it { correct_html_included(markdown, expected) }
+ end
end
- context 'using c-based HTML renderer' do
- before do
- stub_feature_flags(use_cmark_renderer: true)
+ describe 'work in all other contexts, including URLs and link titles, link references, and info strings in fenced code blocks' do
+ let(:markdown) { %Q(``` foo\\@bar\nfoo\n```) }
+
+ it 'renders correct html' do
+ correct_html_included(markdown, %Q(<pre data-sourcepos="1:1-3:3" lang="foo@bar"><code>foo\n</code></pre>))
+ end
+
+ where(:markdown, :expected) do
+ %q![foo](/bar\@ "\@title")! | %q(<a href="/bar@" title="@title">foo</a>)
+ %Q![foo]\n\n[foo]: /bar\\@ "\\@title"! | %q(<a href="/bar@" title="@title">foo</a>)
end
- it_behaves_like 'renders correct markdown'
+ with_them do
+ it { correct_html_included(markdown, expected) }
+ end
end
end
+
+ def correct_html_included(markdown, expected)
+ result = described_class.call(markdown, {})
+
+ expect(result[:output].to_html).to include(expected)
+
+ result
+ end
end
diff --git a/spec/lib/banzai/reference_parser/merge_request_parser_spec.rb b/spec/lib/banzai/reference_parser/merge_request_parser_spec.rb
index 04c35c8b082..3fbda7f3239 100644
--- a/spec/lib/banzai/reference_parser/merge_request_parser_spec.rb
+++ b/spec/lib/banzai/reference_parser/merge_request_parser_spec.rb
@@ -23,14 +23,6 @@ RSpec.describe Banzai::ReferenceParser::MergeRequestParser do
end
it_behaves_like "referenced feature visibility", "merge_requests"
-
- context 'when optimize_merge_request_parser feature flag is off' do
- before do
- stub_feature_flags(optimize_merge_request_parser: false)
- end
-
- it_behaves_like "referenced feature visibility", "merge_requests"
- end
end
end
diff --git a/spec/lib/bulk_imports/common/extractors/ndjson_extractor_spec.rb b/spec/lib/bulk_imports/common/extractors/ndjson_extractor_spec.rb
index bd306233de8..d6e19a5fc85 100644
--- a/spec/lib/bulk_imports/common/extractors/ndjson_extractor_spec.rb
+++ b/spec/lib/bulk_imports/common/extractors/ndjson_extractor_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe BulkImports::Common::Extractors::NdjsonExtractor do
before do
allow(FileUtils).to receive(:remove_entry).with(any_args).and_call_original
- subject.instance_variable_set(:@tmp_dir, tmpdir)
+ subject.instance_variable_set(:@tmpdir, tmpdir)
end
after(:all) do
@@ -43,11 +43,11 @@ RSpec.describe BulkImports::Common::Extractors::NdjsonExtractor do
end
end
- describe '#remove_tmp_dir' do
+ describe '#remove_tmpdir' do
it 'removes tmp dir' do
expect(FileUtils).to receive(:remove_entry).with(tmpdir).once
- subject.remove_tmp_dir
+ subject.remove_tmpdir
end
end
end
diff --git a/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb
index 3b5ea131d0d..9d43bb3ebfb 100644
--- a/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb
@@ -3,10 +3,10 @@
require 'spec_helper'
RSpec.describe BulkImports::Common::Pipelines::UploadsPipeline do
- let_it_be(:tmpdir) { Dir.mktmpdir }
let_it_be(:project) { create(:project) }
let_it_be(:group) { create(:group) }
+ let(:tmpdir) { Dir.mktmpdir }
let(:uploads_dir_path) { File.join(tmpdir, '72a497a02fe3ee09edae2ed06d390038') }
let(:upload_file_path) { File.join(uploads_dir_path, 'upload.txt')}
let(:tracker) { create(:bulk_import_tracker, entity: entity) }
@@ -80,10 +80,10 @@ RSpec.describe BulkImports::Common::Pipelines::UploadsPipeline do
.with(
configuration: context.configuration,
relative_url: "/#{entity.pluralized_name}/test/export_relations/download?relation=uploads",
- dir: tmpdir,
+ tmpdir: tmpdir,
filename: 'uploads.tar.gz')
.and_return(download_service)
- expect(BulkImports::FileDecompressionService).to receive(:new).with(dir: tmpdir, filename: 'uploads.tar.gz').and_return(decompression_service)
+ expect(BulkImports::FileDecompressionService).to receive(:new).with(tmpdir: tmpdir, filename: 'uploads.tar.gz').and_return(decompression_service)
expect(BulkImports::ArchiveExtractionService).to receive(:new).with(tmpdir: tmpdir, filename: 'uploads.tar').and_return(extraction_service)
expect(download_service).to receive(:execute)
@@ -123,6 +123,31 @@ RSpec.describe BulkImports::Common::Pipelines::UploadsPipeline do
end
end
end
+
+ describe '#after_run' do
+ before do
+ allow(Dir).to receive(:mktmpdir).with('bulk_imports').and_return(tmpdir)
+ end
+
+ it 'removes tmp dir' do
+ allow(FileUtils).to receive(:remove_entry).and_call_original
+ expect(FileUtils).to receive(:remove_entry).with(tmpdir).and_call_original
+
+ pipeline.after_run(nil)
+
+ expect(Dir.exist?(tmpdir)).to eq(false)
+ end
+
+ context 'when dir does not exist' do
+ it 'does not attempt to remove tmpdir' do
+ FileUtils.remove_entry(tmpdir)
+
+ expect(FileUtils).not_to receive(:remove_entry).with(tmpdir)
+
+ pipeline.after_run(nil)
+ end
+ end
+ end
end
context 'when importing to group' do
diff --git a/spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb
index 11c475318bb..df7ff5b8062 100644
--- a/spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb
@@ -56,7 +56,7 @@ RSpec.describe BulkImports::Projects::Pipelines::ProjectAttributesPipeline do
subject(:pipeline) { described_class.new(context) }
before do
- allow(Dir).to receive(:mktmpdir).and_return(tmpdir)
+ allow(Dir).to receive(:mktmpdir).with('bulk_imports').and_return(tmpdir)
end
after do
@@ -95,13 +95,13 @@ RSpec.describe BulkImports::Projects::Pipelines::ProjectAttributesPipeline do
.with(
configuration: context.configuration,
relative_url: "/#{entity.pluralized_name}/#{entity.source_full_path}/export_relations/download?relation=self",
- dir: tmpdir,
+ tmpdir: tmpdir,
filename: 'self.json.gz')
.and_return(file_download_service)
expect(BulkImports::FileDecompressionService)
.to receive(:new)
- .with(dir: tmpdir, filename: 'self.json.gz')
+ .with(tmpdir: tmpdir, filename: 'self.json.gz')
.and_return(file_decompression_service)
expect(file_download_service).to receive(:execute)
@@ -156,4 +156,25 @@ RSpec.describe BulkImports::Projects::Pipelines::ProjectAttributesPipeline do
pipeline.json_attributes
end
end
+
+ describe '#after_run' do
+ it 'removes tmp dir' do
+ allow(FileUtils).to receive(:remove_entry).and_call_original
+ expect(FileUtils).to receive(:remove_entry).with(tmpdir).and_call_original
+
+ pipeline.after_run(nil)
+
+ expect(Dir.exist?(tmpdir)).to eq(false)
+ end
+
+ context 'when dir does not exist' do
+ it 'does not attempt to remove tmpdir' do
+ FileUtils.remove_entry(tmpdir)
+
+ expect(FileUtils).not_to receive(:remove_entry).with(tmpdir)
+
+ pipeline.after_run(nil)
+ end
+ end
+ end
end
diff --git a/spec/lib/error_tracking/collector/payload_validator_spec.rb b/spec/lib/error_tracking/collector/payload_validator_spec.rb
index ab5ec448dff..94708f63bf4 100644
--- a/spec/lib/error_tracking/collector/payload_validator_spec.rb
+++ b/spec/lib/error_tracking/collector/payload_validator_spec.rb
@@ -18,37 +18,25 @@ RSpec.describe ErrorTracking::Collector::PayloadValidator do
end
end
- context 'ruby payload' do
- let(:payload) { Gitlab::Json.parse(fixture_file('error_tracking/parsed_event.json')) }
-
- it_behaves_like 'valid payload'
- end
-
- context 'python payload' do
- let(:payload) { Gitlab::Json.parse(fixture_file('error_tracking/python_event.json')) }
-
- it_behaves_like 'valid payload'
- end
-
- context 'python payload in repl' do
- let(:payload) { Gitlab::Json.parse(fixture_file('error_tracking/python_event_repl.json')) }
-
- it_behaves_like 'valid payload'
- end
+ context 'with event fixtures' do
+ where(:event_fixture) do
+ Dir.glob(Rails.root.join('spec/fixtures/error_tracking/*event*.json'))
+ end
- context 'browser payload' do
- let(:payload) { Gitlab::Json.parse(fixture_file('error_tracking/browser_event.json')) }
+ with_them do
+ let(:payload) { Gitlab::Json.parse(fixture_file(event_fixture)) }
- it_behaves_like 'valid payload'
+ it_behaves_like 'valid payload'
+ end
end
- context 'empty payload' do
+ context 'when empty' do
let(:payload) { '' }
it_behaves_like 'invalid payload'
end
- context 'invalid payload' do
+ context 'when invalid' do
let(:payload) { { 'foo' => 'bar' } }
it_behaves_like 'invalid payload'
diff --git a/spec/lib/feature_spec.rb b/spec/lib/feature_spec.rb
index 82580d5d700..8c546390201 100644
--- a/spec/lib/feature_spec.rb
+++ b/spec/lib/feature_spec.rb
@@ -3,12 +3,40 @@
require 'spec_helper'
RSpec.describe Feature, stub_feature_flags: false do
+ include StubVersion
+
before do
# reset Flipper AR-engine
Feature.reset
skip_feature_flags_yaml_validation
end
+ describe '.feature_flags_available?' do
+ it 'returns false on connection error' do
+ expect(ActiveRecord::Base.connection).to receive(:active?).and_raise(PG::ConnectionBad) # rubocop:disable Database/MultipleDatabases
+
+ expect(described_class.feature_flags_available?).to eq(false)
+ end
+
+ it 'returns false when connection is not active' do
+ expect(ActiveRecord::Base.connection).to receive(:active?).and_return(false) # rubocop:disable Database/MultipleDatabases
+
+ expect(described_class.feature_flags_available?).to eq(false)
+ end
+
+ it 'returns false when the flipper table does not exist' do
+ expect(Feature::FlipperFeature).to receive(:table_exists?).and_return(false)
+
+ expect(described_class.feature_flags_available?).to eq(false)
+ end
+
+ it 'returns false on NoDatabaseError' do
+ expect(Feature::FlipperFeature).to receive(:table_exists?).and_raise(ActiveRecord::NoDatabaseError)
+
+ expect(described_class.feature_flags_available?).to eq(false)
+ end
+ end
+
describe '.get' do
let(:feature) { double(:feature) }
let(:key) { 'my_feature' }
@@ -585,6 +613,10 @@ RSpec.describe Feature, stub_feature_flags: false do
context 'when flag is new and not feature_flag_state_logs' do
let(:milestone) { "14.6" }
+ before do
+ stub_version('14.5.123', 'deadbeef')
+ end
+
it { is_expected.to be_truthy }
end
diff --git a/spec/lib/gitlab/asciidoc_spec.rb b/spec/lib/gitlab/asciidoc_spec.rb
index 7200ff3c4db..44bbbe49cd3 100644
--- a/spec/lib/gitlab/asciidoc_spec.rb
+++ b/spec/lib/gitlab/asciidoc_spec.rb
@@ -11,13 +11,27 @@ module Gitlab
allow_any_instance_of(ApplicationSetting).to receive(:current).and_return(::ApplicationSetting.create_from_defaults)
end
- shared_examples_for 'renders correct asciidoc' do
- context "without project" do
- let(:input) { '<b>ascii</b>' }
- let(:context) { {} }
- let(:html) { 'H<sub>2</sub>O' }
+ context "without project" do
+ let(:input) { '<b>ascii</b>' }
+ let(:context) { {} }
+ let(:html) { 'H<sub>2</sub>O' }
+
+ it "converts the input using Asciidoctor and default options" do
+ expected_asciidoc_opts = {
+ safe: :secure,
+ backend: :gitlab_html5,
+ attributes: described_class::DEFAULT_ADOC_ATTRS.merge({ "kroki-server-url" => nil }),
+ extensions: be_a(Proc)
+ }
+
+ expect(Asciidoctor).to receive(:convert)
+ .with(input, expected_asciidoc_opts).and_return(html)
+
+ expect(render(input, context)).to eq(html)
+ end
- it "converts the input using Asciidoctor and default options" do
+ context "with asciidoc_opts" do
+ it "merges the options with default ones" do
expected_asciidoc_opts = {
safe: :secure,
backend: :gitlab_html5,
@@ -28,845 +42,808 @@ module Gitlab
expect(Asciidoctor).to receive(:convert)
.with(input, expected_asciidoc_opts).and_return(html)
- expect(render(input, context)).to eq(html)
+ render(input, context)
end
+ end
- context "with asciidoc_opts" do
- it "merges the options with default ones" do
- expected_asciidoc_opts = {
- safe: :secure,
- backend: :gitlab_html5,
- attributes: described_class::DEFAULT_ADOC_ATTRS.merge({ "kroki-server-url" => nil }),
- extensions: be_a(Proc)
- }
+ context "with requested path" do
+ input = <<~ADOC
+ Document name: {docname}.
+ ADOC
+
+ it "ignores {docname} when not available" do
+ expect(render(input, {})).to include(input.strip)
+ end
+
+ [
+ ['/', '', 'root'],
+ ['README', 'README', 'just a filename'],
+ ['doc/api/', '', 'a directory'],
+ ['doc/api/README.adoc', 'README', 'a complete path']
+ ].each do |path, basename, desc|
+ it "sets {docname} for #{desc}" do
+ expect(render(input, { requested_path: path })).to include(": #{basename}.")
+ end
+ end
+ end
- expect(Asciidoctor).to receive(:convert)
- .with(input, expected_asciidoc_opts).and_return(html)
+ context "XSS" do
+ items = {
+ 'link with extra attribute' => {
+ input: 'link:mylink"onmouseover="alert(1)[Click Here]',
+ output: "<div>\n<p><a href=\"mylink\">Click Here</a></p>\n</div>"
+ },
+ 'link with unsafe scheme' => {
+ input: 'link:data://danger[Click Here]',
+ output: "<div>\n<p><a>Click Here</a></p>\n</div>"
+ },
+ 'image with onerror' => {
+ input: 'image:https://localhost.com/image.png[Alt text" onerror="alert(7)]',
+ output: "<div>\n<p><span><a class=\"no-attachment-icon\" href=\"https://localhost.com/image.png\" target=\"_blank\" rel=\"noopener noreferrer\"><img src=\"data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==\" alt='Alt text\" onerror=\"alert(7)' class=\"lazy\" data-src=\"https://localhost.com/image.png\"></a></span></p>\n</div>"
+ }
+ }
- render(input, context)
+ items.each do |name, data|
+ it "does not convert dangerous #{name} into HTML" do
+ expect(render(data[:input], context)).to include(data[:output])
end
end
- context "with requested path" do
+ # `stub_feature_flags method` runs AFTER declaration of `items` above.
+ # So the spec in its current implementation won't pass.
+ # Move this test back to the items hash when removing `use_cmark_renderer` feature flag.
+ it "does not convert dangerous fenced code with inline script into HTML" do
+ input = '```mypre"><script>alert(3)</script>'
+ output = "<div>\n<div>\n<div class=\"gl-relative markdown-code-block js-markdown-code\">\n<pre class=\"code highlight js-syntax-highlight language-plaintext\" lang=\"plaintext\" v-pre=\"true\"><code></code></pre>\n<copy-code></copy-code>\n</div>\n</div>\n</div>"
+
+ expect(render(input, context)).to include(output)
+ end
+
+ it 'does not allow locked attributes to be overridden' do
input = <<~ADOC
- Document name: {docname}.
+ {counter:max-include-depth:1234}
+ <|-- {max-include-depth}
ADOC
- it "ignores {docname} when not available" do
- expect(render(input, {})).to include(input.strip)
- end
+ expect(render(input, {})).not_to include('1234')
+ end
+ end
- [
- ['/', '', 'root'],
- ['README', 'README', 'just a filename'],
- ['doc/api/', '', 'a directory'],
- ['doc/api/README.adoc', 'README', 'a complete path']
- ].each do |path, basename, desc|
- it "sets {docname} for #{desc}" do
- expect(render(input, { requested_path: path })).to include(": #{basename}.")
- end
- end
+ context "images" do
+ it "does lazy load and link image" do
+ input = 'image:https://localhost.com/image.png[]'
+ output = "<div>\n<p><span><a class=\"no-attachment-icon\" href=\"https://localhost.com/image.png\" target=\"_blank\" rel=\"noopener noreferrer\"><img src=\"data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==\" alt=\"image\" class=\"lazy\" data-src=\"https://localhost.com/image.png\"></a></span></p>\n</div>"
+ expect(render(input, context)).to include(output)
end
- context "XSS" do
- items = {
- 'link with extra attribute' => {
- input: 'link:mylink"onmouseover="alert(1)[Click Here]',
- output: "<div>\n<p><a href=\"mylink\">Click Here</a></p>\n</div>"
- },
- 'link with unsafe scheme' => {
- input: 'link:data://danger[Click Here]',
- output: "<div>\n<p><a>Click Here</a></p>\n</div>"
- },
- 'image with onerror' => {
- input: 'image:https://localhost.com/image.png[Alt text" onerror="alert(7)]',
- output: "<div>\n<p><span><a class=\"no-attachment-icon\" href=\"https://localhost.com/image.png\" target=\"_blank\" rel=\"noopener noreferrer\"><img src=\"data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==\" alt='Alt text\" onerror=\"alert(7)' class=\"lazy\" data-src=\"https://localhost.com/image.png\"></a></span></p>\n</div>"
- }
- }
+ it "does not automatically link image if link is explicitly defined" do
+ input = 'image:https://localhost.com/image.png[link=https://gitlab.com]'
+ output = "<div>\n<p><span><a href=\"https://gitlab.com\" rel=\"nofollow noreferrer noopener\" target=\"_blank\"><img src=\"data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==\" alt=\"image\" class=\"lazy\" data-src=\"https://localhost.com/image.png\"></a></span></p>\n</div>"
+ expect(render(input, context)).to include(output)
+ end
+ end
- items.each do |name, data|
- it "does not convert dangerous #{name} into HTML" do
- expect(render(data[:input], context)).to include(data[:output])
- end
- end
+ context 'with admonition' do
+ it 'preserves classes' do
+ input = <<~ADOC
+ NOTE: An admonition paragraph, like this note, grabs the reader’s attention.
+ ADOC
- # `stub_feature_flags method` runs AFTER declaration of `items` above.
- # So the spec in its current implementation won't pass.
- # Move this test back to the items hash when removing `use_cmark_renderer` feature flag.
- it "does not convert dangerous fenced code with inline script into HTML" do
- input = '```mypre"><script>alert(3)</script>'
- output =
- if Feature.enabled?(:use_cmark_renderer, default_enabled: :yaml)
- "<div>\n<div>\n<div class=\"gl-relative markdown-code-block js-markdown-code\">\n<pre class=\"code highlight js-syntax-highlight language-plaintext\" lang=\"plaintext\" v-pre=\"true\"><code></code></pre>\n<copy-code></copy-code>\n</div>\n</div>\n</div>"
- else
- "<div>\n<div>\n<div class=\"gl-relative markdown-code-block js-markdown-code\">\n<pre class=\"code highlight js-syntax-highlight language-plaintext\" lang=\"plaintext\" v-pre=\"true\"><code><span id=\"LC1\" class=\"line\" lang=\"plaintext\">\"&gt;</span></code></pre>\n<copy-code></copy-code>\n</div>\n</div>\n</div>"
- end
+ output = <<~HTML
+ <div class="admonitionblock">
+ <table>
+ <tr>
+ <td class="icon">
+ <i class="fa icon-note" title="Note"></i>
+ </td>
+ <td>
+ An admonition paragraph, like this note, grabs the reader’s attention.
+ </td>
+ </tr>
+ </table>
+ </div>
+ HTML
+
+ expect(render(input, context)).to include(output.strip)
+ end
+ end
- expect(render(input, context)).to include(output)
- end
+ context 'with passthrough' do
+ it 'removes non heading ids' do
+ input = <<~ADOC
+ ++++
+ <h2 id="foo">Title</h2>
+ ++++
+ ADOC
- it 'does not allow locked attributes to be overridden' do
- input = <<~ADOC
- {counter:max-include-depth:1234}
- <|-- {max-include-depth}
- ADOC
+ output = <<~HTML
+ <h2>Title</h2>
+ HTML
- expect(render(input, {})).not_to include('1234')
- end
+ expect(render(input, context)).to include(output.strip)
end
- context "images" do
- it "does lazy load and link image" do
- input = 'image:https://localhost.com/image.png[]'
- output = "<div>\n<p><span><a class=\"no-attachment-icon\" href=\"https://localhost.com/image.png\" target=\"_blank\" rel=\"noopener noreferrer\"><img src=\"data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==\" alt=\"image\" class=\"lazy\" data-src=\"https://localhost.com/image.png\"></a></span></p>\n</div>"
- expect(render(input, context)).to include(output)
- end
+ it 'removes non footnote def ids' do
+ input = <<~ADOC
+ ++++
+ <div id="def">Footnote definition</div>
+ ++++
+ ADOC
- it "does not automatically link image if link is explicitly defined" do
- input = 'image:https://localhost.com/image.png[link=https://gitlab.com]'
- output = "<div>\n<p><span><a href=\"https://gitlab.com\" rel=\"nofollow noreferrer noopener\" target=\"_blank\"><img src=\"data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==\" alt=\"image\" class=\"lazy\" data-src=\"https://localhost.com/image.png\"></a></span></p>\n</div>"
- expect(render(input, context)).to include(output)
- end
+ output = <<~HTML
+ <div>Footnote definition</div>
+ HTML
+
+ expect(render(input, context)).to include(output.strip)
end
- context 'with admonition' do
- it 'preserves classes' do
- input = <<~ADOC
- NOTE: An admonition paragraph, like this note, grabs the reader’s attention.
- ADOC
+ it 'removes non footnote ref ids' do
+ input = <<~ADOC
+ ++++
+ <a id="ref">Footnote reference</a>
+ ++++
+ ADOC
- output = <<~HTML
- <div class="admonitionblock">
- <table>
- <tr>
- <td class="icon">
- <i class="fa icon-note" title="Note"></i>
- </td>
- <td>
- An admonition paragraph, like this note, grabs the reader’s attention.
- </td>
- </tr>
- </table>
- </div>
- HTML
-
- expect(render(input, context)).to include(output.strip)
- end
+ output = <<~HTML
+ <a>Footnote reference</a>
+ HTML
+
+ expect(render(input, context)).to include(output.strip)
end
+ end
- context 'with passthrough' do
- it 'removes non heading ids' do
- input = <<~ADOC
- ++++
- <h2 id="foo">Title</h2>
- ++++
- ADOC
+ context 'with footnotes' do
+ it 'preserves ids and links' do
+ input = <<~ADOC
+ This paragraph has a footnote.footnote:[This is the text of the footnote.]
+ ADOC
- output = <<~HTML
- <h2>Title</h2>
- HTML
+ output = <<~HTML
+ <div>
+ <p>This paragraph has a footnote.<sup>[<a id="_footnoteref_1" href="#_footnotedef_1" title="View footnote.">1</a>]</sup></p>
+ </div>
+ <div>
+ <hr>
+ <div id="_footnotedef_1">
+ <a href="#_footnoteref_1">1</a>. This is the text of the footnote.
+ </div>
+ </div>
+ HTML
+
+ expect(render(input, context)).to include(output.strip)
+ end
+ end
- expect(render(input, context)).to include(output.strip)
- end
+ context 'with section anchors' do
+ it 'preserves ids and links' do
+ input = <<~ADOC
+ = Title
- it 'removes non footnote def ids' do
- input = <<~ADOC
- ++++
- <div id="def">Footnote definition</div>
- ++++
- ADOC
+ == First section
- output = <<~HTML
- <div>Footnote definition</div>
- HTML
+ This is the first section.
- expect(render(input, context)).to include(output.strip)
- end
+ == Second section
- it 'removes non footnote ref ids' do
- input = <<~ADOC
- ++++
- <a id="ref">Footnote reference</a>
- ++++
- ADOC
+ This is the second section.
- output = <<~HTML
- <a>Footnote reference</a>
- HTML
+ == Thunder âš¡ !
- expect(render(input, context)).to include(output.strip)
- end
+ This is the third section.
+ ADOC
+
+ output = <<~HTML
+ <h1>Title</h1>
+ <div>
+ <h2 id="user-content-first-section">
+ <a class="anchor" href="#user-content-first-section"></a>First section</h2>
+ <div>
+ <div>
+ <p>This is the first section.</p>
+ </div>
+ </div>
+ </div>
+ <div>
+ <h2 id="user-content-second-section">
+ <a class="anchor" href="#user-content-second-section"></a>Second section</h2>
+ <div>
+ <div>
+ <p>This is the second section.</p>
+ </div>
+ </div>
+ </div>
+ <div>
+ <h2 id="user-content-thunder">
+ <a class="anchor" href="#user-content-thunder"></a>Thunder âš¡ !</h2>
+ <div>
+ <div>
+ <p>This is the third section.</p>
+ </div>
+ </div>
+ </div>
+ HTML
+
+ expect(render(input, context)).to include(output.strip)
end
+ end
- context 'with footnotes' do
- it 'preserves ids and links' do
- input = <<~ADOC
- This paragraph has a footnote.footnote:[This is the text of the footnote.]
- ADOC
+ context 'with xrefs' do
+ it 'preserves ids' do
+ input = <<~ADOC
+ Learn how to xref:cross-references[use cross references].
- output = <<~HTML
- <div>
- <p>This paragraph has a footnote.<sup>[<a id="_footnoteref_1" href="#_footnotedef_1" title="View footnote.">1</a>]</sup></p>
- </div>
- <div>
- <hr>
- <div id="_footnotedef_1">
- <a href="#_footnoteref_1">1</a>. This is the text of the footnote.
- </div>
- </div>
- HTML
-
- expect(render(input, context)).to include(output.strip)
- end
+ [[cross-references]]A link to another location within an AsciiDoc document or between AsciiDoc documents is called a cross reference (also referred to as an xref).
+ ADOC
+
+ output = <<~HTML
+ <div>
+ <p>Learn how to <a href="#cross-references">use cross references</a>.</p>
+ </div>
+ <div>
+ <p><a id="user-content-cross-references"></a>A link to another location within an AsciiDoc document or between AsciiDoc documents is called a cross reference (also referred to as an xref).</p>
+ </div>
+ HTML
+
+ expect(render(input, context)).to include(output.strip)
end
+ end
- context 'with section anchors' do
- it 'preserves ids and links' do
- input = <<~ADOC
- = Title
-
- == First section
-
- This is the first section.
-
- == Second section
-
- This is the second section.
-
- == Thunder âš¡ !
-
- This is the third section.
- ADOC
+ context 'with checklist' do
+ it 'preserves classes' do
+ input = <<~ADOC
+ * [x] checked
+ * [ ] not checked
+ ADOC
- output = <<~HTML
- <h1>Title</h1>
- <div>
- <h2 id="user-content-first-section">
- <a class="anchor" href="#user-content-first-section"></a>First section</h2>
- <div>
- <div>
- <p>This is the first section.</p>
- </div>
- </div>
- </div>
- <div>
- <h2 id="user-content-second-section">
- <a class="anchor" href="#user-content-second-section"></a>Second section</h2>
- <div>
- <div>
- <p>This is the second section.</p>
- </div>
- </div>
- </div>
- <div>
- <h2 id="user-content-thunder">
- <a class="anchor" href="#user-content-thunder"></a>Thunder âš¡ !</h2>
- <div>
- <div>
- <p>This is the third section.</p>
- </div>
- </div>
- </div>
- HTML
-
- expect(render(input, context)).to include(output.strip)
- end
+ output = <<~HTML
+ <div>
+ <ul class="checklist">
+ <li>
+ <p><i class="fa fa-check-square-o"></i> checked</p>
+ </li>
+ <li>
+ <p><i class="fa fa-square-o"></i> not checked</p>
+ </li>
+ </ul>
+ </div>
+ HTML
+
+ expect(render(input, context)).to include(output.strip)
end
+ end
- context 'with xrefs' do
- it 'preserves ids' do
- input = <<~ADOC
- Learn how to xref:cross-references[use cross references].
-
- [[cross-references]]A link to another location within an AsciiDoc document or between AsciiDoc documents is called a cross reference (also referred to as an xref).
- ADOC
+ context 'with marks' do
+ it 'preserves classes' do
+ input = <<~ADOC
+ Werewolves are allergic to #cassia cinnamon#.
- output = <<~HTML
- <div>
- <p>Learn how to <a href="#cross-references">use cross references</a>.</p>
- </div>
- <div>
- <p><a id="user-content-cross-references"></a>A link to another location within an AsciiDoc document or between AsciiDoc documents is called a cross reference (also referred to as an xref).</p>
- </div>
- HTML
+ Did the werewolves read the [.small]#small print#?
- expect(render(input, context)).to include(output.strip)
- end
+ Where did all the [.underline.small]#cores# run off to?
+
+ We need [.line-through]#ten# make that twenty VMs.
+
+ [.big]##O##nce upon an infinite loop.
+ ADOC
+
+ output = <<~HTML
+ <div>
+ <p>Werewolves are allergic to <mark>cassia cinnamon</mark>.</p>
+ </div>
+ <div>
+ <p>Did the werewolves read the <span class="small">small print</span>?</p>
+ </div>
+ <div>
+ <p>Where did all the <span class="underline small">cores</span> run off to?</p>
+ </div>
+ <div>
+ <p>We need <span class="line-through">ten</span> make that twenty VMs.</p>
+ </div>
+ <div>
+ <p><span class="big">O</span>nce upon an infinite loop.</p>
+ </div>
+ HTML
+
+ expect(render(input, context)).to include(output.strip)
end
+ end
- context 'with checklist' do
- it 'preserves classes' do
- input = <<~ADOC
- * [x] checked
- * [ ] not checked
- ADOC
+ context 'with fenced block' do
+ it 'highlights syntax' do
+ input = <<~ADOC
+ ```js
+ console.log('hello world')
+ ```
+ ADOC
- output = <<~HTML
- <div>
- <ul class="checklist">
- <li>
- <p><i class="fa fa-check-square-o"></i> checked</p>
- </li>
- <li>
- <p><i class="fa fa-square-o"></i> not checked</p>
- </li>
- </ul>
- </div>
- HTML
-
- expect(render(input, context)).to include(output.strip)
- end
+ output = <<~HTML
+ <div>
+ <div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre class="code highlight js-syntax-highlight language-javascript" lang="javascript" v-pre="true"><code><span id="LC1" class="line" lang="javascript"><span class="nx">console</span><span class="p">.</span><span class="nx">log</span><span class="p">(</span><span class="dl">'</span><span class="s1">hello world</span><span class="dl">'</span><span class="p">)</span></span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ </div>
+ </div>
+ HTML
+
+ expect(render(input, context)).to include(output.strip)
end
+ end
- context 'with marks' do
- it 'preserves classes' do
- input = <<~ADOC
- Werewolves are allergic to #cassia cinnamon#.
-
- Did the werewolves read the [.small]#small print#?
-
- Where did all the [.underline.small]#cores# run off to?
-
- We need [.line-through]#ten# make that twenty VMs.
-
- [.big]##O##nce upon an infinite loop.
- ADOC
+ context 'with listing block' do
+ it 'highlights syntax' do
+ input = <<~ADOC
+ [source,c++]
+ .class.cpp
+ ----
+ #include <stdio.h>
- output = <<~HTML
- <div>
- <p>Werewolves are allergic to <mark>cassia cinnamon</mark>.</p>
- </div>
- <div>
- <p>Did the werewolves read the <span class="small">small print</span>?</p>
- </div>
- <div>
- <p>Where did all the <span class="underline small">cores</span> run off to?</p>
- </div>
- <div>
- <p>We need <span class="line-through">ten</span> make that twenty VMs.</p>
- </div>
- <div>
- <p><span class="big">O</span>nce upon an infinite loop.</p>
- </div>
- HTML
-
- expect(render(input, context)).to include(output.strip)
- end
+ for (int i = 0; i < 5; i++) {
+ std::cout<<"*"<<std::endl;
+ }
+ ----
+ ADOC
+
+ output = <<~HTML
+ <div>
+ <div>class.cpp</div>
+ <div>
+ <div class="gl-relative markdown-code-block js-markdown-code">
+ <pre class="code highlight js-syntax-highlight language-cpp" lang="cpp" v-pre="true"><code><span id="LC1" class="line" lang="cpp"><span class="cp">#include &lt;stdio.h&gt;</span></span>
+ <span id="LC2" class="line" lang="cpp"></span>
+ <span id="LC3" class="line" lang="cpp"><span class="k">for</span> <span class="p">(</span><span class="kt">int</span> <span class="n">i</span> <span class="o">=</span> <span class="mi">0</span><span class="p">;</span> <span class="n">i</span> <span class="o">&lt;</span> <span class="mi">5</span><span class="p">;</span> <span class="n">i</span><span class="o">++</span><span class="p">)</span> <span class="p">{</span></span>
+ <span id="LC4" class="line" lang="cpp"> <span class="n">std</span><span class="o">::</span><span class="n">cout</span><span class="o">&lt;&lt;</span><span class="s">"*"</span><span class="o">&lt;&lt;</span><span class="n">std</span><span class="o">::</span><span class="n">endl</span><span class="p">;</span></span>
+ <span id="LC5" class="line" lang="cpp"><span class="p">}</span></span></code></pre>
+ <copy-code></copy-code>
+ </div>
+ </div>
+ </div>
+ HTML
+
+ expect(render(input, context)).to include(output.strip)
end
+ end
- context 'with fenced block' do
- it 'highlights syntax' do
- input = <<~ADOC
- ```js
- console.log('hello world')
- ```
- ADOC
+ context 'with stem block' do
+ it 'does not apply syntax highlighting' do
+ input = <<~ADOC
+ [stem]
+ ++++
+ \sqrt{4} = 2
+ ++++
+ ADOC
- output = <<~HTML
- <div>
- <div>
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre class="code highlight js-syntax-highlight language-javascript" lang="javascript" v-pre="true"><code><span id="LC1" class="line" lang="javascript"><span class="nx">console</span><span class="p">.</span><span class="nx">log</span><span class="p">(</span><span class="dl">'</span><span class="s1">hello world</span><span class="dl">'</span><span class="p">)</span></span></code></pre>
- <copy-code></copy-code>
- </div>
- </div>
- </div>
- HTML
-
- expect(render(input, context)).to include(output.strip)
- end
+ output = "<div>\n<div>\n\\$ qrt{4} = 2\\$\n</div>\n</div>"
+
+ expect(render(input, context)).to include(output)
end
+ end
- context 'with listing block' do
- it 'highlights syntax' do
- input = <<~ADOC
- [source,c++]
- .class.cpp
- ----
- #include <stdio.h>
-
- for (int i = 0; i < 5; i++) {
- std::cout<<"*"<<std::endl;
- }
- ----
- ADOC
+ context 'external links' do
+ it 'adds the `rel` attribute to the link' do
+ output = render('link:https://google.com[Google]', context)
- output = <<~HTML
- <div>
- <div>class.cpp</div>
- <div>
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre class="code highlight js-syntax-highlight language-cpp" lang="cpp" v-pre="true"><code><span id="LC1" class="line" lang="cpp"><span class="cp">#include &lt;stdio.h&gt;</span></span>
- <span id="LC2" class="line" lang="cpp"></span>
- <span id="LC3" class="line" lang="cpp"><span class="k">for</span> <span class="p">(</span><span class="kt">int</span> <span class="n">i</span> <span class="o">=</span> <span class="mi">0</span><span class="p">;</span> <span class="n">i</span> <span class="o">&lt;</span> <span class="mi">5</span><span class="p">;</span> <span class="n">i</span><span class="o">++</span><span class="p">)</span> <span class="p">{</span></span>
- <span id="LC4" class="line" lang="cpp"> <span class="n">std</span><span class="o">::</span><span class="n">cout</span><span class="o">&lt;&lt;</span><span class="s">"*"</span><span class="o">&lt;&lt;</span><span class="n">std</span><span class="o">::</span><span class="n">endl</span><span class="p">;</span></span>
- <span id="LC5" class="line" lang="cpp"><span class="p">}</span></span></code></pre>
- <copy-code></copy-code>
- </div>
- </div>
- </div>
- HTML
-
- expect(render(input, context)).to include(output.strip)
- end
+ expect(output).to include('rel="nofollow noreferrer noopener"')
end
+ end
- context 'with stem block' do
- it 'does not apply syntax highlighting' do
- input = <<~ADOC
- [stem]
- ++++
- \sqrt{4} = 2
- ++++
- ADOC
+ context 'LaTex code' do
+ it 'adds class js-render-math to the output' do
+ input = <<~MD
+ :stem: latexmath
- output = "<div>\n<div>\n\\$ qrt{4} = 2\\$\n</div>\n</div>"
+ [stem]
+ ++++
+ \sqrt{4} = 2
+ ++++
- expect(render(input, context)).to include(output)
- end
+ another part
+
+ [latexmath]
+ ++++
+ \beta_x \gamma
+ ++++
+
+ stem:[2+2] is 4
+ MD
+
+ expect(render(input, context)).to include('<pre data-math-style="display" class="code math js-render-math"><code>eta_x gamma</code></pre>')
+ expect(render(input, context)).to include('<p><code data-math-style="inline" class="code math js-render-math">2+2</code> is 4</p>')
end
+ end
- context 'external links' do
- it 'adds the `rel` attribute to the link' do
- output = render('link:https://google.com[Google]', context)
+ context 'outfilesuffix' do
+ it 'defaults to adoc' do
+ output = render("Inter-document reference <<README.adoc#>>", context)
- expect(output).to include('rel="nofollow noreferrer noopener"')
- end
+ expect(output).to include("a href=\"README.adoc\"")
end
+ end
- context 'LaTex code' do
- it 'adds class js-render-math to the output' do
- input = <<~MD
- :stem: latexmath
-
- [stem]
- ++++
- \sqrt{4} = 2
- ++++
-
- another part
-
- [latexmath]
- ++++
- \beta_x \gamma
- ++++
-
- stem:[2+2] is 4
- MD
-
- expect(render(input, context)).to include('<pre data-math-style="display" class="code math js-render-math"><code>eta_x gamma</code></pre>')
- expect(render(input, context)).to include('<p><code data-math-style="inline" class="code math js-render-math">2+2</code> is 4</p>')
- end
+ context 'with mermaid diagrams' do
+ it 'adds class js-render-mermaid to the output' do
+ input = <<~MD
+ [mermaid]
+ ....
+ graph LR
+ A[Square Rect] -- Link text --> B((Circle))
+ A --> C(Round Rect)
+ B --> D{Rhombus}
+ C --> D
+ ....
+ MD
+
+ output = <<~HTML
+ <pre data-mermaid-style="display" class="js-render-mermaid">graph LR
+ A[Square Rect] -- Link text --&gt; B((Circle))
+ A --&gt; C(Round Rect)
+ B --&gt; D{Rhombus}
+ C --&gt; D</pre>
+ HTML
+
+ expect(render(input, context)).to include(output.strip)
end
- context 'outfilesuffix' do
- it 'defaults to adoc' do
- output = render("Inter-document reference <<README.adoc#>>", context)
+ it 'applies subs in diagram block' do
+ input = <<~MD
+ :class-name: AveryLongClass
- expect(output).to include("a href=\"README.adoc\"")
- end
- end
+ [mermaid,subs=+attributes]
+ ....
+ classDiagram
+ Class01 <|-- {class-name} : Cool
+ ....
+ MD
- context 'with mermaid diagrams' do
- it 'adds class js-render-mermaid to the output' do
- input = <<~MD
- [mermaid]
- ....
- graph LR
- A[Square Rect] -- Link text --> B((Circle))
- A --> C(Round Rect)
- B --> D{Rhombus}
- C --> D
- ....
- MD
-
- output = <<~HTML
- <pre data-mermaid-style="display" class="js-render-mermaid">graph LR
- A[Square Rect] -- Link text --&gt; B((Circle))
- A --&gt; C(Round Rect)
- B --&gt; D{Rhombus}
- C --&gt; D</pre>
- HTML
-
- expect(render(input, context)).to include(output.strip)
- end
+ output = <<~HTML
+ <pre data-mermaid-style="display" class="js-render-mermaid">classDiagram
+ Class01 &lt;|-- AveryLongClass : Cool</pre>
+ HTML
- it 'applies subs in diagram block' do
- input = <<~MD
- :class-name: AveryLongClass
-
- [mermaid,subs=+attributes]
- ....
- classDiagram
- Class01 <|-- {class-name} : Cool
- ....
- MD
-
- output = <<~HTML
- <pre data-mermaid-style="display" class="js-render-mermaid">classDiagram
- Class01 &lt;|-- AveryLongClass : Cool</pre>
- HTML
-
- expect(render(input, context)).to include(output.strip)
- end
+ expect(render(input, context)).to include(output.strip)
end
+ end
- context 'with Kroki enabled' do
- before do
- allow_any_instance_of(ApplicationSetting).to receive(:kroki_enabled).and_return(true)
- allow_any_instance_of(ApplicationSetting).to receive(:kroki_url).and_return('https://kroki.io')
- end
-
- it 'converts a graphviz diagram to image' do
- input = <<~ADOC
- [graphviz]
- ....
- digraph G {
- Hello->World
- }
- ....
- ADOC
+ context 'with Kroki enabled' do
+ before do
+ allow_any_instance_of(ApplicationSetting).to receive(:kroki_enabled).and_return(true)
+ allow_any_instance_of(ApplicationSetting).to receive(:kroki_url).and_return('https://kroki.io')
+ end
- output = <<~HTML
- <div>
- <div>
- <a class="no-attachment-icon" href="https://kroki.io/graphviz/svg/eNpLyUwvSizIUHBXqOZSUPBIzcnJ17ULzy_KSeGqBQCEzQka" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="Diagram" class="lazy" data-src="https://kroki.io/graphviz/svg/eNpLyUwvSizIUHBXqOZSUPBIzcnJ17ULzy_KSeGqBQCEzQka"></a>
- </div>
- </div>
- HTML
+ it 'converts a graphviz diagram to image' do
+ input = <<~ADOC
+ [graphviz]
+ ....
+ digraph G {
+ Hello->World
+ }
+ ....
+ ADOC
- expect(render(input, context)).to include(output.strip)
- end
+ output = <<~HTML
+ <div>
+ <div>
+ <a class="no-attachment-icon" href="https://kroki.io/graphviz/svg/eNpLyUwvSizIUHBXqOZSUPBIzcnJ17ULzy_KSeGqBQCEzQka" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="Diagram" class="lazy" data-src="https://kroki.io/graphviz/svg/eNpLyUwvSizIUHBXqOZSUPBIzcnJ17ULzy_KSeGqBQCEzQka"></a>
+ </div>
+ </div>
+ HTML
- it 'does not convert a blockdiag diagram to image' do
- input = <<~ADOC
- [blockdiag]
- ....
- blockdiag {
- Kroki -> generates -> "Block diagrams";
- Kroki -> is -> "very easy!";
-
- Kroki [color = "greenyellow"];
- "Block diagrams" [color = "pink"];
- "very easy!" [color = "orange"];
- }
- ....
- ADOC
+ expect(render(input, context)).to include(output.strip)
+ end
- output = <<~HTML
- <div>
- <div>
- <pre>blockdiag {
- Kroki -&gt; generates -&gt; "Block diagrams";
- Kroki -&gt; is -&gt; "very easy!";
-
- Kroki [color = "greenyellow"];
- "Block diagrams" [color = "pink"];
- "very easy!" [color = "orange"];
- }</pre>
- </div>
- </div>
- HTML
-
- expect(render(input, context)).to include(output.strip)
- end
+ it 'does not convert a blockdiag diagram to image' do
+ input = <<~ADOC
+ [blockdiag]
+ ....
+ blockdiag {
+ Kroki -> generates -> "Block diagrams";
+ Kroki -> is -> "very easy!";
+
+ Kroki [color = "greenyellow"];
+ "Block diagrams" [color = "pink"];
+ "very easy!" [color = "orange"];
+ }
+ ....
+ ADOC
- it 'does not allow kroki-plantuml-include to be overridden' do
- input = <<~ADOC
- [plantuml, test="{counter:kroki-plantuml-include:/etc/passwd}", format="png"]
- ....
- class BlockProcessor
-
- BlockProcessor <|-- {counter:kroki-plantuml-include}
- ....
- ADOC
+ output = <<~HTML
+ <div>
+ <div>
+ <pre>blockdiag {
+ Kroki -&gt; generates -&gt; "Block diagrams";
+ Kroki -&gt; is -&gt; "very easy!";
+
+ Kroki [color = "greenyellow"];
+ "Block diagrams" [color = "pink"];
+ "very easy!" [color = "orange"];
+ }</pre>
+ </div>
+ </div>
+ HTML
+
+ expect(render(input, context)).to include(output.strip)
+ end
- output = <<~HTML
- <div>
- <div>
- <a class=\"no-attachment-icon\" href=\"https://kroki.io/plantuml/png/eNpLzkksLlZwyslPzg4oyk9OLS7OL-LiQuUr2NTo6ipUJ-eX5pWkFlllF-VnZ-oW5CTmlZTm5uhm5iXnlKak1gIABQEb8A==\" target=\"_blank\" rel=\"noopener noreferrer\"><img src=\"data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==\" alt=\"Diagram\" class=\"lazy\" data-src=\"https://kroki.io/plantuml/png/eNpLzkksLlZwyslPzg4oyk9OLS7OL-LiQuUr2NTo6ipUJ-eX5pWkFlllF-VnZ-oW5CTmlZTm5uhm5iXnlKak1gIABQEb8A==\"></a>
- </div>
- </div>
- HTML
+ it 'does not allow kroki-plantuml-include to be overridden' do
+ input = <<~ADOC
+ [plantuml, test="{counter:kroki-plantuml-include:/etc/passwd}", format="png"]
+ ....
+ class BlockProcessor
- expect(render(input, {})).to include(output.strip)
- end
+ BlockProcessor <|-- {counter:kroki-plantuml-include}
+ ....
+ ADOC
- it 'does not allow kroki-server-url to be overridden' do
- input = <<~ADOC
- [plantuml, test="{counter:kroki-server-url:evilsite}", format="png"]
- ....
- class BlockProcessor
-
- BlockProcessor
- ....
- ADOC
+ output = <<~HTML
+ <div>
+ <div>
+ <a class=\"no-attachment-icon\" href=\"https://kroki.io/plantuml/png/eNpLzkksLlZwyslPzg4oyk9OLS7OL-LiQuUr2NTo6ipUJ-eX5pWkFlllF-VnZ-oW5CTmlZTm5uhm5iXnlKak1gIABQEb8A==\" target=\"_blank\" rel=\"noopener noreferrer\"><img src=\"data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==\" alt=\"Diagram\" class=\"lazy\" data-src=\"https://kroki.io/plantuml/png/eNpLzkksLlZwyslPzg4oyk9OLS7OL-LiQuUr2NTo6ipUJ-eX5pWkFlllF-VnZ-oW5CTmlZTm5uhm5iXnlKak1gIABQEb8A==\"></a>
+ </div>
+ </div>
+ HTML
- expect(render(input, {})).not_to include('evilsite')
- end
+ expect(render(input, {})).to include(output.strip)
end
- context 'with Kroki and BlockDiag (additional format) enabled' do
- before do
- allow_any_instance_of(ApplicationSetting).to receive(:kroki_enabled).and_return(true)
- allow_any_instance_of(ApplicationSetting).to receive(:kroki_url).and_return('https://kroki.io')
- allow_any_instance_of(ApplicationSetting).to receive(:kroki_formats_blockdiag).and_return(true)
- end
-
- it 'converts a blockdiag diagram to image' do
- input = <<~ADOC
- [blockdiag]
- ....
- blockdiag {
- Kroki -> generates -> "Block diagrams";
- Kroki -> is -> "very easy!";
-
- Kroki [color = "greenyellow"];
- "Block diagrams" [color = "pink"];
- "very easy!" [color = "orange"];
- }
- ....
- ADOC
+ it 'does not allow kroki-server-url to be overridden' do
+ input = <<~ADOC
+ [plantuml, test="{counter:kroki-server-url:evilsite}", format="png"]
+ ....
+ class BlockProcessor
- output = <<~HTML
- <div>
- <div>
- <a class="no-attachment-icon" href="https://kroki.io/blockdiag/svg/eNpdzDEKQjEQhOHeU4zpPYFoYesRxGJ9bwghMSsbUYJ4d10UCZbDfPynolOek0Q8FsDeNCestoisNLmy-Qg7R3Blcm5hPcr0ITdaB6X15fv-_YdJixo2CNHI2lmK3sPRA__RwV5SzV80ZAegJjXSyfMFptc71w==" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="Diagram" class="lazy" data-src="https://kroki.io/blockdiag/svg/eNpdzDEKQjEQhOHeU4zpPYFoYesRxGJ9bwghMSsbUYJ4d10UCZbDfPynolOek0Q8FsDeNCestoisNLmy-Qg7R3Blcm5hPcr0ITdaB6X15fv-_YdJixo2CNHI2lmK3sPRA__RwV5SzV80ZAegJjXSyfMFptc71w=="></a>
- </div>
- </div>
- HTML
+ BlockProcessor
+ ....
+ ADOC
- expect(render(input, context)).to include(output.strip)
- end
+ expect(render(input, {})).not_to include('evilsite')
end
end
- context 'with project' do
- let(:context) do
- {
- commit: commit,
- project: project,
- ref: ref,
- requested_path: requested_path
- }
+ context 'with Kroki and BlockDiag (additional format) enabled' do
+ before do
+ allow_any_instance_of(ApplicationSetting).to receive(:kroki_enabled).and_return(true)
+ allow_any_instance_of(ApplicationSetting).to receive(:kroki_url).and_return('https://kroki.io')
+ allow_any_instance_of(ApplicationSetting).to receive(:kroki_formats_blockdiag).and_return(true)
end
- let(:commit) { project.commit(ref) }
- let(:project) { create(:project, :repository) }
- let(:ref) { 'asciidoc' }
- let(:requested_path) { '/' }
+ it 'converts a blockdiag diagram to image' do
+ input = <<~ADOC
+ [blockdiag]
+ ....
+ blockdiag {
+ Kroki -> generates -> "Block diagrams";
+ Kroki -> is -> "very easy!";
+
+ Kroki [color = "greenyellow"];
+ "Block diagrams" [color = "pink"];
+ "very easy!" [color = "orange"];
+ }
+ ....
+ ADOC
- context 'include directive' do
- subject(:output) { render(input, context) }
+ output = <<~HTML
+ <div>
+ <div>
+ <a class="no-attachment-icon" href="https://kroki.io/blockdiag/svg/eNpdzDEKQjEQhOHeU4zpPYFoYesRxGJ9bwghMSsbUYJ4d10UCZbDfPynolOek0Q8FsDeNCestoisNLmy-Qg7R3Blcm5hPcr0ITdaB6X15fv-_YdJixo2CNHI2lmK3sPRA__RwV5SzV80ZAegJjXSyfMFptc71w==" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="Diagram" class="lazy" data-src="https://kroki.io/blockdiag/svg/eNpdzDEKQjEQhOHeU4zpPYFoYesRxGJ9bwghMSsbUYJ4d10UCZbDfPynolOek0Q8FsDeNCestoisNLmy-Qg7R3Blcm5hPcr0ITdaB6X15fv-_YdJixo2CNHI2lmK3sPRA__RwV5SzV80ZAegJjXSyfMFptc71w=="></a>
+ </div>
+ </div>
+ HTML
- let(:input) { "Include this:\n\ninclude::#{include_path}[]" }
+ expect(render(input, context)).to include(output.strip)
+ end
+ end
+ end
- before do
- current_file = requested_path
- current_file += 'README.adoc' if requested_path.end_with? '/'
+ context 'with project' do
+ let(:context) do
+ {
+ commit: commit,
+ project: project,
+ ref: ref,
+ requested_path: requested_path
+ }
+ end
- create_file(current_file, "= AsciiDoc\n")
- end
+ let(:commit) { project.commit(ref) }
+ let(:project) { create(:project, :repository) }
+ let(:ref) { 'asciidoc' }
+ let(:requested_path) { '/' }
- def many_includes(target)
- Array.new(10, "include::#{target}[]").join("\n")
- end
+ context 'include directive' do
+ subject(:output) { render(input, context) }
- context 'cyclic imports' do
- before do
- create_file('doc/api/a.adoc', many_includes('b.adoc'))
- create_file('doc/api/b.adoc', many_includes('a.adoc'))
- end
+ let(:input) { "Include this:\n\ninclude::#{include_path}[]" }
- let(:include_path) { 'a.adoc' }
- let(:requested_path) { 'doc/api/README.md' }
+ before do
+ current_file = requested_path
+ current_file += 'README.adoc' if requested_path.end_with? '/'
- it 'completes successfully' do
- is_expected.to include('<p>Include this:</p>')
- end
+ create_file(current_file, "= AsciiDoc\n")
+ end
+
+ def many_includes(target)
+ Array.new(10, "include::#{target}[]").join("\n")
+ end
+
+ context 'cyclic imports' do
+ before do
+ create_file('doc/api/a.adoc', many_includes('b.adoc'))
+ create_file('doc/api/b.adoc', many_includes('a.adoc'))
end
- context 'with path to non-existing file' do
- let(:include_path) { 'not-exists.adoc' }
+ let(:include_path) { 'a.adoc' }
+ let(:requested_path) { 'doc/api/README.md' }
- it 'renders Unresolved directive placeholder' do
- is_expected.to include("<strong>[ERROR: include::#{include_path}[] - unresolved directive]</strong>")
- end
+ it 'completes successfully' do
+ is_expected.to include('<p>Include this:</p>')
end
+ end
- shared_examples :invalid_include do
- let(:include_path) { 'dk.png' }
+ context 'with path to non-existing file' do
+ let(:include_path) { 'not-exists.adoc' }
- before do
- allow(project.repository).to receive(:blob_at).and_return(blob)
- end
+ it 'renders Unresolved directive placeholder' do
+ is_expected.to include("<strong>[ERROR: include::#{include_path}[] - unresolved directive]</strong>")
+ end
+ end
- it 'does not read the blob' do
- expect(blob).not_to receive(:data)
- end
+ shared_examples :invalid_include do
+ let(:include_path) { 'dk.png' }
- it 'renders Unresolved directive placeholder' do
- is_expected.to include("<strong>[ERROR: include::#{include_path}[] - unresolved directive]</strong>")
- end
+ before do
+ allow(project.repository).to receive(:blob_at).and_return(blob)
end
- context 'with path to a binary file' do
- let(:blob) { fake_blob(path: 'dk.png', binary: true) }
+ it 'does not read the blob' do
+ expect(blob).not_to receive(:data)
+ end
- include_examples :invalid_include
+ it 'renders Unresolved directive placeholder' do
+ is_expected.to include("<strong>[ERROR: include::#{include_path}[] - unresolved directive]</strong>")
end
+ end
- context 'with path to file in external storage' do
- let(:blob) { fake_blob(path: 'dk.png', lfs: true) }
+ context 'with path to a binary file' do
+ let(:blob) { fake_blob(path: 'dk.png', binary: true) }
- before do
- allow(Gitlab.config.lfs).to receive(:enabled).and_return(true)
- project.update_attribute(:lfs_enabled, true)
- end
+ include_examples :invalid_include
+ end
- include_examples :invalid_include
+ context 'with path to file in external storage' do
+ let(:blob) { fake_blob(path: 'dk.png', lfs: true) }
+
+ before do
+ allow(Gitlab.config.lfs).to receive(:enabled).and_return(true)
+ project.update_attribute(:lfs_enabled, true)
end
- context 'with path to a textual file' do
- let(:include_path) { 'sample.adoc' }
+ include_examples :invalid_include
+ end
- before do
- create_file(file_path, "Content from #{include_path}")
- end
+ context 'with path to a textual file' do
+ let(:include_path) { 'sample.adoc' }
- shared_examples :valid_include do
- [
- ['/doc/sample.adoc', 'doc/sample.adoc', 'absolute path'],
- ['sample.adoc', 'doc/api/sample.adoc', 'relative path'],
- ['./sample.adoc', 'doc/api/sample.adoc', 'relative path with leading ./'],
- ['../sample.adoc', 'doc/sample.adoc', 'relative path to a file up one directory'],
- ['../../sample.adoc', 'sample.adoc', 'relative path for a file up multiple directories']
- ].each do |include_path_, file_path_, desc|
- context "the file is specified by #{desc}" do
- let(:include_path) { include_path_ }
- let(:file_path) { file_path_ }
-
- it 'includes content of the file' do
- is_expected.to include('<p>Include this:</p>')
- is_expected.to include("<p>Content from #{include_path}</p>")
- end
+ before do
+ create_file(file_path, "Content from #{include_path}")
+ end
+
+ shared_examples :valid_include do
+ [
+ ['/doc/sample.adoc', 'doc/sample.adoc', 'absolute path'],
+ ['sample.adoc', 'doc/api/sample.adoc', 'relative path'],
+ ['./sample.adoc', 'doc/api/sample.adoc', 'relative path with leading ./'],
+ ['../sample.adoc', 'doc/sample.adoc', 'relative path to a file up one directory'],
+ ['../../sample.adoc', 'sample.adoc', 'relative path for a file up multiple directories']
+ ].each do |include_path_, file_path_, desc|
+ context "the file is specified by #{desc}" do
+ let(:include_path) { include_path_ }
+ let(:file_path) { file_path_ }
+
+ it 'includes content of the file' do
+ is_expected.to include('<p>Include this:</p>')
+ is_expected.to include("<p>Content from #{include_path}</p>")
end
end
end
+ end
- context 'when requested path is a file in the repo' do
- let(:requested_path) { 'doc/api/README.adoc' }
+ context 'when requested path is a file in the repo' do
+ let(:requested_path) { 'doc/api/README.adoc' }
- include_examples :valid_include
+ include_examples :valid_include
- context 'without a commit (only ref)' do
- let(:commit) { nil }
+ context 'without a commit (only ref)' do
+ let(:commit) { nil }
- include_examples :valid_include
- end
+ include_examples :valid_include
end
+ end
- context 'when requested path is a directory in the repo' do
- let(:requested_path) { 'doc/api/' }
+ context 'when requested path is a directory in the repo' do
+ let(:requested_path) { 'doc/api/' }
- include_examples :valid_include
+ include_examples :valid_include
- context 'without a commit (only ref)' do
- let(:commit) { nil }
+ context 'without a commit (only ref)' do
+ let(:commit) { nil }
- include_examples :valid_include
- end
+ include_examples :valid_include
end
end
+ end
- context 'when repository is passed into the context' do
- let(:wiki_repo) { project.wiki.repository }
- let(:include_path) { 'wiki_file.adoc' }
+ context 'when repository is passed into the context' do
+ let(:wiki_repo) { project.wiki.repository }
+ let(:include_path) { 'wiki_file.adoc' }
+ before do
+ project.create_wiki
+ context.merge!(repository: wiki_repo)
+ end
+
+ context 'when the file exists' do
before do
- project.create_wiki
- context.merge!(repository: wiki_repo)
+ create_file(include_path, 'Content from wiki', repository: wiki_repo)
end
- context 'when the file exists' do
- before do
- create_file(include_path, 'Content from wiki', repository: wiki_repo)
- end
+ it { is_expected.to include('<p>Content from wiki</p>') }
+ end
- it { is_expected.to include('<p>Content from wiki</p>') }
- end
+ context 'when the file does not exist' do
+ it { is_expected.to include("[ERROR: include::#{include_path}[] - unresolved directive]")}
+ end
+ end
- context 'when the file does not exist' do
- it { is_expected.to include("[ERROR: include::#{include_path}[] - unresolved directive]")}
- end
+ context 'recursive includes with relative paths' do
+ let(:input) do
+ <<~ADOC
+ Source: requested file
+
+ include::doc/README.adoc[]
+
+ include::license.adoc[]
+ ADOC
end
- context 'recursive includes with relative paths' do
- let(:input) do
- <<~ADOC
- Source: requested file
-
- include::doc/README.adoc[]
-
- include::license.adoc[]
- ADOC
- end
+ before do
+ create_file 'doc/README.adoc', <<~ADOC
+ Source: doc/README.adoc
- before do
- create_file 'doc/README.adoc', <<~ADOC
- Source: doc/README.adoc
-
- include::../license.adoc[]
-
- include::api/hello.adoc[]
- ADOC
- create_file 'license.adoc', <<~ADOC
- Source: license.adoc
- ADOC
- create_file 'doc/api/hello.adoc', <<~ADOC
- Source: doc/api/hello.adoc
-
- include::./common.adoc[]
- ADOC
- create_file 'doc/api/common.adoc', <<~ADOC
- Source: doc/api/common.adoc
- ADOC
- end
+ include::../license.adoc[]
- it 'includes content of the included files recursively' do
- expect(output.gsub(/<[^>]+>/, '').gsub(/\n\s*/, "\n").strip).to eq <<~ADOC.strip
- Source: requested file
- Source: doc/README.adoc
- Source: license.adoc
- Source: doc/api/hello.adoc
- Source: doc/api/common.adoc
- Source: license.adoc
- ADOC
- end
+ include::api/hello.adoc[]
+ ADOC
+ create_file 'license.adoc', <<~ADOC
+ Source: license.adoc
+ ADOC
+ create_file 'doc/api/hello.adoc', <<~ADOC
+ Source: doc/api/hello.adoc
+
+ include::./common.adoc[]
+ ADOC
+ create_file 'doc/api/common.adoc', <<~ADOC
+ Source: doc/api/common.adoc
+ ADOC
end
- def create_file(path, content, repository: project.repository)
- repository.create_file(project.creator, path, content,
- message: "Add #{path}", branch_name: 'asciidoc')
+ it 'includes content of the included files recursively' do
+ expect(output.gsub(/<[^>]+>/, '').gsub(/\n\s*/, "\n").strip).to eq <<~ADOC.strip
+ Source: requested file
+ Source: doc/README.adoc
+ Source: license.adoc
+ Source: doc/api/hello.adoc
+ Source: doc/api/common.adoc
+ Source: license.adoc
+ ADOC
end
end
- end
- end
- context 'using ruby-based HTML renderer' do
- before do
- stub_feature_flags(use_cmark_renderer: false)
- end
-
- it_behaves_like 'renders correct asciidoc'
- end
-
- context 'using c-based HTML renderer' do
- before do
- stub_feature_flags(use_cmark_renderer: true)
+ def create_file(path, content, repository: project.repository)
+ repository.create_file(project.creator, path, content,
+ message: "Add #{path}", branch_name: 'asciidoc')
+ end
end
-
- it_behaves_like 'renders correct asciidoc'
end
def render(*args)
diff --git a/spec/lib/gitlab/auth/auth_finders_spec.rb b/spec/lib/gitlab/auth/auth_finders_spec.rb
index f1c891b2adb..e985f66bfe9 100644
--- a/spec/lib/gitlab/auth/auth_finders_spec.rb
+++ b/spec/lib/gitlab/auth/auth_finders_spec.rb
@@ -939,21 +939,19 @@ RSpec.describe Gitlab::Auth::AuthFinders do
end
describe '#cluster_agent_token_from_authorization_token' do
- let_it_be(:agent_token, freeze: true) { create(:cluster_agent_token) }
+ let_it_be(:agent_token) { create(:cluster_agent_token) }
+
+ subject { cluster_agent_token_from_authorization_token }
context 'when route_setting is empty' do
- it 'returns nil' do
- expect(cluster_agent_token_from_authorization_token).to be_nil
- end
+ it { is_expected.to be_nil }
end
context 'when route_setting allows cluster agent token' do
let(:route_authentication_setting) { { cluster_agent_token_allowed: true } }
context 'Authorization header is empty' do
- it 'returns nil' do
- expect(cluster_agent_token_from_authorization_token).to be_nil
- end
+ it { is_expected.to be_nil }
end
context 'Authorization header is incorrect' do
@@ -961,9 +959,7 @@ RSpec.describe Gitlab::Auth::AuthFinders do
request.headers['Authorization'] = 'Bearer ABCD'
end
- it 'returns nil' do
- expect(cluster_agent_token_from_authorization_token).to be_nil
- end
+ it { is_expected.to be_nil }
end
context 'Authorization header is malformed' do
@@ -971,9 +967,7 @@ RSpec.describe Gitlab::Auth::AuthFinders do
request.headers['Authorization'] = 'Bearer'
end
- it 'returns nil' do
- expect(cluster_agent_token_from_authorization_token).to be_nil
- end
+ it { is_expected.to be_nil }
end
context 'Authorization header matches agent token' do
@@ -981,8 +975,14 @@ RSpec.describe Gitlab::Auth::AuthFinders do
request.headers['Authorization'] = "Bearer #{agent_token.token}"
end
- it 'returns the agent token' do
- expect(cluster_agent_token_from_authorization_token).to eq(agent_token)
+ it { is_expected.to eq(agent_token) }
+
+ context 'agent token has been revoked' do
+ before do
+ agent_token.revoked!
+ end
+
+ it { is_expected.to be_nil }
end
end
end
diff --git a/spec/lib/gitlab/auth/ldap/config_spec.rb b/spec/lib/gitlab/auth/ldap/config_spec.rb
index 7a657cce597..3039fce6141 100644
--- a/spec/lib/gitlab/auth/ldap/config_spec.rb
+++ b/spec/lib/gitlab/auth/ldap/config_spec.rb
@@ -121,10 +121,40 @@ AtlErSqafbECNDSwS5BX8yDpu5yRBJ4xegO/rNlmb8ICRYkuJapD1xXicFOsmfUK
expect(config.adapter_options).to eq(
host: 'ldap.example.com',
port: 386,
+ hosts: nil,
encryption: nil
)
end
+ it 'includes failover hosts when set' do
+ stub_ldap_config(
+ options: {
+ 'host' => 'ldap.example.com',
+ 'port' => 686,
+ 'hosts' => [
+ ['ldap1.example.com', 636],
+ ['ldap2.example.com', 636]
+ ],
+ 'encryption' => 'simple_tls',
+ 'verify_certificates' => true,
+ 'bind_dn' => 'uid=admin,dc=example,dc=com',
+ 'password' => 'super_secret'
+ }
+ )
+
+ expect(config.adapter_options).to include({
+ hosts: [
+ ['ldap1.example.com', 636],
+ ['ldap2.example.com', 636]
+ ],
+ auth: {
+ method: :simple,
+ username: 'uid=admin,dc=example,dc=com',
+ password: 'super_secret'
+ }
+ })
+ end
+
it 'includes authentication options when auth is configured' do
stub_ldap_config(
options: {
diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb
index 32e647688ff..611c70d73a1 100644
--- a/spec/lib/gitlab/auth_spec.rb
+++ b/spec/lib/gitlab/auth_spec.rb
@@ -87,7 +87,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
end
context 'when IP is already banned' do
- subject { gl_auth.find_for_git_client('username', 'password', project: nil, ip: 'ip') }
+ subject { gl_auth.find_for_git_client('username', Gitlab::Password.test_default, project: nil, ip: 'ip') }
before do
expect_next_instance_of(Gitlab::Auth::IpRateLimiter) do |rate_limiter|
@@ -204,16 +204,16 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
end
it 'recognizes master passwords' do
- user = create(:user, password: 'password')
+ user = create(:user, password: Gitlab::Password.test_default)
- expect(gl_auth.find_for_git_client(user.username, 'password', project: nil, ip: 'ip')).to have_attributes(actor: user, project: nil, type: :gitlab_or_ldap, authentication_abilities: described_class.full_authentication_abilities)
+ expect(gl_auth.find_for_git_client(user.username, Gitlab::Password.test_default, project: nil, ip: 'ip')).to have_attributes(actor: user, project: nil, type: :gitlab_or_ldap, authentication_abilities: described_class.full_authentication_abilities)
end
include_examples 'user login operation with unique ip limit' do
- let(:user) { create(:user, password: 'password') }
+ let(:user) { create(:user, password: Gitlab::Password.test_default) }
def operation
- expect(gl_auth.find_for_git_client(user.username, 'password', project: nil, ip: 'ip')).to have_attributes(actor: user, project: nil, type: :gitlab_or_ldap, authentication_abilities: described_class.full_authentication_abilities)
+ expect(gl_auth.find_for_git_client(user.username, Gitlab::Password.test_default, project: nil, ip: 'ip')).to have_attributes(actor: user, project: nil, type: :gitlab_or_ldap, authentication_abilities: described_class.full_authentication_abilities)
end
end
@@ -477,7 +477,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
:user,
:blocked,
username: 'normal_user',
- password: 'my-secret'
+ password: Gitlab::Password.test_default
)
expect(gl_auth.find_for_git_client(user.username, user.password, project: nil, ip: 'ip'))
@@ -486,7 +486,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
context 'when 2fa is enabled globally' do
let_it_be(:user) do
- create(:user, username: 'normal_user', password: 'my-secret', otp_grace_period_started_at: 1.day.ago)
+ create(:user, username: 'normal_user', password: Gitlab::Password.test_default, otp_grace_period_started_at: 1.day.ago)
end
before do
@@ -510,7 +510,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
context 'when 2fa is enabled personally' do
let(:user) do
- create(:user, :two_factor, username: 'normal_user', password: 'my-secret', otp_grace_period_started_at: 1.day.ago)
+ create(:user, :two_factor, username: 'normal_user', password: Gitlab::Password.test_default, otp_grace_period_started_at: 1.day.ago)
end
it 'fails' do
@@ -523,7 +523,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
user = create(
:user,
username: 'normal_user',
- password: 'my-secret'
+ password: Gitlab::Password.test_default
)
expect(gl_auth.find_for_git_client(user.username, user.password, project: nil, ip: 'ip'))
@@ -534,7 +534,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
user = create(
:user,
username: 'oauth2',
- password: 'my-secret'
+ password: Gitlab::Password.test_default
)
expect(gl_auth.find_for_git_client(user.username, user.password, project: nil, ip: 'ip'))
@@ -609,7 +609,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
context 'when deploy token and user have the same username' do
let(:username) { 'normal_user' }
- let(:user) { create(:user, username: username, password: 'my-secret') }
+ let(:user) { create(:user, username: username, password: Gitlab::Password.test_default) }
let(:deploy_token) { create(:deploy_token, username: username, read_registry: false, projects: [project]) }
it 'succeeds for the token' do
@@ -622,7 +622,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
it 'succeeds for the user' do
auth_success = { actor: user, project: nil, type: :gitlab_or_ldap, authentication_abilities: described_class.full_authentication_abilities }
- expect(gl_auth.find_for_git_client(username, 'my-secret', project: project, ip: 'ip'))
+ expect(gl_auth.find_for_git_client(username, Gitlab::Password.test_default, project: project, ip: 'ip'))
.to have_attributes(auth_success)
end
end
@@ -816,7 +816,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
end
let(:username) { 'John' } # username isn't lowercase, test this
- let(:password) { 'my-secret' }
+ let(:password) { Gitlab::Password.test_default }
it "finds user by valid login/password" do
expect(gl_auth.find_with_user_password(username, password)).to eql user
@@ -941,13 +941,13 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
it "does not find user by using ldap as fallback for authentication" do
expect(Gitlab::Auth::Ldap::Authentication).to receive(:login).and_return(nil)
- expect(gl_auth.find_with_user_password('ldap_user', 'password')).to be_nil
+ expect(gl_auth.find_with_user_password('ldap_user', Gitlab::Password.test_default)).to be_nil
end
it "finds new user by using ldap as fallback for authentication" do
expect(Gitlab::Auth::Ldap::Authentication).to receive(:login).and_return(user)
- expect(gl_auth.find_with_user_password('ldap_user', 'password')).to eq(user)
+ expect(gl_auth.find_with_user_password('ldap_user', Gitlab::Password.test_default)).to eq(user)
end
end
diff --git a/spec/lib/gitlab/background_migration/backfill_artifact_expiry_date_spec.rb b/spec/lib/gitlab/background_migration/backfill_artifact_expiry_date_spec.rb
index 6ab1e3ecd70..f5d2224747a 100644
--- a/spec/lib/gitlab/background_migration/backfill_artifact_expiry_date_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_artifact_expiry_date_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::BackfillArtifactExpiryDate, :migration, schema: 20181228175414 do
+RSpec.describe Gitlab::BackgroundMigration::BackfillArtifactExpiryDate, :migration, schema: 20210301200959 do
subject(:perform) { migration.perform(1, 99) }
let(:migration) { described_class.new }
diff --git a/spec/lib/gitlab/background_migration/backfill_ci_namespace_mirrors_spec.rb b/spec/lib/gitlab/background_migration/backfill_ci_namespace_mirrors_spec.rb
new file mode 100644
index 00000000000..8980a26932b
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_ci_namespace_mirrors_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillCiNamespaceMirrors, :migration, schema: 20211208122200 do
+ let(:namespaces) { table(:namespaces) }
+ let(:ci_namespace_mirrors) { table(:ci_namespace_mirrors) }
+
+ subject { described_class.new }
+
+ describe '#perform' do
+ it 'creates hierarchies for all namespaces in range' do
+ namespaces.create!(id: 5, name: 'test1', path: 'test1')
+ namespaces.create!(id: 7, name: 'test2', path: 'test2')
+ namespaces.create!(id: 8, name: 'test3', path: 'test3')
+
+ subject.perform(5, 7)
+
+ expect(ci_namespace_mirrors.all).to contain_exactly(
+ an_object_having_attributes(namespace_id: 5, traversal_ids: [5]),
+ an_object_having_attributes(namespace_id: 7, traversal_ids: [7])
+ )
+ end
+
+ it 'handles existing hierarchies gracefully' do
+ namespaces.create!(id: 5, name: 'test1', path: 'test1')
+ test2 = namespaces.create!(id: 7, name: 'test2', path: 'test2')
+ namespaces.create!(id: 8, name: 'test3', path: 'test3', parent_id: 7)
+ namespaces.create!(id: 9, name: 'test4', path: 'test4')
+
+ # Simulate a situation where a user has had a chance to move a group to another parent
+ # before the background migration has had a chance to run
+ test2.update!(parent_id: 5)
+ ci_namespace_mirrors.create!(namespace_id: test2.id, traversal_ids: [5, 7])
+
+ subject.perform(5, 8)
+
+ expect(ci_namespace_mirrors.all).to contain_exactly(
+ an_object_having_attributes(namespace_id: 5, traversal_ids: [5]),
+ an_object_having_attributes(namespace_id: 7, traversal_ids: [5, 7]),
+ an_object_having_attributes(namespace_id: 8, traversal_ids: [5, 7, 8])
+ )
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_ci_project_mirrors_spec.rb b/spec/lib/gitlab/background_migration/backfill_ci_project_mirrors_spec.rb
new file mode 100644
index 00000000000..4eec83879e3
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_ci_project_mirrors_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillCiProjectMirrors, :migration, schema: 20211208122201 do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:ci_project_mirrors) { table(:ci_project_mirrors) }
+
+ subject { described_class.new }
+
+ describe '#perform' do
+ it 'creates ci_project_mirrors for all projects in range' do
+ namespaces.create!(id: 10, name: 'namespace1', path: 'namespace1')
+ projects.create!(id: 5, namespace_id: 10, name: 'test1', path: 'test1')
+ projects.create!(id: 7, namespace_id: 10, name: 'test2', path: 'test2')
+ projects.create!(id: 8, namespace_id: 10, name: 'test3', path: 'test3')
+
+ subject.perform(5, 7)
+
+ expect(ci_project_mirrors.all).to contain_exactly(
+ an_object_having_attributes(project_id: 5, namespace_id: 10),
+ an_object_having_attributes(project_id: 7, namespace_id: 10)
+ )
+ end
+
+ it 'handles existing ci_project_mirrors gracefully' do
+ namespaces.create!(id: 10, name: 'namespace1', path: 'namespace1')
+ namespaces.create!(id: 11, name: 'namespace2', path: 'namespace2', parent_id: 10)
+ projects.create!(id: 5, namespace_id: 10, name: 'test1', path: 'test1')
+ projects.create!(id: 7, namespace_id: 11, name: 'test2', path: 'test2')
+ projects.create!(id: 8, namespace_id: 11, name: 'test3', path: 'test3')
+
+ # Simulate a situation where a user has had a chance to move a project to another namespace
+ # before the background migration has had a chance to run
+ ci_project_mirrors.create!(project_id: 7, namespace_id: 10)
+
+ subject.perform(5, 7)
+
+ expect(ci_project_mirrors.all).to contain_exactly(
+ an_object_having_attributes(project_id: 5, namespace_id: 10),
+ an_object_having_attributes(project_id: 7, namespace_id: 10)
+ )
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_incident_issue_escalation_statuses_spec.rb b/spec/lib/gitlab/background_migration/backfill_incident_issue_escalation_statuses_spec.rb
new file mode 100644
index 00000000000..242da383453
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_incident_issue_escalation_statuses_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillIncidentIssueEscalationStatuses, schema: 20211214012507 do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:issues) { table(:issues) }
+ let(:issuable_escalation_statuses) { table(:incident_management_issuable_escalation_statuses) }
+
+ subject(:migration) { described_class.new }
+
+ it 'correctly backfills issuable escalation status records' do
+ namespace = namespaces.create!(name: 'foo', path: 'foo')
+ project = projects.create!(namespace_id: namespace.id)
+
+ issues.create!(project_id: project.id, title: 'issue 1', issue_type: 0) # non-incident issue
+ issues.create!(project_id: project.id, title: 'incident 1', issue_type: 1)
+ issues.create!(project_id: project.id, title: 'incident 2', issue_type: 1)
+ incident_issue_existing_status = issues.create!(project_id: project.id, title: 'incident 3', issue_type: 1)
+ issuable_escalation_statuses.create!(issue_id: incident_issue_existing_status.id)
+
+ migration.perform(1, incident_issue_existing_status.id)
+
+ expect(issuable_escalation_statuses.count).to eq(3)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb b/spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb
index 446d62bbd2a..65f5f8368df 100644
--- a/spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::BackfillJiraTrackerDeploymentType2, :migration, schema: 20181228175414 do
+RSpec.describe Gitlab::BackgroundMigration::BackfillJiraTrackerDeploymentType2, :migration, schema: 20210301200959 do
let_it_be(:jira_integration_temp) { described_class::JiraServiceTemp }
let_it_be(:jira_tracker_data_temp) { described_class::JiraTrackerDataTemp }
let_it_be(:atlassian_host) { 'https://api.atlassian.net' }
diff --git a/spec/lib/gitlab/background_migration/backfill_project_updated_at_after_repository_storage_move_spec.rb b/spec/lib/gitlab/background_migration/backfill_project_updated_at_after_repository_storage_move_spec.rb
index 708e5e21dbe..ed44b819a97 100644
--- a/spec/lib/gitlab/background_migration/backfill_project_updated_at_after_repository_storage_move_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_project_updated_at_after_repository_storage_move_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::BackfillProjectUpdatedAtAfterRepositoryStorageMove, :migration, schema: 20210210093901 do
+RSpec.describe Gitlab::BackgroundMigration::BackfillProjectUpdatedAtAfterRepositoryStorageMove, :migration, schema: 20210301200959 do
let(:projects) { table(:projects) }
let(:project_repository_storage_moves) { table(:project_repository_storage_moves) }
let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
diff --git a/spec/lib/gitlab/background_migration/base_job_spec.rb b/spec/lib/gitlab/background_migration/base_job_spec.rb
new file mode 100644
index 00000000000..86abe4257e4
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/base_job_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BaseJob, '#perform' do
+ let(:connection) { double(:connection) }
+
+ let(:test_job_class) { Class.new(described_class) }
+ let(:test_job) { test_job_class.new(connection: connection) }
+
+ describe '#perform' do
+ it 'raises an error if not overridden by a subclass' do
+ expect { test_job.perform }.to raise_error(NotImplementedError, /must implement perform/)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/cleanup_concurrent_schema_change_spec.rb b/spec/lib/gitlab/background_migration/cleanup_concurrent_schema_change_spec.rb
deleted file mode 100644
index 2931b5e6dd3..00000000000
--- a/spec/lib/gitlab/background_migration/cleanup_concurrent_schema_change_spec.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::CleanupConcurrentSchemaChange do
- describe '#perform' do
- it 'new column does not exist' do
- expect(subject).to receive(:column_exists?).with(:issues, :closed_at_timestamp).and_return(false)
- expect(subject).not_to receive(:column_exists?).with(:issues, :closed_at)
- expect(subject).not_to receive(:define_model_for)
-
- expect(subject.perform(:issues, :closed_at, :closed_at_timestamp)).to be_nil
- end
-
- it 'old column does not exist' do
- expect(subject).to receive(:column_exists?).with(:issues, :closed_at_timestamp).and_return(true)
- expect(subject).to receive(:column_exists?).with(:issues, :closed_at).and_return(false)
- expect(subject).not_to receive(:define_model_for)
-
- expect(subject.perform(:issues, :closed_at, :closed_at_timestamp)).to be_nil
- end
-
- it 'has both old and new columns' do
- expect(subject).to receive(:column_exists?).twice.and_return(true)
-
- expect { subject.perform('issues', :closed_at, :created_at) }.to raise_error(NotImplementedError)
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb b/spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb
index b83dc6fff7a..5b6722a3384 100644
--- a/spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb
+++ b/spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::DropInvalidVulnerabilities, schema: 20181228175414 do
+RSpec.describe Gitlab::BackgroundMigration::DropInvalidVulnerabilities, schema: 20210301200959 do
let_it_be(:background_migration_jobs) { table(:background_migration_jobs) }
let_it_be(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
let_it_be(:users) { table(:users) }
diff --git a/spec/lib/gitlab/background_migration/encrypt_static_object_token_spec.rb b/spec/lib/gitlab/background_migration/encrypt_static_object_token_spec.rb
new file mode 100644
index 00000000000..94d9f4509a7
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/encrypt_static_object_token_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::EncryptStaticObjectToken do
+ let(:users) { table(:users) }
+ let!(:user_without_tokens) { create_user!(name: 'notoken') }
+ let!(:user_with_plaintext_token_1) { create_user!(name: 'plaintext_1', token: 'token') }
+ let!(:user_with_plaintext_token_2) { create_user!(name: 'plaintext_2', token: 'TOKEN') }
+ let!(:user_with_plaintext_empty_token) { create_user!(name: 'plaintext_3', token: '') }
+ let!(:user_with_encrypted_token) { create_user!(name: 'encrypted', encrypted_token: 'encrypted') }
+ let!(:user_with_both_tokens) { create_user!(name: 'both', token: 'token2', encrypted_token: 'encrypted2') }
+
+ before do
+ allow(Gitlab::CryptoHelper).to receive(:aes256_gcm_encrypt).and_call_original
+ allow(Gitlab::CryptoHelper).to receive(:aes256_gcm_encrypt).with('token') { 'secure_token' }
+ allow(Gitlab::CryptoHelper).to receive(:aes256_gcm_encrypt).with('TOKEN') { 'SECURE_TOKEN' }
+ end
+
+ subject { described_class.new.perform(start_id, end_id) }
+
+ let(:start_id) { users.minimum(:id) }
+ let(:end_id) { users.maximum(:id) }
+
+ it 'backfills encrypted tokens to users with plaintext token only', :aggregate_failures do
+ subject
+
+ new_state = users.pluck(:id, :static_object_token, :static_object_token_encrypted).to_h do |row|
+ [row[0], [row[1], row[2]]]
+ end
+
+ expect(new_state.count).to eq(6)
+
+ expect(new_state[user_with_plaintext_token_1.id]).to match_array(%w[token secure_token])
+ expect(new_state[user_with_plaintext_token_2.id]).to match_array(%w[TOKEN SECURE_TOKEN])
+
+ expect(new_state[user_with_plaintext_empty_token.id]).to match_array(['', nil])
+ expect(new_state[user_without_tokens.id]).to match_array([nil, nil])
+ expect(new_state[user_with_both_tokens.id]).to match_array(%w[token2 encrypted2])
+ expect(new_state[user_with_encrypted_token.id]).to match_array([nil, 'encrypted'])
+ end
+
+ private
+
+ def create_user!(name:, token: nil, encrypted_token: nil)
+ email = "#{name}@example.com"
+
+ table(:users).create!(
+ name: name,
+ email: email,
+ username: name,
+ projects_limit: 0,
+ static_object_token: token,
+ static_object_token_encrypted: encrypted_token
+ )
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/fix_vulnerability_occurrences_with_hashes_as_raw_metadata_spec.rb b/spec/lib/gitlab/background_migration/fix_vulnerability_occurrences_with_hashes_as_raw_metadata_spec.rb
new file mode 100644
index 00000000000..af551861d47
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/fix_vulnerability_occurrences_with_hashes_as_raw_metadata_spec.rb
@@ -0,0 +1,232 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::FixVulnerabilityOccurrencesWithHashesAsRawMetadata, schema: 20211209203821 do
+ let(:users) { table(:users) }
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:scanners) { table(:vulnerability_scanners) }
+ let(:identifiers) { table(:vulnerability_identifiers) }
+ let(:findings) { table(:vulnerability_occurrences) }
+
+ let(:user) { users.create!(name: 'Test User', projects_limit: 10, username: 'test-user', email: '1') }
+
+ let(:namespace) do
+ namespaces.create!(
+ owner_id: user.id,
+ name: user.name,
+ path: user.username
+ )
+ end
+
+ let(:project) do
+ projects.create!(namespace_id: namespace.id, name: 'Test Project')
+ end
+
+ let(:scanner) do
+ scanners.create!(
+ project_id: project.id,
+ external_id: 'test-scanner',
+ name: 'Test Scanner',
+ vendor: 'GitLab'
+ )
+ end
+
+ let(:primary_identifier) do
+ identifiers.create!(
+ project_id: project.id,
+ external_type: 'cve',
+ name: 'CVE-2021-1234',
+ external_id: 'CVE-2021-1234',
+ fingerprint: '4c0fe491999f94701ee437588554ef56322ae276'
+ )
+ end
+
+ let(:finding) do
+ findings.create!(
+ raw_metadata: raw_metadata,
+ project_id: project.id,
+ scanner_id: scanner.id,
+ primary_identifier_id: primary_identifier.id,
+ uuid: '4deb090a-bedf-5ccc-aa9a-ac8055a1ea81',
+ project_fingerprint: '1caa750a6dad769a18ad6f40b413b3b6ab1c8d77',
+ location_fingerprint: '6d1f35f53b065238abfcadc01336ce65d112a2bd',
+ name: 'name',
+ report_type: 7,
+ severity: 0,
+ confidence: 0,
+ detection_method: 'gitlab_security_report',
+ metadata_version: 'cluster_image_scanning:1.0',
+ created_at: "2021-12-10 14:27:42 -0600",
+ updated_at: "2021-12-10 14:27:42 -0600"
+ )
+ end
+
+ subject(:perform) { described_class.new.perform(finding.id, finding.id) }
+
+ context 'with stringified hash as raw_metadata' do
+ let(:raw_metadata) do
+ '{:location=>{"image"=>"index.docker.io/library/nginx:latest", "kubernetes_resource"=>{"namespace"=>"production", "kind"=>"deployment", "name"=>"nginx", "container_name"=>"nginx", "agent_id"=>"2"}, "dependency"=>{"package"=>{"name"=>"libc"}, "version"=>"v1.2.3"}}}'
+ end
+
+ it 'converts stringified hash to JSON' do
+ expect { perform }.not_to raise_error
+
+ result = finding.reload.raw_metadata
+ metadata = Oj.load(result)
+ expect(metadata).to eq(
+ {
+ 'location' => {
+ 'image' => 'index.docker.io/library/nginx:latest',
+ 'kubernetes_resource' => {
+ 'namespace' => 'production',
+ 'kind' => 'deployment',
+ 'name' => 'nginx',
+ 'container_name' => 'nginx',
+ 'agent_id' => '2'
+ },
+ 'dependency' => {
+ 'package' => { 'name' => 'libc' },
+ 'version' => 'v1.2.3'
+ }
+ }
+ }
+ )
+ end
+ end
+
+ context 'with valid raw_metadata' do
+ where(:raw_metadata) do
+ [
+ '{}',
+ '{"location":null}',
+ '{"location":{"image":"index.docker.io/library/nginx:latest","kubernetes_resource":{"namespace":"production","kind":"deployment","name":"nginx","container_name":"nginx","agent_id":"2"},"dependency":{"package":{"name":"libc"},"version":"v1.2.3"}}}'
+ ]
+ end
+
+ with_them do
+ it 'does not change the raw_metadata' do
+ expect { perform }.not_to raise_error
+
+ result = finding.reload.raw_metadata
+ expect(result).to eq(raw_metadata)
+ end
+ end
+ end
+
+ context 'when raw_metadata contains forbidden types' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:raw_metadata, :type) do
+ 'def foo; "bar"; end' | :def
+ '`cat somefile`' | :xstr
+ 'exec("cat /etc/passwd")' | :send
+ end
+
+ with_them do
+ it 'does not change the raw_metadata' do
+ expect(Gitlab::AppLogger).to receive(:error).with(message: "expected raw_metadata to be a hash", type: type)
+
+ expect { perform }.not_to raise_error
+
+ result = finding.reload.raw_metadata
+ expect(result).to eq(raw_metadata)
+ end
+ end
+ end
+
+ context 'when forbidden types are nested inside a hash' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:raw_metadata, :type) do
+ '{:location=>Env.fetch("SOME_VAR")}' | :send
+ '{:location=>{:image=>Env.fetch("SOME_VAR")}}' | :send
+ # rubocop:disable Lint/InterpolationCheck
+ '{"key"=>"value: #{send}"}' | :dstr
+ # rubocop:enable Lint/InterpolationCheck
+ end
+
+ with_them do
+ it 'does not change the raw_metadata' do
+ expect(Gitlab::AppLogger).to receive(:error).with(
+ message: "error parsing raw_metadata",
+ error: "value of a pair was an unexpected type",
+ type: type
+ )
+
+ expect { perform }.not_to raise_error
+
+ result = finding.reload.raw_metadata
+ expect(result).to eq(raw_metadata)
+ end
+ end
+ end
+
+ context 'when key is an unexpected type' do
+ let(:raw_metadata) { "{nil=>nil}" }
+
+ it 'logs error' do
+ expect(Gitlab::AppLogger).to receive(:error).with(
+ message: "error parsing raw_metadata",
+ error: "expected key to be either symbol, string, or integer",
+ type: :nil
+ )
+
+ expect { perform }.not_to raise_error
+ end
+ end
+
+ context 'when raw_metadata cannot be parsed' do
+ let(:raw_metadata) { "{" }
+
+ it 'logs error' do
+ expect(Gitlab::AppLogger).to receive(:error).with(message: "error parsing raw_metadata", error: "unexpected token $end")
+
+ expect { perform }.not_to raise_error
+ end
+ end
+
+ describe '#hash_from_s' do
+ subject { described_class.new.hash_from_s(input) }
+
+ context 'with valid input' do
+ let(:input) { '{:location=>{"image"=>"index.docker.io/library/nginx:latest", "kubernetes_resource"=>{"namespace"=>"production", "kind"=>"deployment", "name"=>"nginx", "container_name"=>"nginx", "agent_id"=>2}, "dependency"=>{"package"=>{"name"=>"libc"}, "version"=>"v1.2.3"}}}' }
+
+ it 'converts string to a hash' do
+ expect(subject).to eq({
+ location: {
+ 'image' => 'index.docker.io/library/nginx:latest',
+ 'kubernetes_resource' => {
+ 'namespace' => 'production',
+ 'kind' => 'deployment',
+ 'name' => 'nginx',
+ 'container_name' => 'nginx',
+ 'agent_id' => 2
+ },
+ 'dependency' => {
+ 'package' => { 'name' => 'libc' },
+ 'version' => 'v1.2.3'
+ }
+ }
+ })
+ end
+ end
+
+ using RSpec::Parameterized::TableSyntax
+
+ where(:input, :expected) do
+ '{}' | {}
+ '{"bool"=>true}' | { 'bool' => true }
+ '{"bool"=>false}' | { 'bool' => false }
+ '{"nil"=>nil}' | { 'nil' => nil }
+ '{"array"=>[1, "foo", nil]}' | { 'array' => [1, "foo", nil] }
+ '{foo: :bar}' | { foo: :bar }
+ '{foo: {bar: "bin"}}' | { foo: { bar: "bin" } }
+ end
+
+ with_them do
+ specify { expect(subject).to eq(expected) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/job_coordinator_spec.rb b/spec/lib/gitlab/background_migration/job_coordinator_spec.rb
index 7a524d1489a..43d41408e66 100644
--- a/spec/lib/gitlab/background_migration/job_coordinator_spec.rb
+++ b/spec/lib/gitlab/background_migration/job_coordinator_spec.rb
@@ -202,23 +202,50 @@ RSpec.describe Gitlab::BackgroundMigration::JobCoordinator do
end
describe '#perform' do
- let(:migration) { spy(:migration) }
- let(:connection) { double('connection') }
+ let(:connection) { double(:connection) }
before do
- stub_const('Gitlab::BackgroundMigration::Foo', migration)
-
allow(coordinator).to receive(:connection).and_return(connection)
end
- it 'performs a background migration with the configured shared connection' do
- expect(coordinator).to receive(:with_shared_connection).and_call_original
+ context 'when the background migration does not inherit from BaseJob' do
+ let(:migration_class) { Class.new }
+
+ before do
+ stub_const('Gitlab::BackgroundMigration::Foo', migration_class)
+ end
+
+ it 'performs a background migration with the configured shared connection' do
+ expect(coordinator).to receive(:with_shared_connection).and_call_original
+
+ expect_next_instance_of(migration_class) do |migration|
+ expect(migration).to receive(:perform).with(10, 20).once do
+ expect(Gitlab::Database::SharedModel.connection).to be(connection)
+ end
+ end
+
+ coordinator.perform('Foo', [10, 20])
+ end
+ end
+
+ context 'when the background migration inherits from BaseJob' do
+ let(:migration_class) { Class.new(::Gitlab::BackgroundMigration::BaseJob) }
+ let(:migration) { double(:migration) }
- expect(migration).to receive(:perform).with(10, 20).once do
- expect(Gitlab::Database::SharedModel.connection).to be(connection)
+ before do
+ stub_const('Gitlab::BackgroundMigration::Foo', migration_class)
end
- coordinator.perform('Foo', [10, 20])
+ it 'passes the correct connection when constructing the migration' do
+ expect(coordinator).to receive(:with_shared_connection).and_call_original
+
+ expect(migration_class).to receive(:new).with(connection: connection).and_return(migration)
+ expect(migration).to receive(:perform).with(10, 20).once do
+ expect(Gitlab::Database::SharedModel.connection).to be(connection)
+ end
+
+ coordinator.perform('Foo', [10, 20])
+ end
end
end
diff --git a/spec/lib/gitlab/background_migration/migrate_legacy_artifacts_spec.rb b/spec/lib/gitlab/background_migration/migrate_legacy_artifacts_spec.rb
deleted file mode 100644
index 5c93e69b5e5..00000000000
--- a/spec/lib/gitlab/background_migration/migrate_legacy_artifacts_spec.rb
+++ /dev/null
@@ -1,158 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::MigrateLegacyArtifacts, schema: 20210210093901 do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:pipelines) { table(:ci_pipelines) }
- let(:jobs) { table(:ci_builds) }
- let(:job_artifacts) { table(:ci_job_artifacts) }
-
- subject { described_class.new.perform(*range) }
-
- context 'when a pipeline exists' do
- let!(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
- let!(:project) { projects.create!(name: 'gitlab', path: 'gitlab-ce', namespace_id: namespace.id) }
- let!(:pipeline) { pipelines.create!(project_id: project.id, ref: 'master', sha: 'adf43c3a') }
-
- context 'when a legacy artifacts exists' do
- let(:artifacts_expire_at) { 1.day.since.to_s }
- let(:file_store) { ::ObjectStorage::Store::REMOTE }
-
- let!(:job) do
- jobs.create!(
- commit_id: pipeline.id,
- project_id: project.id,
- status: :success,
- **artifacts_archive_attributes,
- **artifacts_metadata_attributes)
- end
-
- let(:artifacts_archive_attributes) do
- {
- artifacts_file: 'archive.zip',
- artifacts_file_store: file_store,
- artifacts_size: 123,
- artifacts_expire_at: artifacts_expire_at
- }
- end
-
- let(:artifacts_metadata_attributes) do
- {
- artifacts_metadata: 'metadata.gz',
- artifacts_metadata_store: file_store
- }
- end
-
- it 'has legacy artifacts' do
- expect(jobs.pluck('artifacts_file, artifacts_file_store, artifacts_size, artifacts_expire_at')).to eq([artifacts_archive_attributes.values])
- expect(jobs.pluck('artifacts_metadata, artifacts_metadata_store')).to eq([artifacts_metadata_attributes.values])
- end
-
- it 'does not have new artifacts yet' do
- expect(job_artifacts.count).to be_zero
- end
-
- context 'when the record exists inside of the range of a background migration' do
- let(:range) { [job.id, job.id] }
-
- it 'migrates a legacy artifact to ci_job_artifacts table' do
- expect { subject }.to change { job_artifacts.count }.by(2)
-
- expect(job_artifacts.order(:id).pluck('project_id, job_id, file_type, file_store, size, expire_at, file, file_sha256, file_location'))
- .to eq([[project.id,
- job.id,
- described_class::ARCHIVE_FILE_TYPE,
- file_store,
- artifacts_archive_attributes[:artifacts_size],
- artifacts_expire_at,
- 'archive.zip',
- nil,
- described_class::LEGACY_PATH_FILE_LOCATION],
- [project.id,
- job.id,
- described_class::METADATA_FILE_TYPE,
- file_store,
- nil,
- artifacts_expire_at,
- 'metadata.gz',
- nil,
- described_class::LEGACY_PATH_FILE_LOCATION]])
-
- expect(jobs.pluck('artifacts_file, artifacts_file_store, artifacts_size, artifacts_expire_at')).to eq([[nil, nil, nil, artifacts_expire_at]])
- expect(jobs.pluck('artifacts_metadata, artifacts_metadata_store')).to eq([[nil, nil]])
- end
-
- context 'when file_store is nil' do
- let(:file_store) { nil }
-
- it 'has nullified file_store in all legacy artifacts' do
- expect(jobs.pluck('artifacts_file_store, artifacts_metadata_store')).to eq([[nil, nil]])
- end
-
- it 'fills file_store by the value of local file store' do
- subject
-
- expect(job_artifacts.pluck('file_store')).to all(eq(::ObjectStorage::Store::LOCAL))
- end
- end
-
- context 'when new artifacts has already existed' do
- context 'when only archive.zip existed' do
- before do
- job_artifacts.create!(project_id: project.id, job_id: job.id, file_type: described_class::ARCHIVE_FILE_TYPE, size: 999, file: 'archive.zip')
- end
-
- it 'had archive.zip already' do
- expect(job_artifacts.exists?(job_id: job.id, file_type: described_class::ARCHIVE_FILE_TYPE)).to be_truthy
- end
-
- it 'migrates metadata' do
- expect { subject }.to change { job_artifacts.count }.by(1)
-
- expect(job_artifacts.exists?(job_id: job.id, file_type: described_class::METADATA_FILE_TYPE)).to be_truthy
- end
- end
-
- context 'when both archive and metadata existed' do
- before do
- job_artifacts.create!(project_id: project.id, job_id: job.id, file_type: described_class::ARCHIVE_FILE_TYPE, size: 999, file: 'archive.zip')
- job_artifacts.create!(project_id: project.id, job_id: job.id, file_type: described_class::METADATA_FILE_TYPE, size: 999, file: 'metadata.zip')
- end
-
- it 'does not migrate' do
- expect { subject }.not_to change { job_artifacts.count }
- end
- end
- end
- end
-
- context 'when the record exists outside of the range of a background migration' do
- let(:range) { [job.id + 1, job.id + 1] }
-
- it 'does not migrate' do
- expect { subject }.not_to change { job_artifacts.count }
- end
- end
- end
-
- context 'when the job does not have legacy artifacts' do
- let!(:job) { jobs.create!(commit_id: pipeline.id, project_id: project.id, status: :success) }
-
- it 'does not have the legacy artifacts in database' do
- expect(jobs.count).to eq(1)
- expect(jobs.pluck('artifacts_file, artifacts_file_store, artifacts_size, artifacts_expire_at')).to eq([[nil, nil, nil, nil]])
- expect(jobs.pluck('artifacts_metadata, artifacts_metadata_store')).to eq([[nil, nil]])
- end
-
- context 'when the record exists inside of the range of a background migration' do
- let(:range) { [job.id, job.id] }
-
- it 'does not migrate' do
- expect { subject }.not_to change { job_artifacts.count }
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb b/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb
index ab183d01357..fc957a7c425 100644
--- a/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb
+++ b/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
require 'webauthn/u2f_migrator'
-RSpec.describe Gitlab::BackgroundMigration::MigrateU2fWebauthn, :migration, schema: 20181228175414 do
+RSpec.describe Gitlab::BackgroundMigration::MigrateU2fWebauthn, :migration, schema: 20210301200959 do
let(:users) { table(:users) }
let(:user) { users.create!(email: 'email@email.com', name: 'foo', username: 'foo', projects_limit: 0) }
diff --git a/spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb b/spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb
index b34a57f51f1..79b5567f5b3 100644
--- a/spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb
+++ b/spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::MoveContainerRegistryEnabledToProjectFeature, :migration, schema: 2021_02_26_120851 do
+RSpec.describe Gitlab::BackgroundMigration::MoveContainerRegistryEnabledToProjectFeature, :migration, schema: 20210301200959 do
let(:enabled) { 20 }
let(:disabled) { 0 }
diff --git a/spec/lib/gitlab/background_migration/populate_finding_uuid_for_vulnerability_feedback_spec.rb b/spec/lib/gitlab/background_migration/populate_finding_uuid_for_vulnerability_feedback_spec.rb
index 25006e663ab..68fe8f39f59 100644
--- a/spec/lib/gitlab/background_migration/populate_finding_uuid_for_vulnerability_feedback_spec.rb
+++ b/spec/lib/gitlab/background_migration/populate_finding_uuid_for_vulnerability_feedback_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::PopulateFindingUuidForVulnerabilityFeedback, schema: 20181228175414 do
+RSpec.describe Gitlab::BackgroundMigration::PopulateFindingUuidForVulnerabilityFeedback, schema: 20210301200959 do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:users) { table(:users) }
diff --git a/spec/lib/gitlab/background_migration/populate_issue_email_participants_spec.rb b/spec/lib/gitlab/background_migration/populate_issue_email_participants_spec.rb
index a03a11489b5..b00eb185b34 100644
--- a/spec/lib/gitlab/background_migration/populate_issue_email_participants_spec.rb
+++ b/spec/lib/gitlab/background_migration/populate_issue_email_participants_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::PopulateIssueEmailParticipants, schema: 20181228175414 do
+RSpec.describe Gitlab::BackgroundMigration::PopulateIssueEmailParticipants, schema: 20210301200959 do
let!(:namespace) { table(:namespaces).create!(name: 'namespace', path: 'namespace') }
let!(:project) { table(:projects).create!(id: 1, namespace_id: namespace.id) }
let!(:issue1) { table(:issues).create!(id: 1, project_id: project.id, service_desk_reply_to: "a@gitlab.com") }
diff --git a/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb b/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb
index 4cdb56d3d3b..a54c840dd8e 100644
--- a/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb
+++ b/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb
@@ -2,82 +2,124 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrencesUuid, schema: 20181228175414 do
+def create_background_migration_job(ids, status)
+ proper_status = case status
+ when :pending
+ Gitlab::Database::BackgroundMigrationJob.statuses['pending']
+ when :succeeded
+ Gitlab::Database::BackgroundMigrationJob.statuses['succeeded']
+ else
+ raise ArgumentError
+ end
+
+ background_migration_jobs.create!(
+ class_name: 'RecalculateVulnerabilitiesOccurrencesUuid',
+ arguments: Array(ids),
+ status: proper_status,
+ created_at: Time.now.utc
+ )
+end
+
+RSpec.describe Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrencesUuid, schema: 20211124132705 do
+ let(:background_migration_jobs) { table(:background_migration_jobs) }
+ let(:pending_jobs) { background_migration_jobs.where(status: Gitlab::Database::BackgroundMigrationJob.statuses['pending']) }
+ let(:succeeded_jobs) { background_migration_jobs.where(status: Gitlab::Database::BackgroundMigrationJob.statuses['succeeded']) }
let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
let(:users) { table(:users) }
let(:user) { create_user! }
let(:project) { table(:projects).create!(id: 123, namespace_id: namespace.id) }
let(:scanners) { table(:vulnerability_scanners) }
let(:scanner) { scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') }
- let(:different_scanner) { scanners.create!(project_id: project.id, external_id: 'test 2', name: 'test scanner 2') }
+ let(:scanner2) { scanners.create!(project_id: project.id, external_id: 'test 2', name: 'test scanner 2') }
let(:vulnerabilities) { table(:vulnerabilities) }
- let(:vulnerabilities_findings) { table(:vulnerability_occurrences) }
+ let(:vulnerability_findings) { table(:vulnerability_occurrences) }
+ let(:vulnerability_finding_pipelines) { table(:vulnerability_occurrence_pipelines) }
+ let(:vulnerability_finding_signatures) { table(:vulnerability_finding_signatures) }
let(:vulnerability_identifiers) { table(:vulnerability_identifiers) }
- let(:vulnerability_identifier) do
+ let(:identifier_1) { 'identifier-1' }
+ let!(:vulnerability_identifier) do
vulnerability_identifiers.create!(
project_id: project.id,
- external_type: 'uuid-v5',
- external_id: 'uuid-v5',
- fingerprint: Gitlab::Database::ShaAttribute.serialize('7e394d1b1eb461a7406d7b1e08f057a1cf11287a'),
- name: 'Identifier for UUIDv5')
+ external_type: identifier_1,
+ external_id: identifier_1,
+ fingerprint: Gitlab::Database::ShaAttribute.serialize('ff9ef548a6e30a0462795d916f3f00d1e2b082ca'),
+ name: 'Identifier 1')
end
- let(:different_vulnerability_identifier) do
+ let(:identifier_2) { 'identifier-2' }
+ let!(:vulnerability_identfier2) do
vulnerability_identifiers.create!(
project_id: project.id,
- external_type: 'uuid-v4',
- external_id: 'uuid-v4',
- fingerprint: Gitlab::Database::ShaAttribute.serialize('772da93d34a1ba010bcb5efa9fb6f8e01bafcc89'),
- name: 'Identifier for UUIDv4')
+ external_type: identifier_2,
+ external_id: identifier_2,
+ fingerprint: Gitlab::Database::ShaAttribute.serialize('4299e8ddd819f9bde9cfacf45716724c17b5ddf7'),
+ name: 'Identifier 2')
end
- let!(:vulnerability_for_uuidv4) do
- create_vulnerability!(
- project_id: project.id,
- author_id: user.id
- )
- end
-
- let!(:vulnerability_for_uuidv5) do
- create_vulnerability!(
+ let(:identifier_3) { 'identifier-3' }
+ let!(:vulnerability_identifier3) do
+ vulnerability_identifiers.create!(
project_id: project.id,
- author_id: user.id
- )
+ external_type: identifier_3,
+ external_id: identifier_3,
+ fingerprint: Gitlab::Database::ShaAttribute.serialize('8e91632f9c6671e951834a723ee221c44cc0d844'),
+ name: 'Identifier 3')
end
- let(:known_uuid_v5) { "77211ed6-7dff-5f6b-8c9a-da89ad0a9b60" }
let(:known_uuid_v4) { "b3cc2518-5446-4dea-871c-89d5e999c1ac" }
- let(:desired_uuid_v5) { "3ca8ad45-6344-508b-b5e3-306a3bd6c6ba" }
+ let(:known_uuid_v5) { "05377088-dc26-5161-920e-52a7159fdaa1" }
+ let(:desired_uuid_v5) { "f3e9a23f-9181-54bf-a5ab-c5bc7a9b881a" }
- subject { described_class.new.perform(finding.id, finding.id) }
+ subject { described_class.new.perform(start_id, end_id) }
+
+ context 'when the migration is disabled by the feature flag' do
+ let(:start_id) { 1 }
+ let(:end_id) { 1001 }
+
+ before do
+ stub_feature_flags(migrate_vulnerability_finding_uuids: false)
+ end
+
+ it 'logs the info message and does not run the migration' do
+ expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance|
+ expect(instance).to receive(:info).once.with(message: 'Migration is disabled by the feature flag',
+ migrator: 'RecalculateVulnerabilitiesOccurrencesUuid',
+ start_id: start_id,
+ end_id: end_id)
+ end
+
+ subject
+ end
+ end
context "when finding has a UUIDv4" do
before do
@uuid_v4 = create_finding!(
- vulnerability_id: vulnerability_for_uuidv4.id,
+ vulnerability_id: nil,
project_id: project.id,
- scanner_id: different_scanner.id,
- primary_identifier_id: different_vulnerability_identifier.id,
+ scanner_id: scanner2.id,
+ primary_identifier_id: vulnerability_identfier2.id,
report_type: 0, # "sast"
location_fingerprint: Gitlab::Database::ShaAttribute.serialize("fa18f432f1d56675f4098d318739c3cd5b14eb3e"),
uuid: known_uuid_v4
)
end
- let(:finding) { @uuid_v4 }
+ let(:start_id) { @uuid_v4.id }
+ let(:end_id) { @uuid_v4.id }
it "replaces it with UUIDv5" do
- expect(vulnerabilities_findings.pluck(:uuid)).to eq([known_uuid_v4])
+ expect(vulnerability_findings.pluck(:uuid)).to match_array([known_uuid_v4])
subject
- expect(vulnerabilities_findings.pluck(:uuid)).to eq([desired_uuid_v5])
+ expect(vulnerability_findings.pluck(:uuid)).to match_array([desired_uuid_v5])
end
it 'logs recalculation' do
expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance|
- expect(instance).to receive(:info).once
+ expect(instance).to receive(:info).twice
end
subject
@@ -87,7 +129,7 @@ RSpec.describe Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrence
context "when finding has a UUIDv5" do
before do
@uuid_v5 = create_finding!(
- vulnerability_id: vulnerability_for_uuidv5.id,
+ vulnerability_id: nil,
project_id: project.id,
scanner_id: scanner.id,
primary_identifier_id: vulnerability_identifier.id,
@@ -97,40 +139,340 @@ RSpec.describe Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrence
)
end
- let(:finding) { @uuid_v5 }
+ let(:start_id) { @uuid_v5.id }
+ let(:end_id) { @uuid_v5.id }
it "stays the same" do
- expect(vulnerabilities_findings.pluck(:uuid)).to eq([known_uuid_v5])
+ expect(vulnerability_findings.pluck(:uuid)).to match_array([known_uuid_v5])
subject
- expect(vulnerabilities_findings.pluck(:uuid)).to eq([known_uuid_v5])
+ expect(vulnerability_findings.pluck(:uuid)).to match_array([known_uuid_v5])
+ end
+ end
+
+ context 'if a duplicate UUID would be generated' do # rubocop: disable RSpec/MultipleMemoizedHelpers
+ let(:v1) do
+ create_vulnerability!(
+ project_id: project.id,
+ author_id: user.id
+ )
+ end
+
+ let!(:finding_with_incorrect_uuid) do
+ create_finding!(
+ vulnerability_id: v1.id,
+ project_id: project.id,
+ scanner_id: scanner.id,
+ primary_identifier_id: vulnerability_identifier.id,
+ report_type: 0, # "sast"
+ location_fingerprint: Gitlab::Database::ShaAttribute.serialize('ca41a2544e941a007a73a666cb0592b255316ab8'), # sha1('youshouldntusethis')
+ uuid: 'bd95c085-71aa-51d7-9bb6-08ae669c262e'
+ )
+ end
+
+ let(:v2) do
+ create_vulnerability!(
+ project_id: project.id,
+ author_id: user.id
+ )
+ end
+
+ let!(:finding_with_correct_uuid) do
+ create_finding!(
+ vulnerability_id: v2.id,
+ project_id: project.id,
+ primary_identifier_id: vulnerability_identifier.id,
+ scanner_id: scanner2.id,
+ report_type: 0, # "sast"
+ location_fingerprint: Gitlab::Database::ShaAttribute.serialize('ca41a2544e941a007a73a666cb0592b255316ab8'), # sha1('youshouldntusethis')
+ uuid: '91984483-5efe-5215-b471-d524ac5792b1'
+ )
+ end
+
+ let(:v3) do
+ create_vulnerability!(
+ project_id: project.id,
+ author_id: user.id
+ )
+ end
+
+ let!(:finding_with_incorrect_uuid2) do
+ create_finding!(
+ vulnerability_id: v3.id,
+ project_id: project.id,
+ scanner_id: scanner.id,
+ primary_identifier_id: vulnerability_identfier2.id,
+ report_type: 0, # "sast"
+ location_fingerprint: Gitlab::Database::ShaAttribute.serialize('ca41a2544e941a007a73a666cb0592b255316ab8'), # sha1('youshouldntusethis')
+ uuid: '00000000-1111-2222-3333-444444444444'
+ )
+ end
+
+ let(:v4) do
+ create_vulnerability!(
+ project_id: project.id,
+ author_id: user.id
+ )
+ end
+
+ let!(:finding_with_correct_uuid2) do
+ create_finding!(
+ vulnerability_id: v4.id,
+ project_id: project.id,
+ scanner_id: scanner2.id,
+ primary_identifier_id: vulnerability_identfier2.id,
+ report_type: 0, # "sast"
+ location_fingerprint: Gitlab::Database::ShaAttribute.serialize('ca41a2544e941a007a73a666cb0592b255316ab8'), # sha1('youshouldntusethis')
+ uuid: '1edd751e-ef9a-5391-94db-a832c8635bfc'
+ )
+ end
+
+ let!(:finding_with_incorrect_uuid3) do
+ create_finding!(
+ vulnerability_id: nil,
+ project_id: project.id,
+ scanner_id: scanner.id,
+ primary_identifier_id: vulnerability_identifier3.id,
+ report_type: 0, # "sast"
+ location_fingerprint: Gitlab::Database::ShaAttribute.serialize('ca41a2544e941a007a73a666cb0592b255316ab8'), # sha1('youshouldntusethis')
+ uuid: '22222222-3333-4444-5555-666666666666'
+ )
+ end
+
+ let!(:duplicate_not_in_the_same_batch) do
+ create_finding!(
+ id: 99999,
+ vulnerability_id: nil,
+ project_id: project.id,
+ scanner_id: scanner2.id,
+ primary_identifier_id: vulnerability_identifier3.id,
+ report_type: 0, # "sast"
+ location_fingerprint: Gitlab::Database::ShaAttribute.serialize('ca41a2544e941a007a73a666cb0592b255316ab8'), # sha1('youshouldntusethis')
+ uuid: '4564f9d5-3c6b-5cc3-af8c-7c25285362a7'
+ )
+ end
+
+ let(:start_id) { finding_with_incorrect_uuid.id }
+ let(:end_id) { finding_with_incorrect_uuid3.id }
+
+ before do
+ 4.times do
+ create_finding_pipeline!(project_id: project.id, finding_id: finding_with_incorrect_uuid.id)
+ create_finding_pipeline!(project_id: project.id, finding_id: finding_with_correct_uuid.id)
+ create_finding_pipeline!(project_id: project.id, finding_id: finding_with_incorrect_uuid2.id)
+ create_finding_pipeline!(project_id: project.id, finding_id: finding_with_correct_uuid2.id)
+ end
+ end
+
+ it 'drops duplicates and related records', :aggregate_failures do
+ expect(vulnerability_findings.pluck(:id)).to match_array([
+ finding_with_correct_uuid.id, finding_with_incorrect_uuid.id, finding_with_correct_uuid2.id, finding_with_incorrect_uuid2.id, finding_with_incorrect_uuid3.id, duplicate_not_in_the_same_batch.id
+ ])
+
+ expect { subject }.to change(vulnerability_finding_pipelines, :count).from(16).to(8)
+ .and change(vulnerability_findings, :count).from(6).to(3)
+ .and change(vulnerabilities, :count).from(4).to(2)
+
+ expect(vulnerability_findings.pluck(:id)).to match_array([finding_with_incorrect_uuid.id, finding_with_incorrect_uuid2.id, finding_with_incorrect_uuid3.id])
+ end
+
+ context 'if there are conflicting UUID values within the batch' do # rubocop: disable RSpec/MultipleMemoizedHelpers
+ let(:end_id) { finding_with_broken_data_integrity.id }
+ let(:vulnerability_5) { create_vulnerability!(project_id: project.id, author_id: user.id) }
+ let(:different_project) { table(:projects).create!(namespace_id: namespace.id) }
+ let!(:identifier_with_broken_data_integrity) do
+ vulnerability_identifiers.create!(
+ project_id: different_project.id,
+ external_type: identifier_2,
+ external_id: identifier_2,
+ fingerprint: Gitlab::Database::ShaAttribute.serialize('4299e8ddd819f9bde9cfacf45716724c17b5ddf7'),
+ name: 'Identifier 2')
+ end
+
+ let(:finding_with_broken_data_integrity) do
+ create_finding!(
+ vulnerability_id: vulnerability_5,
+ project_id: project.id,
+ scanner_id: scanner.id,
+ primary_identifier_id: identifier_with_broken_data_integrity.id,
+ report_type: 0, # "sast"
+ location_fingerprint: Gitlab::Database::ShaAttribute.serialize('ca41a2544e941a007a73a666cb0592b255316ab8'), # sha1('youshouldntusethis')
+ uuid: SecureRandom.uuid
+ )
+ end
+
+ it 'deletes the conflicting record' do
+ expect { subject }.to change { vulnerability_findings.find_by_id(finding_with_broken_data_integrity.id) }.to(nil)
+ end
+ end
+
+ context 'if a conflicting UUID is found during the migration' do # rubocop:disable RSpec/MultipleMemoizedHelpers
+ let(:finding_class) { Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrencesUuid::VulnerabilitiesFinding }
+ let(:uuid) { '4564f9d5-3c6b-5cc3-af8c-7c25285362a7' }
+
+ before do
+ exception = ActiveRecord::RecordNotUnique.new("(uuid)=(#{uuid})")
+
+ call_count = 0
+ allow(::Gitlab::Database::BulkUpdate).to receive(:execute) do
+ call_count += 1
+ call_count.eql?(1) ? raise(exception) : {}
+ end
+
+ allow(finding_class).to receive(:find_by).with(uuid: uuid).and_return(duplicate_not_in_the_same_batch)
+ end
+
+ it 'retries the recalculation' do
+ subject
+
+ expect(Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrencesUuid::VulnerabilitiesFinding).to have_received(:find_by).with(uuid: uuid).once
+ end
+
+ it 'logs the conflict' do
+ expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance|
+ expect(instance).to receive(:info).exactly(6).times
+ end
+
+ subject
+ end
+
+ it 'marks the job as done' do
+ create_background_migration_job([start_id, end_id], :pending)
+
+ subject
+
+ expect(pending_jobs.count).to eq(0)
+ expect(succeeded_jobs.count).to eq(1)
+ end
+ end
+
+ it 'logs an exception if a different uniqueness problem was found' do
+ exception = ActiveRecord::RecordNotUnique.new("Totally not an UUID uniqueness problem")
+ allow(::Gitlab::Database::BulkUpdate).to receive(:execute).and_raise(exception)
+ allow(Gitlab::ErrorTracking).to receive(:track_and_raise_exception)
+
+ subject
+
+ expect(Gitlab::ErrorTracking).to have_received(:track_and_raise_exception).with(exception).once
+ end
+
+ it 'logs a duplicate found message' do
+ expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance|
+ expect(instance).to receive(:info).exactly(3).times
+ end
+
+ subject
+ end
+ end
+
+ context 'when finding has a signature' do
+ before do
+ @f1 = create_finding!(
+ vulnerability_id: nil,
+ project_id: project.id,
+ scanner_id: scanner.id,
+ primary_identifier_id: vulnerability_identifier.id,
+ report_type: 0, # "sast"
+ location_fingerprint: Gitlab::Database::ShaAttribute.serialize('ca41a2544e941a007a73a666cb0592b255316ab8'), # sha1('youshouldntusethis')
+ uuid: 'd15d774d-e4b1-5a1b-929b-19f2a53e35ec'
+ )
+
+ vulnerability_finding_signatures.create!(
+ finding_id: @f1.id,
+ algorithm_type: 2, # location
+ signature_sha: Gitlab::Database::ShaAttribute.serialize('57d4e05205f6462a73f039a5b2751aa1ab344e6e') # sha1('youshouldusethis')
+ )
+
+ vulnerability_finding_signatures.create!(
+ finding_id: @f1.id,
+ algorithm_type: 1, # hash
+ signature_sha: Gitlab::Database::ShaAttribute.serialize('c554d8d8df1a7a14319eafdaae24af421bf5b587') # sha1('andnotthis')
+ )
+
+ @f2 = create_finding!(
+ vulnerability_id: nil,
+ project_id: project.id,
+ scanner_id: scanner.id,
+ primary_identifier_id: vulnerability_identfier2.id,
+ report_type: 0, # "sast"
+ location_fingerprint: Gitlab::Database::ShaAttribute.serialize('ca41a2544e941a007a73a666cb0592b255316ab8'), # sha1('youshouldntusethis')
+ uuid: '4be029b5-75e5-5ac0-81a2-50ab41726135'
+ )
+
+ vulnerability_finding_signatures.create!(
+ finding_id: @f2.id,
+ algorithm_type: 2, # location
+ signature_sha: Gitlab::Database::ShaAttribute.serialize('57d4e05205f6462a73f039a5b2751aa1ab344e6e') # sha1('youshouldusethis')
+ )
+
+ vulnerability_finding_signatures.create!(
+ finding_id: @f2.id,
+ algorithm_type: 1, # hash
+ signature_sha: Gitlab::Database::ShaAttribute.serialize('c554d8d8df1a7a14319eafdaae24af421bf5b587') # sha1('andnotthis')
+ )
+ end
+
+ let(:start_id) { @f1.id }
+ let(:end_id) { @f2.id }
+
+ let(:uuids_before) { [@f1.uuid, @f2.uuid] }
+ let(:uuids_after) { %w[d3b60ddd-d312-5606-b4d3-ad058eebeacb 349d9bec-c677-5530-a8ac-5e58889c3b1a] }
+
+ it 'is recalculated using signature' do
+ expect(vulnerability_findings.pluck(:uuid)).to match_array(uuids_before)
+
+ subject
+
+ expect(vulnerability_findings.pluck(:uuid)).to match_array(uuids_after)
+ end
+ end
+
+ context 'if all records are removed before the job ran' do
+ let(:start_id) { 1 }
+ let(:end_id) { 9 }
+
+ before do
+ create_background_migration_job([start_id, end_id], :pending)
+ end
+
+ it 'does not error out' do
+ expect { subject }.not_to raise_error
+ end
+
+ it 'marks the job as done' do
+ subject
+
+ expect(pending_jobs.count).to eq(0)
+ expect(succeeded_jobs.count).to eq(1)
end
end
context 'when recalculation fails' do
before do
@uuid_v4 = create_finding!(
- vulnerability_id: vulnerability_for_uuidv4.id,
+ vulnerability_id: nil,
project_id: project.id,
- scanner_id: different_scanner.id,
- primary_identifier_id: different_vulnerability_identifier.id,
+ scanner_id: scanner2.id,
+ primary_identifier_id: vulnerability_identfier2.id,
report_type: 0, # "sast"
location_fingerprint: Gitlab::Database::ShaAttribute.serialize("fa18f432f1d56675f4098d318739c3cd5b14eb3e"),
uuid: known_uuid_v4
)
- allow(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
+ allow(Gitlab::ErrorTracking).to receive(:track_and_raise_exception)
allow(::Gitlab::Database::BulkUpdate).to receive(:execute).and_raise(expected_error)
end
- let(:finding) { @uuid_v4 }
+ let(:start_id) { @uuid_v4.id }
+ let(:end_id) { @uuid_v4.id }
let(:expected_error) { RuntimeError.new }
it 'captures the errors and does not crash entirely' do
expect { subject }.not_to raise_error
- expect(Gitlab::ErrorTracking).to have_received(:track_and_raise_for_dev_exception).with(expected_error).once
+ allow(Gitlab::ErrorTracking).to receive(:track_and_raise_exception)
+ expect(Gitlab::ErrorTracking).to have_received(:track_and_raise_exception).with(expected_error).once
end
end
@@ -149,25 +491,28 @@ RSpec.describe Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrence
# rubocop:disable Metrics/ParameterLists
def create_finding!(
+ id: nil,
vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:,
name: "test", severity: 7, confidence: 7, report_type: 0,
project_fingerprint: '123qweasdzxc', location_fingerprint: 'test',
metadata_version: 'test', raw_metadata: 'test', uuid: 'test')
- vulnerabilities_findings.create!(
- vulnerability_id: vulnerability_id,
- project_id: project_id,
- name: name,
- severity: severity,
- confidence: confidence,
- report_type: report_type,
- project_fingerprint: project_fingerprint,
- scanner_id: scanner.id,
- primary_identifier_id: vulnerability_identifier.id,
- location_fingerprint: location_fingerprint,
- metadata_version: metadata_version,
- raw_metadata: raw_metadata,
- uuid: uuid
- )
+ vulnerability_findings.create!({
+ id: id,
+ vulnerability_id: vulnerability_id,
+ project_id: project_id,
+ name: name,
+ severity: severity,
+ confidence: confidence,
+ report_type: report_type,
+ project_fingerprint: project_fingerprint,
+ scanner_id: scanner_id,
+ primary_identifier_id: primary_identifier_id,
+ location_fingerprint: location_fingerprint,
+ metadata_version: metadata_version,
+ raw_metadata: raw_metadata,
+ uuid: uuid
+ }.compact
+ )
end
# rubocop:enable Metrics/ParameterLists
@@ -181,4 +526,9 @@ RSpec.describe Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrence
confirmed_at: confirmed_at
)
end
+
+ def create_finding_pipeline!(project_id:, finding_id:)
+ pipeline = table(:ci_pipelines).create!(project_id: project_id)
+ vulnerability_finding_pipelines.create!(pipeline_id: pipeline.id, occurrence_id: finding_id)
+ end
end
diff --git a/spec/lib/gitlab/background_migration/remove_duplicate_services_spec.rb b/spec/lib/gitlab/background_migration/remove_duplicate_services_spec.rb
deleted file mode 100644
index afcdaaf1cb8..00000000000
--- a/spec/lib/gitlab/background_migration/remove_duplicate_services_spec.rb
+++ /dev/null
@@ -1,121 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::RemoveDuplicateServices, :migration, schema: 20181228175414 do
- let_it_be(:users) { table(:users) }
- let_it_be(:namespaces) { table(:namespaces) }
- let_it_be(:projects) { table(:projects) }
- let_it_be(:services) { table(:services) }
-
- let_it_be(:alerts_service_data) { table(:alerts_service_data) }
- let_it_be(:chat_names) { table(:chat_names) }
- let_it_be(:issue_tracker_data) { table(:issue_tracker_data) }
- let_it_be(:jira_tracker_data) { table(:jira_tracker_data) }
- let_it_be(:open_project_tracker_data) { table(:open_project_tracker_data) }
- let_it_be(:slack_integrations) { table(:slack_integrations) }
- let_it_be(:web_hooks) { table(:web_hooks) }
-
- let_it_be(:data_tables) do
- [alerts_service_data, chat_names, issue_tracker_data, jira_tracker_data, open_project_tracker_data, slack_integrations, web_hooks]
- end
-
- let!(:user) { users.create!(id: 1, projects_limit: 100) }
- let!(:namespace) { namespaces.create!(id: 1, name: 'group', path: 'group') }
-
- # project without duplicate services
- let!(:project1) { projects.create!(id: 1, namespace_id: namespace.id) }
- let!(:service1) { services.create!(id: 1, project_id: project1.id, type: 'AsanaService') }
- let!(:service2) { services.create!(id: 2, project_id: project1.id, type: 'JiraService') }
- let!(:service3) { services.create!(id: 3, project_id: project1.id, type: 'SlackService') }
-
- # project with duplicate services
- let!(:project2) { projects.create!(id: 2, namespace_id: namespace.id) }
- let!(:service4) { services.create!(id: 4, project_id: project2.id, type: 'AsanaService') }
- let!(:service5) { services.create!(id: 5, project_id: project2.id, type: 'JiraService') }
- let!(:service6) { services.create!(id: 6, project_id: project2.id, type: 'JiraService') }
- let!(:service7) { services.create!(id: 7, project_id: project2.id, type: 'SlackService') }
- let!(:service8) { services.create!(id: 8, project_id: project2.id, type: 'SlackService') }
- let!(:service9) { services.create!(id: 9, project_id: project2.id, type: 'SlackService') }
-
- # project with duplicate services and dependant records
- let!(:project3) { projects.create!(id: 3, namespace_id: namespace.id) }
- let!(:service10) { services.create!(id: 10, project_id: project3.id, type: 'AlertsService') }
- let!(:service11) { services.create!(id: 11, project_id: project3.id, type: 'AlertsService') }
- let!(:service12) { services.create!(id: 12, project_id: project3.id, type: 'SlashCommandsService') }
- let!(:service13) { services.create!(id: 13, project_id: project3.id, type: 'SlashCommandsService') }
- let!(:service14) { services.create!(id: 14, project_id: project3.id, type: 'IssueTrackerService') }
- let!(:service15) { services.create!(id: 15, project_id: project3.id, type: 'IssueTrackerService') }
- let!(:service16) { services.create!(id: 16, project_id: project3.id, type: 'JiraService') }
- let!(:service17) { services.create!(id: 17, project_id: project3.id, type: 'JiraService') }
- let!(:service18) { services.create!(id: 18, project_id: project3.id, type: 'OpenProjectService') }
- let!(:service19) { services.create!(id: 19, project_id: project3.id, type: 'OpenProjectService') }
- let!(:service20) { services.create!(id: 20, project_id: project3.id, type: 'SlackService') }
- let!(:service21) { services.create!(id: 21, project_id: project3.id, type: 'SlackService') }
- let!(:dependant_records) do
- alerts_service_data.create!(id: 1, service_id: service10.id)
- alerts_service_data.create!(id: 2, service_id: service11.id)
- chat_names.create!(id: 1, service_id: service12.id, user_id: user.id, team_id: 'team1', chat_id: 'chat1')
- chat_names.create!(id: 2, service_id: service13.id, user_id: user.id, team_id: 'team2', chat_id: 'chat2')
- issue_tracker_data.create!(id: 1, service_id: service14.id)
- issue_tracker_data.create!(id: 2, service_id: service15.id)
- jira_tracker_data.create!(id: 1, service_id: service16.id)
- jira_tracker_data.create!(id: 2, service_id: service17.id)
- open_project_tracker_data.create!(id: 1, service_id: service18.id)
- open_project_tracker_data.create!(id: 2, service_id: service19.id)
- slack_integrations.create!(id: 1, service_id: service20.id, user_id: user.id, team_id: 'team1', team_name: 'team1', alias: 'alias1')
- slack_integrations.create!(id: 2, service_id: service21.id, user_id: user.id, team_id: 'team2', team_name: 'team2', alias: 'alias2')
- web_hooks.create!(id: 1, service_id: service20.id)
- web_hooks.create!(id: 2, service_id: service21.id)
- end
-
- # project without services
- let!(:project4) { projects.create!(id: 4, namespace_id: namespace.id) }
-
- it 'removes duplicate services and dependant records' do
- # Determine which services we expect to keep
- expected_services = projects.pluck(:id).each_with_object({}) do |project_id, map|
- project_services = services.where(project_id: project_id)
- types = project_services.distinct.pluck(:type)
-
- map[project_id] = types.map { |type| project_services.where(type: type).take!.id }
- end
-
- expect do
- subject.perform(project2.id, project3.id)
- end.to change { services.count }.from(21).to(12)
-
- services1 = services.where(project_id: project1.id)
- expect(services1.count).to be(3)
- expect(services1.pluck(:type)).to contain_exactly('AsanaService', 'JiraService', 'SlackService')
- expect(services1.pluck(:id)).to contain_exactly(*expected_services[project1.id])
-
- services2 = services.where(project_id: project2.id)
- expect(services2.count).to be(3)
- expect(services2.pluck(:type)).to contain_exactly('AsanaService', 'JiraService', 'SlackService')
- expect(services2.pluck(:id)).to contain_exactly(*expected_services[project2.id])
-
- services3 = services.where(project_id: project3.id)
- expect(services3.count).to be(6)
- expect(services3.pluck(:type)).to contain_exactly('AlertsService', 'SlashCommandsService', 'IssueTrackerService', 'JiraService', 'OpenProjectService', 'SlackService')
- expect(services3.pluck(:id)).to contain_exactly(*expected_services[project3.id])
-
- kept_services = expected_services.values.flatten
- data_tables.each do |table|
- expect(table.count).to be(1)
- expect(kept_services).to include(table.pluck(:service_id).first)
- end
- end
-
- it 'does not delete services without duplicates' do
- expect do
- subject.perform(project1.id, project4.id)
- end.not_to change { services.count }
- end
-
- it 'only deletes duplicate services for the current batch' do
- expect do
- subject.perform(project2.id)
- end.to change { services.count }.by(-3)
- end
-end
diff --git a/spec/lib/gitlab/background_migration/remove_vulnerability_finding_links_spec.rb b/spec/lib/gitlab/background_migration/remove_vulnerability_finding_links_spec.rb
index fadee64886f..ccf96e036ae 100644
--- a/spec/lib/gitlab/background_migration/remove_vulnerability_finding_links_spec.rb
+++ b/spec/lib/gitlab/background_migration/remove_vulnerability_finding_links_spec.rb
@@ -41,8 +41,8 @@ RSpec.describe Gitlab::BackgroundMigration::RemoveVulnerabilityFindingLinks, :mi
# vulnerability finding links
let!(:links) do
{
- findings.first => Array.new(5) { |id| finding_links.create!(vulnerability_occurrence_id: findings.first.id, name: "Link Name 1", url: "link_url1.example") },
- findings.second => Array.new(5) { |id| finding_links.create!(vulnerability_occurrence_id: findings.second.id, name: "Link Name 2", url: "link_url2.example") }
+ findings.first => Array.new(5) { |id| finding_links.create!(vulnerability_occurrence_id: findings.first.id, name: "Link Name 1", url: "link_url1_#{id}.example") },
+ findings.second => Array.new(5) { |id| finding_links.create!(vulnerability_occurrence_id: findings.second.id, name: "Link Name 2", url: "link_url2_#{id}.example") }
}
end
diff --git a/spec/lib/gitlab/background_migration/wrongfully_confirmed_email_unconfirmer_spec.rb b/spec/lib/gitlab/background_migration/wrongfully_confirmed_email_unconfirmer_spec.rb
index 5c197526a55..17fe25c7f71 100644
--- a/spec/lib/gitlab/background_migration/wrongfully_confirmed_email_unconfirmer_spec.rb
+++ b/spec/lib/gitlab/background_migration/wrongfully_confirmed_email_unconfirmer_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::WrongfullyConfirmedEmailUnconfirmer, schema: 20181228175414 do
+RSpec.describe Gitlab::BackgroundMigration::WrongfullyConfirmedEmailUnconfirmer, schema: 20210301200959 do
let(:users) { table(:users) }
let(:emails) { table(:emails) }
let(:user_synced_attributes_metadata) { table(:user_synced_attributes_metadata) }
diff --git a/spec/lib/gitlab/checks/changes_access_spec.rb b/spec/lib/gitlab/checks/changes_access_spec.rb
index 633c4baa931..1cb4edd7337 100644
--- a/spec/lib/gitlab/checks/changes_access_spec.rb
+++ b/spec/lib/gitlab/checks/changes_access_spec.rb
@@ -44,16 +44,30 @@ RSpec.describe Gitlab::Checks::ChangesAccess do
it 'calls #new_commits' do
expect(project.repository).to receive(:new_commits).and_call_original
- expect(subject.commits).to eq([])
+ expect(subject.commits).to match_array([])
end
context 'when changes contain empty revisions' do
- let(:changes) { [{ newrev: newrev }, { newrev: '' }, { newrev: Gitlab::Git::BLANK_SHA }] }
let(:expected_commit) { instance_double(Commit) }
- it 'returns only commits with non empty revisions' do
- expect(project.repository).to receive(:new_commits).with([newrev], { allow_quarantine: true }) { [expected_commit] }
- expect(subject.commits).to eq([expected_commit])
+ shared_examples 'returns only commits with non empty revisions' do
+ specify do
+ expect(project.repository).to receive(:new_commits).with([newrev], { allow_quarantine: allow_quarantine }) { [expected_commit] }
+ expect(subject.commits).to match_array([expected_commit])
+ end
+ end
+
+ it_behaves_like 'returns only commits with non empty revisions' do
+ let(:changes) { [{ oldrev: oldrev, newrev: newrev }, { newrev: '' }, { newrev: Gitlab::Git::BLANK_SHA }] }
+ let(:allow_quarantine) { true }
+ end
+
+ context 'without oldrev' do
+ it_behaves_like 'returns only commits with non empty revisions' do
+ let(:changes) { [{ newrev: newrev }, { newrev: '' }, { newrev: Gitlab::Git::BLANK_SHA }] }
+ # The quarantine directory should not be used because we're lacking oldrev.
+ let(:allow_quarantine) { false }
+ end
end
end
end
@@ -61,12 +75,13 @@ RSpec.describe Gitlab::Checks::ChangesAccess do
describe '#commits_for' do
let(:new_commits) { [] }
let(:expected_commits) { [] }
+ let(:oldrev) { Gitlab::Git::BLANK_SHA }
shared_examples 'a listing of new commits' do
it 'returns expected commits' do
expect(subject).to receive(:commits).and_return(new_commits)
- expect(subject.commits_for(newrev)).to eq(expected_commits)
+ expect(subject.commits_for(oldrev, newrev)).to eq(expected_commits)
end
end
@@ -172,6 +187,31 @@ RSpec.describe Gitlab::Checks::ChangesAccess do
it_behaves_like 'a listing of new commits'
end
+
+ context 'with over-push' do
+ let(:newrev) { '1' }
+ let(:oldrev) { '3' }
+
+ # `#new_commits` returns too many commits, where some commits are not
+ # part of the current change.
+ let(:new_commits) do
+ [
+ create_commit('1', %w[2]),
+ create_commit('2', %w[3]),
+ create_commit('3', %w[4]),
+ create_commit('4', %w[])
+ ]
+ end
+
+ let(:expected_commits) do
+ [
+ create_commit('1', %w[2]),
+ create_commit('2', %w[3])
+ ]
+ end
+
+ it_behaves_like 'a listing of new commits'
+ end
end
describe '#single_change_accesses' do
@@ -180,10 +220,10 @@ RSpec.describe Gitlab::Checks::ChangesAccess do
shared_examples '#single_change_access' do
before do
- commits_for.each do |id, commits|
+ commits_for.each do |oldrev, newrev, commits|
expect(subject)
.to receive(:commits_for)
- .with(id)
+ .with(oldrev, newrev)
.and_return(commits)
end
end
@@ -205,7 +245,12 @@ RSpec.describe Gitlab::Checks::ChangesAccess do
end
context 'with a single change and no new commits' do
- let(:commits_for) { { 'new' => [] } }
+ let(:commits_for) do
+ [
+ ['old', 'new', []]
+ ]
+ end
+
let(:changes) do
[
{ oldrev: 'old', newrev: 'new', ref: 'refs/heads/branch' }
@@ -222,7 +267,12 @@ RSpec.describe Gitlab::Checks::ChangesAccess do
end
context 'with a single change and new commits' do
- let(:commits_for) { { 'new' => [create_commit('new', [])] } }
+ let(:commits_for) do
+ [
+ ['old', 'new', [create_commit('new', [])]]
+ ]
+ end
+
let(:changes) do
[
{ oldrev: 'old', newrev: 'new', ref: 'refs/heads/branch' }
@@ -240,11 +290,11 @@ RSpec.describe Gitlab::Checks::ChangesAccess do
context 'with multiple changes' do
let(:commits_for) do
- {
- 'a' => [create_commit('a', [])],
- 'c' => [create_commit('c', [])],
- 'd' => []
- }
+ [
+ [nil, 'a', [create_commit('a', [])]],
+ ['a', 'c', [create_commit('c', [])]],
+ [nil, 'd', []]
+ ]
end
let(:changes) do
diff --git a/spec/lib/gitlab/ci/build/status/reason_spec.rb b/spec/lib/gitlab/ci/build/status/reason_spec.rb
new file mode 100644
index 00000000000..64f35c3f464
--- /dev/null
+++ b/spec/lib/gitlab/ci/build/status/reason_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Build::Status::Reason do
+ let(:build) { double('build') }
+
+ describe '.fabricate' do
+ context 'when failure symbol reason is being passed' do
+ it 'correctly fabricates a status reason object' do
+ reason = described_class.fabricate(build, :script_failure)
+
+ expect(reason.failure_reason_enum).to eq 1
+ end
+ end
+
+ context 'when another status reason object is being passed' do
+ it 'correctly fabricates a status reason object' do
+ reason = described_class.fabricate(build, :script_failure)
+
+ new_reason = described_class.fabricate(build, reason)
+
+ expect(new_reason.failure_reason_enum).to eq 1
+ end
+ end
+ end
+
+ describe '#failure_reason_enum' do
+ it 'exposes a failure reason enum' do
+ reason = described_class.fabricate(build, :script_failure)
+
+ enum = ::CommitStatus.failure_reasons[:script_failure]
+
+ expect(reason.failure_reason_enum).to eq enum
+ end
+ end
+
+ describe '#force_allow_failure?' do
+ context 'when build is not allowed to fail' do
+ context 'when build is allowed to fail with a given exit code' do
+ it 'returns true' do
+ reason = described_class.new(build, :script_failure, 11)
+
+ allow(build).to receive(:allow_failure?).and_return(false)
+ allow(build).to receive(:allowed_to_fail_with_code?)
+ .with(11)
+ .and_return(true)
+
+ expect(reason.force_allow_failure?).to be true
+ end
+ end
+
+ context 'when build is not allowed to fail regardless of an exit code' do
+ it 'returns false' do
+ reason = described_class.new(build, :script_failure, 11)
+
+ allow(build).to receive(:allow_failure?).and_return(false)
+ allow(build).to receive(:allowed_to_fail_with_code?)
+ .with(11)
+ .and_return(false)
+
+ expect(reason.force_allow_failure?).to be false
+ end
+ end
+
+ context 'when an exit code is not specified' do
+ it 'returns false' do
+ reason = described_class.new(build, :script_failure)
+
+ expect(reason.force_allow_failure?).to be false
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/entry/root_spec.rb b/spec/lib/gitlab/ci/config/entry/root_spec.rb
index d862fbf5b78..749d1386ed9 100644
--- a/spec/lib/gitlab/ci/config/entry/root_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/root_spec.rb
@@ -3,7 +3,9 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Config::Entry::Root do
- let(:root) { described_class.new(hash) }
+ let(:user) {}
+ let(:project) {}
+ let(:root) { described_class.new(hash, user: user, project: project) }
describe '.nodes' do
it 'returns a hash' do
@@ -53,6 +55,37 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
}
end
+ context 'when deprecated types keyword is defined' do
+ let(:project) { create(:project, :repository) }
+ let(:user) { create(:user) }
+
+ let(:hash) do
+ { types: %w(test deploy),
+ rspec: { script: 'rspec' } }
+ end
+
+ before do
+ root.compose!
+ end
+
+ it 'returns array of types as stages with a warning' do
+ expect(root.stages_value).to eq %w[test deploy]
+ expect(root.warnings).to match_array(["root `types` is deprecated in 9.0 and will be removed in 15.0."])
+ end
+
+ it 'logs usage of types keyword' do
+ expect(Gitlab::AppJsonLogger).to(
+ receive(:info)
+ .with(event: 'ci_used_deprecated_keyword',
+ entry: root[:stages].key.to_s,
+ user_id: user.id,
+ project_id: project.id)
+ )
+
+ root.compose!
+ end
+ end
+
describe '#compose!' do
before do
root.compose!
@@ -108,17 +141,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
expect(root.stages_value).to eq %w[build pages release]
end
end
-
- context 'when deprecated types key defined' do
- let(:hash) do
- { types: %w(test deploy),
- rspec: { script: 'rspec' } }
- end
-
- it 'returns array of types as stages' do
- expect(root.stages_value).to eq %w[test deploy]
- end
- end
end
describe '#jobs_value' do
diff --git a/spec/lib/gitlab/ci/jwt_v2_spec.rb b/spec/lib/gitlab/ci/jwt_v2_spec.rb
new file mode 100644
index 00000000000..33aaa145a39
--- /dev/null
+++ b/spec/lib/gitlab/ci/jwt_v2_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::JwtV2 do
+ let(:namespace) { build_stubbed(:namespace) }
+ let(:project) { build_stubbed(:project, namespace: namespace) }
+ let(:user) { build_stubbed(:user) }
+ let(:pipeline) { build_stubbed(:ci_pipeline, ref: 'auto-deploy-2020-03-19') }
+ let(:build) do
+ build_stubbed(
+ :ci_build,
+ project: project,
+ user: user,
+ pipeline: pipeline
+ )
+ end
+
+ subject(:ci_job_jwt_v2) { described_class.new(build, ttl: 30) }
+
+ it { is_expected.to be_a Gitlab::Ci::Jwt }
+
+ describe '#payload' do
+ subject(:payload) { ci_job_jwt_v2.payload }
+
+ it 'has correct values for the standard JWT attributes' do
+ aggregate_failures do
+ expect(payload[:iss]).to eq(Settings.gitlab.base_url)
+ expect(payload[:aud]).to eq(Settings.gitlab.base_url)
+ expect(payload[:sub]).to eq("project_path:#{project.full_path}:ref_type:branch:ref:#{pipeline.source_ref}")
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb
index 28bc685286f..0a592395c3a 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb
@@ -38,20 +38,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::CreateDeployments do
expect(job.deployment.environment).to eq(job.persisted_environment)
end
- context 'when creation failure occures' do
- before do
- allow_next_instance_of(Deployment) do |deployment|
- allow(deployment).to receive(:save!) { raise ActiveRecord::RecordInvalid }
- end
- end
-
- it 'trackes the exception' do
- expect { subject }.to raise_error(described_class::DeploymentCreationError)
-
- expect(Deployment.count).to eq(0)
- end
- end
-
context 'when the corresponding environment does not exist' do
let!(:environment) { }
diff --git a/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb
index 4206483b228..1d020d3ea79 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Create do
let_it_be(:user) { create(:user) }
let(:pipeline) do
- build(:ci_empty_pipeline, project: project, ref: 'master')
+ build(:ci_empty_pipeline, project: project, ref: 'master', user: user)
end
let(:command) do
@@ -59,7 +59,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Create do
context 'tags persistence' do
let(:stage) do
- build(:ci_stage_entity, pipeline: pipeline)
+ build(:ci_stage_entity, pipeline: pipeline, project: project)
end
let(:job) do
@@ -79,12 +79,11 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Create do
it 'extracts an empty tag list' do
expect(CommitStatus)
.to receive(:bulk_insert_tags!)
- .with(stage.statuses, {})
+ .with([job])
.and_call_original
step.perform!
- expect(job.instance_variable_defined?(:@tag_list)).to be_falsey
expect(job).to be_persisted
expect(job.tag_list).to eq([])
end
@@ -98,14 +97,13 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Create do
it 'bulk inserts tags' do
expect(CommitStatus)
.to receive(:bulk_insert_tags!)
- .with(stage.statuses, { job.name => %w[tag1 tag2] })
+ .with([job])
.and_call_original
step.perform!
- expect(job.instance_variable_defined?(:@tag_list)).to be_falsey
expect(job).to be_persisted
- expect(job.tag_list).to match_array(%w[tag1 tag2])
+ expect(job.reload.tag_list).to match_array(%w[tag1 tag2])
end
end
@@ -120,7 +118,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Create do
step.perform!
- expect(job.instance_variable_defined?(:@tag_list)).to be_truthy
expect(job).to be_persisted
expect(job.reload.tag_list).to match_array(%w[tag1 tag2])
end
diff --git a/spec/lib/gitlab/ci/pipeline/logger_spec.rb b/spec/lib/gitlab/ci/pipeline/logger_spec.rb
index 0b44e35dec1..a488bc184f8 100644
--- a/spec/lib/gitlab/ci/pipeline/logger_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/logger_spec.rb
@@ -41,6 +41,90 @@ RSpec.describe ::Gitlab::Ci::Pipeline::Logger do
end
end
+ describe '#instrument_with_sql', :request_store do
+ subject(:instrument_with_sql) do
+ logger.instrument_with_sql(:expensive_operation, &operation)
+ end
+
+ def loggable_data(count:, db_count: nil)
+ keys = %w[
+ expensive_operation_duration_s
+ expensive_operation_db_count
+ expensive_operation_db_primary_count
+ expensive_operation_db_primary_duration_s
+ expensive_operation_db_main_count
+ expensive_operation_db_main_duration_s
+ ]
+
+ data = keys.each.with_object({}) do |key, accumulator|
+ accumulator[key] = {
+ 'count' => count,
+ 'avg' => a_kind_of(Numeric),
+ 'max' => a_kind_of(Numeric),
+ 'min' => a_kind_of(Numeric)
+ }
+ end
+
+ if db_count
+ data['expensive_operation_db_count']['max'] = db_count
+ data['expensive_operation_db_count']['min'] = db_count
+ data['expensive_operation_db_count']['avg'] = db_count
+ end
+
+ data
+ end
+
+ context 'with a single query' do
+ let(:operation) { -> { Project.count } }
+
+ it { is_expected.to eq(operation.call) }
+
+ it 'includes SQL metrics' do
+ instrument_with_sql
+
+ expect(logger.observations_hash)
+ .to match(a_hash_including(loggable_data(count: 1, db_count: 1)))
+ end
+ end
+
+ context 'with multiple queries' do
+ let(:operation) { -> { Ci::Build.count + Ci::Bridge.count } }
+
+ it { is_expected.to eq(operation.call) }
+
+ it 'includes SQL metrics' do
+ instrument_with_sql
+
+ expect(logger.observations_hash)
+ .to match(a_hash_including(loggable_data(count: 1, db_count: 2)))
+ end
+ end
+
+ context 'with multiple observations' do
+ let(:operation) { -> { Ci::Build.count + Ci::Bridge.count } }
+
+ it 'includes SQL metrics' do
+ 2.times { logger.instrument_with_sql(:expensive_operation, &operation) }
+
+ expect(logger.observations_hash)
+ .to match(a_hash_including(loggable_data(count: 2, db_count: 2)))
+ end
+ end
+
+ context 'when there are no SQL operations' do
+ let(:operation) { -> { 123 } }
+
+ it { is_expected.to eq(operation.call) }
+
+ it 'does not include SQL metrics' do
+ instrument_with_sql
+
+ expect(logger.observations_hash.keys)
+ .to match_array(['expensive_operation_duration_s'])
+ end
+ end
+ end
+
describe '#observe' do
it 'records durations of observed operations' do
loggable_data = {
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
index 68806fbf287..2f9fcd7caac 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
let(:pipeline) { build(:ci_empty_pipeline, project: project, sha: head_sha) }
let(:root_variables) { [] }
- let(:seed_context) { double(pipeline: pipeline, root_variables: root_variables) }
+ let(:seed_context) { Gitlab::Ci::Pipeline::Seed::Context.new(pipeline, root_variables: root_variables) }
let(:attributes) { { name: 'rspec', ref: 'master', scheduling_type: :stage, when: 'on_success' } }
let(:previous_stages) { [] }
let(:current_stage) { double(seeds_names: [attributes[:name]]) }
diff --git a/spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb
index 5d8a9358e10..a76b4874eca 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Pipeline do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
- let(:seed_context) { double(pipeline: pipeline, root_variables: []) }
+ let(:seed_context) { Gitlab::Ci::Pipeline::Seed::Context.new(pipeline, root_variables: []) }
let(:stages_attributes) do
[
diff --git a/spec/lib/gitlab/ci/pipeline/seed/stage_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/stage_spec.rb
index 5b04d2abd88..a632b5dedcf 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/stage_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/stage_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Stage do
let(:project) { create(:project, :repository) }
let(:pipeline) { create(:ci_empty_pipeline, project: project) }
let(:previous_stages) { [] }
- let(:seed_context) { double(pipeline: pipeline, root_variables: []) }
+ let(:seed_context) { Gitlab::Ci::Pipeline::Seed::Context.new(pipeline, root_variables: []) }
let(:attributes) do
{ name: 'test',
diff --git a/spec/lib/gitlab/ci/status/build/waiting_for_approval_spec.rb b/spec/lib/gitlab/ci/status/build/waiting_for_approval_spec.rb
new file mode 100644
index 00000000000..b703a8a47ac
--- /dev/null
+++ b/spec/lib/gitlab/ci/status/build/waiting_for_approval_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Status::Build::WaitingForApproval do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+
+ subject { described_class.new(Gitlab::Ci::Status::Core.new(build, user)) }
+
+ describe '#illustration' do
+ let(:build) { create(:ci_build, :manual, environment: 'production', project: project) }
+
+ before do
+ environment = create(:environment, name: 'production', project: project)
+ create(:deployment, :blocked, project: project, environment: environment, deployable: build)
+ end
+
+ it { expect(subject.illustration).to include(:image, :size) }
+ it { expect(subject.illustration[:title]).to eq('Waiting for approval') }
+ it { expect(subject.illustration[:content]).to include('This job deploys to the protected environment "production"') }
+ end
+
+ describe '.matches?' do
+ subject { described_class.matches?(build, user) }
+
+ let(:build) { create(:ci_build, :manual, environment: 'production', project: project) }
+
+ before do
+ create(:deployment, deployment_status, deployable: build, project: project)
+ end
+
+ context 'when build is waiting for approval' do
+ let(:deployment_status) { :blocked }
+
+ it 'is a correct match' do
+ expect(subject).to be_truthy
+ end
+ end
+
+ context 'when build is not waiting for approval' do
+ let(:deployment_status) { :created }
+
+ it 'does not match' do
+ expect(subject).to be_falsey
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/tags/bulk_insert_spec.rb b/spec/lib/gitlab/ci/tags/bulk_insert_spec.rb
index 6c1f56de840..6c4f69fb036 100644
--- a/spec/lib/gitlab/ci/tags/bulk_insert_spec.rb
+++ b/spec/lib/gitlab/ci/tags/bulk_insert_spec.rb
@@ -5,27 +5,37 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Tags::BulkInsert do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
- let_it_be_with_refind(:job) { create(:ci_build, :unique_name, pipeline: pipeline, project: project) }
- let_it_be_with_refind(:other_job) { create(:ci_build, :unique_name, pipeline: pipeline, project: project) }
- let_it_be_with_refind(:bridge) { create(:ci_bridge, pipeline: pipeline, project: project) }
+ let_it_be_with_refind(:job) { create(:ci_build, :unique_name, pipeline: pipeline) }
+ let_it_be_with_refind(:other_job) { create(:ci_build, :unique_name, pipeline: pipeline) }
- let(:statuses) { [job, bridge, other_job] }
+ let(:statuses) { [job, other_job] }
- subject(:service) { described_class.new(statuses, tags_list) }
+ subject(:service) { described_class.new(statuses) }
+
+ describe 'gem version' do
+ let(:acceptable_version) { '9.0.0' }
+
+ let(:error_message) do
+ <<~MESSAGE
+ A mechanism depending on internals of `acts-as-taggable-on` has been designed
+ to bulk insert tags for Ci::Build records.
+ Please review the code carefully before updating the gem version
+ https://gitlab.com/gitlab-org/gitlab/-/issues/350053
+ MESSAGE
+ end
+
+ it { expect(ActsAsTaggableOn::VERSION).to eq(acceptable_version), error_message }
+ end
describe '#insert!' do
context 'without tags' do
- let(:tags_list) { {} }
-
it { expect(service.insert!).to be_falsey }
end
context 'with tags' do
- let(:tags_list) do
- {
- job.name => %w[tag1 tag2],
- other_job.name => %w[tag2 tag3 tag4]
- }
+ before do
+ job.tag_list = %w[tag1 tag2]
+ other_job.tag_list = %w[tag2 tag3 tag4]
end
it 'persists tags' do
@@ -35,5 +45,18 @@ RSpec.describe Gitlab::Ci::Tags::BulkInsert do
expect(other_job.reload.tag_list).to match_array(%w[tag2 tag3 tag4])
end
end
+
+ context 'with tags for only one job' do
+ before do
+ job.tag_list = %w[tag1 tag2]
+ end
+
+ it 'persists tags' do
+ expect(service.insert!).to be_truthy
+
+ expect(job.reload.tag_list).to match_array(%w[tag1 tag2])
+ expect(other_job.reload.tag_list).to be_empty
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/trace/remote_checksum_spec.rb b/spec/lib/gitlab/ci/trace/remote_checksum_spec.rb
index 8837ebc3652..1cd88034166 100644
--- a/spec/lib/gitlab/ci/trace/remote_checksum_spec.rb
+++ b/spec/lib/gitlab/ci/trace/remote_checksum_spec.rb
@@ -30,14 +30,6 @@ RSpec.describe Gitlab::Ci::Trace::RemoteChecksum do
context 'with remote files' do
let(:file_store) { JobArtifactUploader::Store::REMOTE }
- context 'when the feature flag is disabled' do
- before do
- stub_feature_flags(ci_archived_build_trace_checksum: false)
- end
-
- it { is_expected.to be_nil }
- end
-
context 'with AWS as provider' do
it { is_expected.to eq(checksum) }
end
diff --git a/spec/lib/gitlab/ci/variables/builder_spec.rb b/spec/lib/gitlab/ci/variables/builder_spec.rb
index 5ff34592b2f..8a87cbe45c1 100644
--- a/spec/lib/gitlab/ci/variables/builder_spec.rb
+++ b/spec/lib/gitlab/ci/variables/builder_spec.rb
@@ -3,25 +3,201 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Variables::Builder do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be(:user) { project.owner }
+ let_it_be(:job) do
+ create(:ci_build,
+ pipeline: pipeline,
+ user: user,
+ yaml_variables: [{ key: 'YAML_VARIABLE', value: 'value' }]
+ )
+ end
+
let(:builder) { described_class.new(pipeline) }
- let(:pipeline) { create(:ci_pipeline) }
- let(:job) { create(:ci_build, pipeline: pipeline) }
describe '#scoped_variables' do
let(:environment) { job.expanded_environment_name }
let(:dependencies) { true }
+ let(:predefined_variables) do
+ [
+ { key: 'CI_JOB_NAME',
+ value: job.name },
+ { key: 'CI_JOB_STAGE',
+ value: job.stage },
+ { key: 'CI_NODE_TOTAL',
+ value: '1' },
+ { key: 'CI_BUILD_NAME',
+ value: job.name },
+ { key: 'CI_BUILD_STAGE',
+ value: job.stage },
+ { key: 'CI',
+ value: 'true' },
+ { key: 'GITLAB_CI',
+ value: 'true' },
+ { key: 'CI_SERVER_URL',
+ value: Gitlab.config.gitlab.url },
+ { key: 'CI_SERVER_HOST',
+ value: Gitlab.config.gitlab.host },
+ { key: 'CI_SERVER_PORT',
+ value: Gitlab.config.gitlab.port.to_s },
+ { key: 'CI_SERVER_PROTOCOL',
+ value: Gitlab.config.gitlab.protocol },
+ { key: 'CI_SERVER_NAME',
+ value: 'GitLab' },
+ { key: 'CI_SERVER_VERSION',
+ value: Gitlab::VERSION },
+ { key: 'CI_SERVER_VERSION_MAJOR',
+ value: Gitlab.version_info.major.to_s },
+ { key: 'CI_SERVER_VERSION_MINOR',
+ value: Gitlab.version_info.minor.to_s },
+ { key: 'CI_SERVER_VERSION_PATCH',
+ value: Gitlab.version_info.patch.to_s },
+ { key: 'CI_SERVER_REVISION',
+ value: Gitlab.revision },
+ { key: 'GITLAB_FEATURES',
+ value: project.licensed_features.join(',') },
+ { key: 'CI_PROJECT_ID',
+ value: project.id.to_s },
+ { key: 'CI_PROJECT_NAME',
+ value: project.path },
+ { key: 'CI_PROJECT_TITLE',
+ value: project.title },
+ { key: 'CI_PROJECT_PATH',
+ value: project.full_path },
+ { key: 'CI_PROJECT_PATH_SLUG',
+ value: project.full_path_slug },
+ { key: 'CI_PROJECT_NAMESPACE',
+ value: project.namespace.full_path },
+ { key: 'CI_PROJECT_ROOT_NAMESPACE',
+ value: project.namespace.root_ancestor.path },
+ { key: 'CI_PROJECT_URL',
+ value: project.web_url },
+ { key: 'CI_PROJECT_VISIBILITY',
+ value: "private" },
+ { key: 'CI_PROJECT_REPOSITORY_LANGUAGES',
+ value: project.repository_languages.map(&:name).join(',').downcase },
+ { key: 'CI_PROJECT_CLASSIFICATION_LABEL',
+ value: project.external_authorization_classification_label },
+ { key: 'CI_DEFAULT_BRANCH',
+ value: project.default_branch },
+ { key: 'CI_CONFIG_PATH',
+ value: project.ci_config_path_or_default },
+ { key: 'CI_PAGES_DOMAIN',
+ value: Gitlab.config.pages.host },
+ { key: 'CI_PAGES_URL',
+ value: project.pages_url },
+ { key: 'CI_API_V4_URL',
+ value: API::Helpers::Version.new('v4').root_url },
+ { key: 'CI_PIPELINE_IID',
+ value: pipeline.iid.to_s },
+ { key: 'CI_PIPELINE_SOURCE',
+ value: pipeline.source },
+ { key: 'CI_PIPELINE_CREATED_AT',
+ value: pipeline.created_at.iso8601 },
+ { key: 'CI_COMMIT_SHA',
+ value: job.sha },
+ { key: 'CI_COMMIT_SHORT_SHA',
+ value: job.short_sha },
+ { key: 'CI_COMMIT_BEFORE_SHA',
+ value: job.before_sha },
+ { key: 'CI_COMMIT_REF_NAME',
+ value: job.ref },
+ { key: 'CI_COMMIT_REF_SLUG',
+ value: job.ref_slug },
+ { key: 'CI_COMMIT_BRANCH',
+ value: job.ref },
+ { key: 'CI_COMMIT_MESSAGE',
+ value: pipeline.git_commit_message },
+ { key: 'CI_COMMIT_TITLE',
+ value: pipeline.git_commit_title },
+ { key: 'CI_COMMIT_DESCRIPTION',
+ value: pipeline.git_commit_description },
+ { key: 'CI_COMMIT_REF_PROTECTED',
+ value: (!!pipeline.protected_ref?).to_s },
+ { key: 'CI_COMMIT_TIMESTAMP',
+ value: pipeline.git_commit_timestamp },
+ { key: 'CI_COMMIT_AUTHOR',
+ value: pipeline.git_author_full_text },
+ { key: 'CI_BUILD_REF',
+ value: job.sha },
+ { key: 'CI_BUILD_BEFORE_SHA',
+ value: job.before_sha },
+ { key: 'CI_BUILD_REF_NAME',
+ value: job.ref },
+ { key: 'CI_BUILD_REF_SLUG',
+ value: job.ref_slug },
+ { key: 'YAML_VARIABLE',
+ value: 'value' },
+ { key: 'GITLAB_USER_ID',
+ value: user.id.to_s },
+ { key: 'GITLAB_USER_EMAIL',
+ value: user.email },
+ { key: 'GITLAB_USER_LOGIN',
+ value: user.username },
+ { key: 'GITLAB_USER_NAME',
+ value: user.name }
+ ].map { |var| var.merge(public: true, masked: false) }
+ end
subject { builder.scoped_variables(job, environment: environment, dependencies: dependencies) }
- it 'returns the expected variables' do
- keys = %w[CI_JOB_NAME
- CI_JOB_STAGE
- CI_NODE_TOTAL
- CI_BUILD_NAME
- CI_BUILD_STAGE]
+ it { is_expected.to be_instance_of(Gitlab::Ci::Variables::Collection) }
+
+ it { expect(subject.to_runner_variables).to eq(predefined_variables) }
+
+ context 'variables ordering' do
+ def var(name, value)
+ { key: name, value: value.to_s, public: true, masked: false }
+ end
+
+ before do
+ allow(builder).to receive(:predefined_variables) { [var('A', 1), var('B', 1)] }
+ allow(project).to receive(:predefined_variables) { [var('B', 2), var('C', 2)] }
+ allow(pipeline).to receive(:predefined_variables) { [var('C', 3), var('D', 3)] }
+ allow(job).to receive(:runner) { double(predefined_variables: [var('D', 4), var('E', 4)]) }
+ allow(builder).to receive(:kubernetes_variables) { [var('E', 5), var('F', 5)] }
+ allow(builder).to receive(:deployment_variables) { [var('F', 6), var('G', 6)] }
+ allow(job).to receive(:yaml_variables) { [var('G', 7), var('H', 7)] }
+ allow(builder).to receive(:user_variables) { [var('H', 8), var('I', 8)] }
+ allow(job).to receive(:dependency_variables) { [var('I', 9), var('J', 9)] }
+ allow(builder).to receive(:secret_instance_variables) { [var('J', 10), var('K', 10)] }
+ allow(builder).to receive(:secret_group_variables) { [var('K', 11), var('L', 11)] }
+ allow(builder).to receive(:secret_project_variables) { [var('L', 12), var('M', 12)] }
+ allow(job).to receive(:trigger_request) { double(user_variables: [var('M', 13), var('N', 13)]) }
+ allow(pipeline).to receive(:variables) { [var('N', 14), var('O', 14)] }
+ allow(pipeline).to receive(:pipeline_schedule) { double(job_variables: [var('O', 15), var('P', 15)]) }
+ end
+
+ it 'returns variables in order depending on resource hierarchy' do
+ expect(subject.to_runner_variables).to eq(
+ [var('A', 1), var('B', 1),
+ var('B', 2), var('C', 2),
+ var('C', 3), var('D', 3),
+ var('D', 4), var('E', 4),
+ var('E', 5), var('F', 5),
+ var('F', 6), var('G', 6),
+ var('G', 7), var('H', 7),
+ var('H', 8), var('I', 8),
+ var('I', 9), var('J', 9),
+ var('J', 10), var('K', 10),
+ var('K', 11), var('L', 11),
+ var('L', 12), var('M', 12),
+ var('M', 13), var('N', 13),
+ var('N', 14), var('O', 14),
+ var('O', 15), var('P', 15)])
+ end
- subject.map { |env| env[:key] }.tap do |names|
- expect(names).to include(*keys)
+ it 'overrides duplicate keys depending on resource hierarchy' do
+ expect(subject.to_hash).to match(
+ 'A' => '1', 'B' => '2',
+ 'C' => '3', 'D' => '4',
+ 'E' => '5', 'F' => '6',
+ 'G' => '7', 'H' => '8',
+ 'I' => '9', 'J' => '10',
+ 'K' => '11', 'L' => '12',
+ 'M' => '13', 'N' => '14',
+ 'O' => '15', 'P' => '15')
end
end
end
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index e8b38b21ef8..20af84ce648 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -2097,6 +2097,12 @@ module Gitlab
it_behaves_like 'returns errors', 'test1 job: need deploy is not defined in current or prior stages'
end
+ context 'duplicate needs' do
+ let(:needs) { %w(build1 build1) }
+
+ it_behaves_like 'returns errors', 'test1 has duplicate entries in the needs section.'
+ end
+
context 'needs and dependencies that are mismatching' do
let(:needs) { %w(build1) }
let(:dependencies) { %w(build2) }
@@ -2602,7 +2608,7 @@ module Gitlab
end
context 'returns errors if job stage is not a defined stage' do
- let(:config) { YAML.dump({ types: %w(build test), rspec: { script: "test", type: "acceptance" } }) }
+ let(:config) { YAML.dump({ stages: %w(build test), rspec: { script: "test", type: "acceptance" } }) }
it_behaves_like 'returns errors', 'rspec job: chosen stage does not exist; available stages are .pre, build, test, .post'
end
@@ -2638,37 +2644,37 @@ module Gitlab
end
context 'returns errors if job artifacts:name is not an a string' do
- let(:config) { YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { name: 1 } } }) }
+ let(:config) { YAML.dump({ stages: %w(build test), rspec: { script: "test", artifacts: { name: 1 } } }) }
it_behaves_like 'returns errors', 'jobs:rspec:artifacts name should be a string'
end
context 'returns errors if job artifacts:when is not an a predefined value' do
- let(:config) { YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { when: 1 } } }) }
+ let(:config) { YAML.dump({ stages: %w(build test), rspec: { script: "test", artifacts: { when: 1 } } }) }
it_behaves_like 'returns errors', 'jobs:rspec:artifacts when should be on_success, on_failure or always'
end
context 'returns errors if job artifacts:expire_in is not an a string' do
- let(:config) { YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { expire_in: 1 } } }) }
+ let(:config) { YAML.dump({ stages: %w(build test), rspec: { script: "test", artifacts: { expire_in: 1 } } }) }
it_behaves_like 'returns errors', 'jobs:rspec:artifacts expire in should be a duration'
end
context 'returns errors if job artifacts:expire_in is not an a valid duration' do
- let(:config) { YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { expire_in: "7 elephants" } } }) }
+ let(:config) { YAML.dump({ stages: %w(build test), rspec: { script: "test", artifacts: { expire_in: "7 elephants" } } }) }
it_behaves_like 'returns errors', 'jobs:rspec:artifacts expire in should be a duration'
end
context 'returns errors if job artifacts:untracked is not an array of strings' do
- let(:config) { YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { untracked: "string" } } }) }
+ let(:config) { YAML.dump({ stages: %w(build test), rspec: { script: "test", artifacts: { untracked: "string" } } }) }
it_behaves_like 'returns errors', 'jobs:rspec:artifacts untracked should be a boolean value'
end
context 'returns errors if job artifacts:paths is not an array of strings' do
- let(:config) { YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { paths: "string" } } }) }
+ let(:config) { YAML.dump({ stages: %w(build test), rspec: { script: "test", artifacts: { paths: "string" } } }) }
it_behaves_like 'returns errors', 'jobs:rspec:artifacts paths should be an array of strings'
end
@@ -2692,49 +2698,49 @@ module Gitlab
end
context 'returns errors if job cache:key is not an a string' do
- let(:config) { YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { key: 1 } } }) }
+ let(:config) { YAML.dump({ stages: %w(build test), rspec: { script: "test", cache: { key: 1 } } }) }
it_behaves_like 'returns errors', "jobs:rspec:cache:key should be a hash, a string or a symbol"
end
context 'returns errors if job cache:key:files is not an array of strings' do
- let(:config) { YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { key: { files: [1] } } } }) }
+ let(:config) { YAML.dump({ stages: %w(build test), rspec: { script: "test", cache: { key: { files: [1] } } } }) }
it_behaves_like 'returns errors', 'jobs:rspec:cache:key:files config should be an array of strings'
end
context 'returns errors if job cache:key:files is an empty array' do
- let(:config) { YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { key: { files: [] } } } }) }
+ let(:config) { YAML.dump({ stages: %w(build test), rspec: { script: "test", cache: { key: { files: [] } } } }) }
it_behaves_like 'returns errors', 'jobs:rspec:cache:key:files config requires at least 1 item'
end
context 'returns errors if job defines only cache:key:prefix' do
- let(:config) { YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { key: { prefix: 'prefix-key' } } } }) }
+ let(:config) { YAML.dump({ stages: %w(build test), rspec: { script: "test", cache: { key: { prefix: 'prefix-key' } } } }) }
it_behaves_like 'returns errors', 'jobs:rspec:cache:key config missing required keys: files'
end
context 'returns errors if job cache:key:prefix is not an a string' do
- let(:config) { YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { key: { prefix: 1, files: ['file'] } } } }) }
+ let(:config) { YAML.dump({ stages: %w(build test), rspec: { script: "test", cache: { key: { prefix: 1, files: ['file'] } } } }) }
it_behaves_like 'returns errors', 'jobs:rspec:cache:key:prefix config should be a string or symbol'
end
context "returns errors if job cache:untracked is not an array of strings" do
- let(:config) { YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { untracked: "string" } } }) }
+ let(:config) { YAML.dump({ stages: %w(build test), rspec: { script: "test", cache: { untracked: "string" } } }) }
it_behaves_like 'returns errors', "jobs:rspec:cache:untracked config should be a boolean value"
end
context "returns errors if job cache:paths is not an array of strings" do
- let(:config) { YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { paths: "string" } } }) }
+ let(:config) { YAML.dump({ stages: %w(build test), rspec: { script: "test", cache: { paths: "string" } } }) }
it_behaves_like 'returns errors', "jobs:rspec:cache:paths config should be an array of strings"
end
context "returns errors if job dependencies is not an array of strings" do
- let(:config) { YAML.dump({ types: %w(build test), rspec: { script: "test", dependencies: "string" } }) }
+ let(:config) { YAML.dump({ stages: %w(build test), rspec: { script: "test", dependencies: "string" } }) }
it_behaves_like 'returns errors', "jobs:rspec dependencies should be an array of strings"
end
diff --git a/spec/lib/gitlab/color_schemes_spec.rb b/spec/lib/gitlab/color_schemes_spec.rb
index fd9fccc2bf7..feb5648ff2d 100644
--- a/spec/lib/gitlab/color_schemes_spec.rb
+++ b/spec/lib/gitlab/color_schemes_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe Gitlab::ColorSchemes do
describe '.by_id' do
it 'returns a scheme by its ID' do
- expect(described_class.by_id(1).name).to eq 'White'
+ expect(described_class.by_id(1).name).to eq 'Light'
expect(described_class.by_id(4).name).to eq 'Solarized Dark'
end
end
diff --git a/spec/lib/gitlab/config/entry/configurable_spec.rb b/spec/lib/gitlab/config/entry/configurable_spec.rb
index 0153cfbf091..154038f51c7 100644
--- a/spec/lib/gitlab/config/entry/configurable_spec.rb
+++ b/spec/lib/gitlab/config/entry/configurable_spec.rb
@@ -39,7 +39,8 @@ RSpec.describe Gitlab::Config::Entry::Configurable do
entry :object, entry_class,
description: 'test object',
inherit: true,
- reserved: true
+ reserved: true,
+ deprecation: { deprecated: '10.0', warning: '10.1', removed: '11.0', documentation: 'docs.gitlab.com' }
end
end
@@ -52,6 +53,12 @@ RSpec.describe Gitlab::Config::Entry::Configurable do
factory = entry.nodes[:object]
expect(factory).to be_an_instance_of(Gitlab::Config::Entry::Factory)
+ expect(factory.deprecation).to eq(
+ deprecated: '10.0',
+ warning: '10.1',
+ removed: '11.0',
+ documentation: 'docs.gitlab.com'
+ )
expect(factory.description).to eq('test object')
expect(factory.inheritable?).to eq(true)
expect(factory.reserved?).to eq(true)
diff --git a/spec/lib/gitlab/config/entry/factory_spec.rb b/spec/lib/gitlab/config/entry/factory_spec.rb
index a00c45169ef..260b5cf0ade 100644
--- a/spec/lib/gitlab/config/entry/factory_spec.rb
+++ b/spec/lib/gitlab/config/entry/factory_spec.rb
@@ -115,5 +115,16 @@ RSpec.describe Gitlab::Config::Entry::Factory do
.with('some value', { some: 'hash' })
end
end
+
+ context 'when setting deprecation information' do
+ it 'passes deprecation as a parameter' do
+ entry = factory
+ .value('some value')
+ .with(deprecation: { deprecated: '10.0', warning: '10.1', removed: '11.0', documentation: 'docs' })
+ .create!
+
+ expect(entry.deprecation).to eq({ deprecated: '10.0', warning: '10.1', removed: '11.0', documentation: 'docs' })
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
index 56e3fc269e6..08d29f7842c 100644
--- a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
+++ b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
@@ -85,7 +85,7 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
expect(directives['style_src']).to eq("'self' 'unsafe-inline' https://cdn.example.com")
expect(directives['font_src']).to eq("'self' https://cdn.example.com")
expect(directives['worker_src']).to eq('http://localhost/assets/ blob: data: https://cdn.example.com')
- expect(directives['frame_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.frame_src + " https://cdn.example.com http://localhost/admin/ http://localhost/assets/ http://localhost/-/speedscope/index.html")
+ expect(directives['frame_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.frame_src + " https://cdn.example.com http://localhost/admin/ http://localhost/assets/ http://localhost/-/speedscope/index.html http://localhost/-/sandbox/mermaid")
end
end
@@ -113,7 +113,7 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
end
it 'does not add CUSTOMER_PORTAL_URL to CSP' do
- expect(directives['frame_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.frame_src + " http://localhost/admin/ http://localhost/assets/ http://localhost/-/speedscope/index.html")
+ expect(directives['frame_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.frame_src + " http://localhost/admin/ http://localhost/assets/ http://localhost/-/speedscope/index.html http://localhost/-/sandbox/mermaid")
end
end
@@ -123,7 +123,7 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
end
it 'adds CUSTOMER_PORTAL_URL to CSP' do
- expect(directives['frame_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.frame_src + " http://localhost/rails/letter_opener/ https://customers.example.com http://localhost/admin/ http://localhost/assets/ http://localhost/-/speedscope/index.html")
+ expect(directives['frame_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.frame_src + " http://localhost/rails/letter_opener/ https://customers.example.com http://localhost/admin/ http://localhost/assets/ http://localhost/-/speedscope/index.html http://localhost/-/sandbox/mermaid")
end
end
end
diff --git a/spec/lib/gitlab/data_builder/archive_trace_spec.rb b/spec/lib/gitlab/data_builder/archive_trace_spec.rb
new file mode 100644
index 00000000000..a310b0f0a94
--- /dev/null
+++ b/spec/lib/gitlab/data_builder/archive_trace_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::DataBuilder::ArchiveTrace do
+ let_it_be(:build) { create(:ci_build, :trace_artifact) }
+
+ describe '.build' do
+ let(:data) { described_class.build(build) }
+
+ it 'has correct attributes', :aggregate_failures do
+ expect(data[:object_kind]).to eq 'archive_trace'
+ expect(data[:trace_url]).to eq build.job_artifacts_trace.file.url
+ expect(data[:build_id]).to eq build.id
+ expect(data[:pipeline_id]).to eq build.pipeline_id
+ expect(data[:project]).to eq build.project.hook_attrs
+ end
+ end
+end
diff --git a/spec/lib/gitlab/data_builder/deployment_spec.rb b/spec/lib/gitlab/data_builder/deployment_spec.rb
index 75741c52579..ab8c8a51694 100644
--- a/spec/lib/gitlab/data_builder/deployment_spec.rb
+++ b/spec/lib/gitlab/data_builder/deployment_spec.rb
@@ -37,6 +37,7 @@ RSpec.describe Gitlab::DataBuilder::Deployment do
expect(data[:user_url]).to eq(expected_user_url)
expect(data[:commit_url]).to eq(expected_commit_url)
expect(data[:commit_title]).to eq(commit.title)
+ expect(data[:ref]).to eq(deployment.ref)
end
it 'does not include the deployable URL when there is no deployable' do
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
index 49714cfc4dd..01d61a525e6 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
@@ -336,8 +336,8 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
end
describe '#smoothed_time_efficiency' do
- let(:migration) { create(:batched_background_migration, interval: 120.seconds) }
- let(:end_time) { Time.zone.now }
+ let_it_be(:migration) { create(:batched_background_migration, interval: 120.seconds) }
+ let_it_be(:end_time) { Time.zone.now }
around do |example|
freeze_time do
@@ -345,7 +345,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
end
end
- let(:common_attrs) do
+ let_it_be(:common_attrs) do
{
status: :succeeded,
batched_migration: migration,
@@ -364,13 +364,14 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
end
context 'when there are enough jobs' do
- subject { migration.smoothed_time_efficiency(number_of_jobs: number_of_jobs) }
+ let_it_be(:number_of_jobs) { 10 }
+ let_it_be(:jobs) { create_list(:batched_background_migration_job, number_of_jobs, **common_attrs.merge(batched_migration: migration)) }
- let!(:jobs) { create_list(:batched_background_migration_job, number_of_jobs, **common_attrs.merge(batched_migration: migration)) }
- let(:number_of_jobs) { 10 }
+ subject { migration.smoothed_time_efficiency(number_of_jobs: number_of_jobs) }
before do
- expect(migration).to receive_message_chain(:batched_jobs, :successful_in_execution_order, :reverse_order, :limit).with(no_args).with(no_args).with(number_of_jobs).and_return(jobs)
+ expect(migration).to receive_message_chain(:batched_jobs, :successful_in_execution_order, :reverse_order, :limit, :with_preloads)
+ .and_return(jobs)
end
def mock_efficiencies(*effs)
@@ -411,6 +412,18 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
end
end
end
+
+ context 'with preloaded batched migration' do
+ it 'avoids N+1' do
+ create_list(:batched_background_migration_job, 11, **common_attrs.merge(started_at: end_time - 10.seconds))
+
+ control = ActiveRecord::QueryRecorder.new do
+ migration.smoothed_time_efficiency(number_of_jobs: 10)
+ end
+
+ expect { migration.smoothed_time_efficiency(number_of_jobs: 11) }.not_to exceed_query_limit(control)
+ end
+ end
end
describe '#optimize!' do
diff --git a/spec/lib/gitlab/database/background_migration_job_spec.rb b/spec/lib/gitlab/database/background_migration_job_spec.rb
index 42695925a1c..1117c17c84a 100644
--- a/spec/lib/gitlab/database/background_migration_job_spec.rb
+++ b/spec/lib/gitlab/database/background_migration_job_spec.rb
@@ -5,6 +5,8 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::BackgroundMigrationJob do
it_behaves_like 'having unique enum values'
+ it { is_expected.to be_a Gitlab::Database::SharedModel }
+
describe '.for_migration_execution' do
let!(:job1) { create(:background_migration_job) }
let!(:job2) { create(:background_migration_job, arguments: ['hi', 2]) }
diff --git a/spec/lib/gitlab/database/batch_count_spec.rb b/spec/lib/gitlab/database/batch_count_spec.rb
index 9831510f014..028bdce852e 100644
--- a/spec/lib/gitlab/database/batch_count_spec.rb
+++ b/spec/lib/gitlab/database/batch_count_spec.rb
@@ -270,8 +270,6 @@ RSpec.describe Gitlab::Database::BatchCount do
end
it "defaults the batch size to #{Gitlab::Database::BatchCounter::DEFAULT_DISTINCT_BATCH_SIZE}" do
- stub_feature_flags(loose_index_scan_for_distinct_values: false)
-
min_id = model.minimum(:id)
relation = instance_double(ActiveRecord::Relation)
allow(model).to receive_message_chain(:select, public_send: relation)
@@ -317,85 +315,13 @@ RSpec.describe Gitlab::Database::BatchCount do
end
end
- context 'when the loose_index_scan_for_distinct_values feature flag is off' do
- it_behaves_like 'when batch fetch query is canceled' do
- let(:mode) { :distinct }
- let(:operation) { :count }
- let(:operation_args) { nil }
- let(:column) { nil }
-
- subject { described_class.method(:batch_distinct_count) }
-
- before do
- stub_feature_flags(loose_index_scan_for_distinct_values: false)
- end
- end
- end
-
- context 'when the loose_index_scan_for_distinct_values feature flag is on' do
+ it_behaves_like 'when batch fetch query is canceled' do
let(:mode) { :distinct }
let(:operation) { :count }
let(:operation_args) { nil }
let(:column) { nil }
- let(:batch_size) { 10_000 }
-
subject { described_class.method(:batch_distinct_count) }
-
- before do
- stub_feature_flags(loose_index_scan_for_distinct_values: true)
- end
-
- it 'reduces batch size by half and retry fetch' do
- too_big_batch_relation_mock = instance_double(ActiveRecord::Relation)
-
- count_method = double(send: 1)
-
- allow(too_big_batch_relation_mock).to receive(:send).and_raise(ActiveRecord::QueryCanceled)
- allow(Gitlab::Database::LooseIndexScanDistinctCount).to receive_message_chain(:new, :build_query).with(from: 0, to: batch_size).and_return(too_big_batch_relation_mock)
- allow(Gitlab::Database::LooseIndexScanDistinctCount).to receive_message_chain(:new, :build_query).with(from: 0, to: batch_size / 2).and_return(count_method)
- allow(Gitlab::Database::LooseIndexScanDistinctCount).to receive_message_chain(:new, :build_query).with(from: batch_size / 2, to: batch_size).and_return(count_method)
-
- subject.call(model, column, batch_size: batch_size, start: 0, finish: batch_size - 1)
- end
-
- context 'when all retries fail' do
- let(:batch_count_query) { 'SELECT COUNT(id) FROM relation WHERE id BETWEEN 0 and 1' }
-
- before do
- relation = instance_double(ActiveRecord::Relation)
- allow(Gitlab::Database::LooseIndexScanDistinctCount).to receive_message_chain(:new, :build_query).and_return(relation)
- allow(relation).to receive(:send).and_raise(ActiveRecord::QueryCanceled.new('query timed out'))
- allow(relation).to receive(:to_sql).and_return(batch_count_query)
- end
-
- it 'logs failing query' do
- expect(Gitlab::AppJsonLogger).to receive(:error).with(
- event: 'batch_count',
- relation: model.table_name,
- operation: operation,
- operation_args: operation_args,
- start: 0,
- mode: mode,
- query: batch_count_query,
- message: 'Query has been canceled with message: query timed out'
- )
- expect(subject.call(model, column, batch_size: batch_size, start: 0)).to eq(-1)
- end
- end
-
- context 'when LooseIndexScanDistinctCount raises error' do
- let(:column) { :creator_id }
- let(:error_class) { Gitlab::Database::LooseIndexScanDistinctCount::ColumnConfigurationError }
-
- it 'rescues ColumnConfigurationError' do
- allow(Gitlab::Database::LooseIndexScanDistinctCount).to receive(:new).and_raise(error_class.new('error message'))
-
- expect(Gitlab::AppJsonLogger).to receive(:error).with(a_hash_including(message: 'LooseIndexScanDistinctCount column error: error message'))
-
- expect(subject.call(Project, column, batch_size: 10_000, start: 0)).to eq(-1)
- end
- end
end
end
diff --git a/spec/lib/gitlab/database/bulk_update_spec.rb b/spec/lib/gitlab/database/bulk_update_spec.rb
index 9a6463c99fa..08b4d50f83b 100644
--- a/spec/lib/gitlab/database/bulk_update_spec.rb
+++ b/spec/lib/gitlab/database/bulk_update_spec.rb
@@ -101,7 +101,7 @@ RSpec.describe Gitlab::Database::BulkUpdate do
before do
configuration_hash = ActiveRecord::Base.connection_db_config.configuration_hash
- ActiveRecord::Base.establish_connection(
+ ActiveRecord::Base.establish_connection( # rubocop: disable Database/EstablishConnection
configuration_hash.merge(prepared_statements: prepared_statements)
)
end
diff --git a/spec/lib/gitlab/database/loose_index_scan_distinct_count_spec.rb b/spec/lib/gitlab/database/loose_index_scan_distinct_count_spec.rb
deleted file mode 100644
index e0eac26e4d9..00000000000
--- a/spec/lib/gitlab/database/loose_index_scan_distinct_count_spec.rb
+++ /dev/null
@@ -1,71 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Database::LooseIndexScanDistinctCount do
- context 'counting distinct users' do
- let_it_be(:user) { create(:user) }
- let_it_be(:other_user) { create(:user) }
-
- let(:column) { :creator_id }
-
- before_all do
- create_list(:project, 3, creator: user)
- create_list(:project, 1, creator: other_user)
- end
-
- subject(:count) { described_class.new(Project, :creator_id).count(from: Project.minimum(:creator_id), to: Project.maximum(:creator_id) + 1) }
-
- it { is_expected.to eq(2) }
-
- context 'when STI model is queried' do
- it 'does not raise error' do
- expect { described_class.new(Group, :owner_id).count(from: 0, to: 1) }.not_to raise_error
- end
- end
-
- context 'when model with default_scope is queried' do
- it 'does not raise error' do
- expect { described_class.new(GroupMember, :id).count(from: 0, to: 1) }.not_to raise_error
- end
- end
-
- context 'when the fully qualified column is given' do
- let(:column) { 'projects.creator_id' }
-
- it { is_expected.to eq(2) }
- end
-
- context 'when AR attribute is given' do
- let(:column) { Project.arel_table[:creator_id] }
-
- it { is_expected.to eq(2) }
- end
-
- context 'when invalid value is given for the column' do
- let(:column) { Class.new }
-
- it { expect { described_class.new(Group, column) }.to raise_error(Gitlab::Database::LooseIndexScanDistinctCount::ColumnConfigurationError) }
- end
-
- context 'when null values are present' do
- before do
- create_list(:project, 2).each { |p| p.update_column(:creator_id, nil) }
- end
-
- it { is_expected.to eq(2) }
- end
- end
-
- context 'counting STI models' do
- let!(:groups) { create_list(:group, 3) }
- let!(:namespaces) { create_list(:namespace, 2) }
-
- let(:max_id) { Namespace.maximum(:id) + 1 }
-
- it 'counts groups' do
- count = described_class.new(Group, :id).count(from: 0, to: max_id)
- expect(count).to eq(3)
- end
- end
-end
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 7f80bed04a4..7e3de32b965 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -1752,116 +1752,6 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
- describe '#change_column_type_using_background_migration' do
- let!(:issue) { create(:issue, :closed, closed_at: Time.zone.now) }
-
- let(:issue_model) do
- Class.new(ActiveRecord::Base) do
- self.table_name = 'issues'
- include EachBatch
- end
- end
-
- it 'changes the type of a column using a background migration' do
- expect(model)
- .to receive(:add_column)
- .with('issues', 'closed_at_for_type_change', :datetime_with_timezone)
-
- expect(model)
- .to receive(:install_rename_triggers)
- .with('issues', :closed_at, 'closed_at_for_type_change')
-
- expect(BackgroundMigrationWorker)
- .to receive(:perform_in)
- .ordered
- .with(
- 10.minutes,
- 'CopyColumn',
- ['issues', :closed_at, 'closed_at_for_type_change', issue.id, issue.id]
- )
-
- expect(BackgroundMigrationWorker)
- .to receive(:perform_in)
- .ordered
- .with(
- 1.hour + 10.minutes,
- 'CleanupConcurrentTypeChange',
- ['issues', :closed_at, 'closed_at_for_type_change']
- )
-
- expect(Gitlab::BackgroundMigration)
- .to receive(:steal)
- .ordered
- .with('CopyColumn')
-
- expect(Gitlab::BackgroundMigration)
- .to receive(:steal)
- .ordered
- .with('CleanupConcurrentTypeChange')
-
- model.change_column_type_using_background_migration(
- issue_model.all,
- :closed_at,
- :datetime_with_timezone
- )
- end
- end
-
- describe '#rename_column_using_background_migration' do
- let!(:issue) { create(:issue, :closed, closed_at: Time.zone.now) }
-
- it 'renames a column using a background migration' do
- expect(model)
- .to receive(:add_column)
- .with(
- 'issues',
- :closed_at_timestamp,
- :datetime_with_timezone,
- limit: anything,
- precision: anything,
- scale: anything
- )
-
- expect(model)
- .to receive(:install_rename_triggers)
- .with('issues', :closed_at, :closed_at_timestamp)
-
- expect(BackgroundMigrationWorker)
- .to receive(:perform_in)
- .ordered
- .with(
- 10.minutes,
- 'CopyColumn',
- ['issues', :closed_at, :closed_at_timestamp, issue.id, issue.id]
- )
-
- expect(BackgroundMigrationWorker)
- .to receive(:perform_in)
- .ordered
- .with(
- 1.hour + 10.minutes,
- 'CleanupConcurrentRename',
- ['issues', :closed_at, :closed_at_timestamp]
- )
-
- expect(Gitlab::BackgroundMigration)
- .to receive(:steal)
- .ordered
- .with('CopyColumn')
-
- expect(Gitlab::BackgroundMigration)
- .to receive(:steal)
- .ordered
- .with('CleanupConcurrentRename')
-
- model.rename_column_using_background_migration(
- 'issues',
- :closed_at,
- :closed_at_timestamp
- )
- end
- end
-
describe '#convert_to_bigint_column' do
it 'returns the name of the temporary column used to convert to bigint' do
expect(model.convert_to_bigint_column(:id)).to eq('id_convert_to_bigint')
@@ -2065,8 +1955,6 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
t.integer :other_id
t.timestamps
end
-
- allow(model).to receive(:perform_background_migration_inline?).and_return(false)
end
context 'when the target table does not exist' do
diff --git a/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb b/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
index 99c7d70724c..0abb76b9f8a 100644
--- a/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
@@ -7,249 +7,208 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do
ActiveRecord::Migration.new.extend(described_class)
end
- describe '#queue_background_migration_jobs_by_range_at_intervals' do
- context 'when the model has an ID column' do
- let!(:id1) { create(:user).id }
- let!(:id2) { create(:user).id }
- let!(:id3) { create(:user).id }
-
- around do |example|
- freeze_time { example.run }
- end
-
- before do
- User.class_eval do
- include EachBatch
- end
- end
+ shared_examples_for 'helpers that enqueue background migrations' do |worker_class, tracking_database|
+ before do
+ allow(model).to receive(:tracking_database).and_return(tracking_database)
+ end
- it 'returns the final expected delay' do
- Sidekiq::Testing.fake! do
- final_delay = model.queue_background_migration_jobs_by_range_at_intervals(User, 'FooJob', 10.minutes, batch_size: 2)
+ describe '#queue_background_migration_jobs_by_range_at_intervals' do
+ context 'when the model has an ID column' do
+ let!(:id1) { create(:user).id }
+ let!(:id2) { create(:user).id }
+ let!(:id3) { create(:user).id }
- expect(final_delay.to_f).to eq(20.minutes.to_f)
+ around do |example|
+ freeze_time { example.run }
end
- end
-
- it 'returns zero when nothing gets queued' do
- Sidekiq::Testing.fake! do
- final_delay = model.queue_background_migration_jobs_by_range_at_intervals(User.none, 'FooJob', 10.minutes)
- expect(final_delay).to eq(0)
+ before do
+ User.class_eval do
+ include EachBatch
+ end
end
- end
- context 'with batch_size option' do
- it 'queues jobs correctly' do
+ it 'returns the final expected delay' do
Sidekiq::Testing.fake! do
- model.queue_background_migration_jobs_by_range_at_intervals(User, 'FooJob', 10.minutes, batch_size: 2)
+ final_delay = model.queue_background_migration_jobs_by_range_at_intervals(User, 'FooJob', 10.minutes, batch_size: 2)
- expect(BackgroundMigrationWorker.jobs[0]['args']).to eq(['FooJob', [id1, id2]])
- expect(BackgroundMigrationWorker.jobs[0]['at']).to eq(10.minutes.from_now.to_f)
- expect(BackgroundMigrationWorker.jobs[1]['args']).to eq(['FooJob', [id3, id3]])
- expect(BackgroundMigrationWorker.jobs[1]['at']).to eq(20.minutes.from_now.to_f)
+ expect(final_delay.to_f).to eq(20.minutes.to_f)
end
end
- end
- context 'without batch_size option' do
- it 'queues jobs correctly' do
+ it 'returns zero when nothing gets queued' do
Sidekiq::Testing.fake! do
- model.queue_background_migration_jobs_by_range_at_intervals(User, 'FooJob', 10.minutes)
+ final_delay = model.queue_background_migration_jobs_by_range_at_intervals(User.none, 'FooJob', 10.minutes)
- expect(BackgroundMigrationWorker.jobs[0]['args']).to eq(['FooJob', [id1, id3]])
- expect(BackgroundMigrationWorker.jobs[0]['at']).to eq(10.minutes.from_now.to_f)
+ expect(final_delay).to eq(0)
end
end
- end
- context 'with other_job_arguments option' do
- it 'queues jobs correctly' do
- Sidekiq::Testing.fake! do
- model.queue_background_migration_jobs_by_range_at_intervals(User, 'FooJob', 10.minutes, other_job_arguments: [1, 2])
+ context 'when the delay_interval is smaller than the minimum' do
+ it 'sets the delay_interval to the minimum value' do
+ Sidekiq::Testing.fake! do
+ final_delay = model.queue_background_migration_jobs_by_range_at_intervals(User, 'FooJob', 1.minute, batch_size: 2)
- expect(BackgroundMigrationWorker.jobs[0]['args']).to eq(['FooJob', [id1, id3, 1, 2]])
- expect(BackgroundMigrationWorker.jobs[0]['at']).to eq(10.minutes.from_now.to_f)
+ expect(worker_class.jobs[0]['args']).to eq(['FooJob', [id1, id2]])
+ expect(worker_class.jobs[0]['at']).to eq(2.minutes.from_now.to_f)
+ expect(worker_class.jobs[1]['args']).to eq(['FooJob', [id3, id3]])
+ expect(worker_class.jobs[1]['at']).to eq(4.minutes.from_now.to_f)
+
+ expect(final_delay.to_f).to eq(4.minutes.to_f)
+ end
end
end
- end
- context 'with initial_delay option' do
- it 'queues jobs correctly' do
- Sidekiq::Testing.fake! do
- model.queue_background_migration_jobs_by_range_at_intervals(User, 'FooJob', 10.minutes, other_job_arguments: [1, 2], initial_delay: 10.minutes)
+ context 'with batch_size option' do
+ it 'queues jobs correctly' do
+ Sidekiq::Testing.fake! do
+ model.queue_background_migration_jobs_by_range_at_intervals(User, 'FooJob', 10.minutes, batch_size: 2)
- expect(BackgroundMigrationWorker.jobs[0]['args']).to eq(['FooJob', [id1, id3, 1, 2]])
- expect(BackgroundMigrationWorker.jobs[0]['at']).to eq(20.minutes.from_now.to_f)
+ expect(worker_class.jobs[0]['args']).to eq(['FooJob', [id1, id2]])
+ expect(worker_class.jobs[0]['at']).to eq(10.minutes.from_now.to_f)
+ expect(worker_class.jobs[1]['args']).to eq(['FooJob', [id3, id3]])
+ expect(worker_class.jobs[1]['at']).to eq(20.minutes.from_now.to_f)
+ end
end
end
- end
-
- context 'with track_jobs option' do
- it 'creates a record for each job in the database' do
- Sidekiq::Testing.fake! do
- expect do
- model.queue_background_migration_jobs_by_range_at_intervals(User, '::FooJob', 10.minutes,
- other_job_arguments: [1, 2], track_jobs: true)
- end.to change { Gitlab::Database::BackgroundMigrationJob.count }.from(0).to(1)
-
- expect(BackgroundMigrationWorker.jobs.size).to eq(1)
- tracked_job = Gitlab::Database::BackgroundMigrationJob.first
+ context 'without batch_size option' do
+ it 'queues jobs correctly' do
+ Sidekiq::Testing.fake! do
+ model.queue_background_migration_jobs_by_range_at_intervals(User, 'FooJob', 10.minutes)
- expect(tracked_job.class_name).to eq('FooJob')
- expect(tracked_job.arguments).to eq([id1, id3, 1, 2])
- expect(tracked_job).to be_pending
+ expect(worker_class.jobs[0]['args']).to eq(['FooJob', [id1, id3]])
+ expect(worker_class.jobs[0]['at']).to eq(10.minutes.from_now.to_f)
+ end
end
end
- end
- context 'without track_jobs option' do
- it 'does not create records in the database' do
- Sidekiq::Testing.fake! do
- expect do
+ context 'with other_job_arguments option' do
+ it 'queues jobs correctly' do
+ Sidekiq::Testing.fake! do
model.queue_background_migration_jobs_by_range_at_intervals(User, 'FooJob', 10.minutes, other_job_arguments: [1, 2])
- end.not_to change { Gitlab::Database::BackgroundMigrationJob.count }
- expect(BackgroundMigrationWorker.jobs.size).to eq(1)
+ expect(worker_class.jobs[0]['args']).to eq(['FooJob', [id1, id3, 1, 2]])
+ expect(worker_class.jobs[0]['at']).to eq(10.minutes.from_now.to_f)
+ end
end
end
- end
- end
-
- context 'when the model specifies a primary_column_name' do
- let!(:id1) { create(:container_expiration_policy).id }
- let!(:id2) { create(:container_expiration_policy).id }
- let!(:id3) { create(:container_expiration_policy).id }
- around do |example|
- freeze_time { example.run }
- end
+ context 'with initial_delay option' do
+ it 'queues jobs correctly' do
+ Sidekiq::Testing.fake! do
+ model.queue_background_migration_jobs_by_range_at_intervals(User, 'FooJob', 10.minutes, other_job_arguments: [1, 2], initial_delay: 10.minutes)
- before do
- ContainerExpirationPolicy.class_eval do
- include EachBatch
+ expect(worker_class.jobs[0]['args']).to eq(['FooJob', [id1, id3, 1, 2]])
+ expect(worker_class.jobs[0]['at']).to eq(20.minutes.from_now.to_f)
+ end
+ end
end
- end
- it 'returns the final expected delay', :aggregate_failures do
- Sidekiq::Testing.fake! do
- final_delay = model.queue_background_migration_jobs_by_range_at_intervals(ContainerExpirationPolicy, 'FooJob', 10.minutes, batch_size: 2, primary_column_name: :project_id)
+ context 'with track_jobs option' do
+ it 'creates a record for each job in the database' do
+ Sidekiq::Testing.fake! do
+ expect do
+ model.queue_background_migration_jobs_by_range_at_intervals(User, '::FooJob', 10.minutes,
+ other_job_arguments: [1, 2], track_jobs: true)
+ end.to change { Gitlab::Database::BackgroundMigrationJob.count }.from(0).to(1)
- expect(final_delay.to_f).to eq(20.minutes.to_f)
- expect(BackgroundMigrationWorker.jobs[0]['args']).to eq(['FooJob', [id1, id2]])
- expect(BackgroundMigrationWorker.jobs[0]['at']).to eq(10.minutes.from_now.to_f)
- expect(BackgroundMigrationWorker.jobs[1]['args']).to eq(['FooJob', [id3, id3]])
- expect(BackgroundMigrationWorker.jobs[1]['at']).to eq(20.minutes.from_now.to_f)
- end
- end
+ expect(worker_class.jobs.size).to eq(1)
- context "when the primary_column_name is not an integer" do
- it 'raises error' do
- expect do
- model.queue_background_migration_jobs_by_range_at_intervals(ContainerExpirationPolicy, 'FooJob', 10.minutes, primary_column_name: :enabled)
- end.to raise_error(StandardError, /is not an integer column/)
- end
- end
+ tracked_job = Gitlab::Database::BackgroundMigrationJob.first
- context "when the primary_column_name does not exist" do
- it 'raises error' do
- expect do
- model.queue_background_migration_jobs_by_range_at_intervals(ContainerExpirationPolicy, 'FooJob', 10.minutes, primary_column_name: :foo)
- end.to raise_error(StandardError, /does not have an ID column of foo/)
+ expect(tracked_job.class_name).to eq('FooJob')
+ expect(tracked_job.arguments).to eq([id1, id3, 1, 2])
+ expect(tracked_job).to be_pending
+ end
+ end
end
- end
- end
-
- context "when the model doesn't have an ID or primary_column_name column" do
- it 'raises error (for now)' do
- expect do
- model.queue_background_migration_jobs_by_range_at_intervals(ProjectAuthorization, 'FooJob', 10.seconds)
- end.to raise_error(StandardError, /does not have an ID/)
- end
- end
- end
- describe '#requeue_background_migration_jobs_by_range_at_intervals' do
- let!(:job_class_name) { 'TestJob' }
- let!(:pending_job_1) { create(:background_migration_job, class_name: job_class_name, status: :pending, arguments: [1, 2]) }
- let!(:pending_job_2) { create(:background_migration_job, class_name: job_class_name, status: :pending, arguments: [3, 4]) }
- let!(:successful_job_1) { create(:background_migration_job, class_name: job_class_name, status: :succeeded, arguments: [5, 6]) }
- let!(:successful_job_2) { create(:background_migration_job, class_name: job_class_name, status: :succeeded, arguments: [7, 8]) }
+ context 'without track_jobs option' do
+ it 'does not create records in the database' do
+ Sidekiq::Testing.fake! do
+ expect do
+ model.queue_background_migration_jobs_by_range_at_intervals(User, 'FooJob', 10.minutes, other_job_arguments: [1, 2])
+ end.not_to change { Gitlab::Database::BackgroundMigrationJob.count }
- around do |example|
- freeze_time do
- Sidekiq::Testing.fake! do
- example.run
+ expect(worker_class.jobs.size).to eq(1)
+ end
+ end
end
end
- end
-
- subject { model.requeue_background_migration_jobs_by_range_at_intervals(job_class_name, 10.minutes) }
-
- it 'returns the expected duration' do
- expect(subject).to eq(20.minutes)
- end
- context 'when nothing is queued' do
- subject { model.requeue_background_migration_jobs_by_range_at_intervals('FakeJob', 10.minutes) }
+ context 'when the model specifies a primary_column_name' do
+ let!(:id1) { create(:container_expiration_policy).id }
+ let!(:id2) { create(:container_expiration_policy).id }
+ let!(:id3) { create(:container_expiration_policy).id }
- it 'returns expected duration of zero when nothing gets queued' do
- expect(subject).to eq(0)
- end
- end
-
- it 'queues pending jobs' do
- subject
+ around do |example|
+ freeze_time { example.run }
+ end
- expect(BackgroundMigrationWorker.jobs[0]['args']).to eq([job_class_name, [1, 2]])
- expect(BackgroundMigrationWorker.jobs[0]['at']).to be_nil
- expect(BackgroundMigrationWorker.jobs[1]['args']).to eq([job_class_name, [3, 4]])
- expect(BackgroundMigrationWorker.jobs[1]['at']).to eq(10.minutes.from_now.to_f)
- end
+ before do
+ ContainerExpirationPolicy.class_eval do
+ include EachBatch
+ end
+ end
- context 'with batch_size option' do
- subject { model.requeue_background_migration_jobs_by_range_at_intervals(job_class_name, 10.minutes, batch_size: 1) }
+ it 'returns the final expected delay', :aggregate_failures do
+ Sidekiq::Testing.fake! do
+ final_delay = model.queue_background_migration_jobs_by_range_at_intervals(ContainerExpirationPolicy, 'FooJob', 10.minutes, batch_size: 2, primary_column_name: :project_id)
- it 'returns the expected duration' do
- expect(subject).to eq(20.minutes)
- end
+ expect(final_delay.to_f).to eq(20.minutes.to_f)
+ expect(worker_class.jobs[0]['args']).to eq(['FooJob', [id1, id2]])
+ expect(worker_class.jobs[0]['at']).to eq(10.minutes.from_now.to_f)
+ expect(worker_class.jobs[1]['args']).to eq(['FooJob', [id3, id3]])
+ expect(worker_class.jobs[1]['at']).to eq(20.minutes.from_now.to_f)
+ end
+ end
- it 'queues pending jobs' do
- subject
+ context "when the primary_column_name is not an integer" do
+ it 'raises error' do
+ expect do
+ model.queue_background_migration_jobs_by_range_at_intervals(ContainerExpirationPolicy, 'FooJob', 10.minutes, primary_column_name: :enabled)
+ end.to raise_error(StandardError, /is not an integer column/)
+ end
+ end
- expect(BackgroundMigrationWorker.jobs[0]['args']).to eq([job_class_name, [1, 2]])
- expect(BackgroundMigrationWorker.jobs[0]['at']).to be_nil
- expect(BackgroundMigrationWorker.jobs[1]['args']).to eq([job_class_name, [3, 4]])
- expect(BackgroundMigrationWorker.jobs[1]['at']).to eq(10.minutes.from_now.to_f)
+ context "when the primary_column_name does not exist" do
+ it 'raises error' do
+ expect do
+ model.queue_background_migration_jobs_by_range_at_intervals(ContainerExpirationPolicy, 'FooJob', 10.minutes, primary_column_name: :foo)
+ end.to raise_error(StandardError, /does not have an ID column of foo/)
+ end
+ end
end
- it 'retrieve jobs in batches' do
- jobs = double('jobs')
- expect(Gitlab::Database::BackgroundMigrationJob).to receive(:pending) { jobs }
- allow(jobs).to receive(:where).with(class_name: job_class_name) { jobs }
- expect(jobs).to receive(:each_batch).with(of: 1)
-
- subject
+ context "when the model doesn't have an ID or primary_column_name column" do
+ it 'raises error (for now)' do
+ expect do
+ model.queue_background_migration_jobs_by_range_at_intervals(ProjectAuthorization, 'FooJob', 10.seconds)
+ end.to raise_error(StandardError, /does not have an ID/)
+ end
end
end
- context 'with initial_delay option' do
- let_it_be(:initial_delay) { 3.minutes }
+ describe '#requeue_background_migration_jobs_by_range_at_intervals' do
+ let!(:job_class_name) { 'TestJob' }
+ let!(:pending_job_1) { create(:background_migration_job, class_name: job_class_name, status: :pending, arguments: [1, 2]) }
+ let!(:pending_job_2) { create(:background_migration_job, class_name: job_class_name, status: :pending, arguments: [3, 4]) }
+ let!(:successful_job_1) { create(:background_migration_job, class_name: job_class_name, status: :succeeded, arguments: [5, 6]) }
+ let!(:successful_job_2) { create(:background_migration_job, class_name: job_class_name, status: :succeeded, arguments: [7, 8]) }
- subject { model.requeue_background_migration_jobs_by_range_at_intervals(job_class_name, 10.minutes, initial_delay: initial_delay) }
-
- it 'returns the expected duration' do
- expect(subject).to eq(23.minutes)
+ around do |example|
+ freeze_time do
+ Sidekiq::Testing.fake! do
+ example.run
+ end
+ end
end
- it 'queues pending jobs' do
- subject
+ subject { model.requeue_background_migration_jobs_by_range_at_intervals(job_class_name, 10.minutes) }
- expect(BackgroundMigrationWorker.jobs[0]['args']).to eq([job_class_name, [1, 2]])
- expect(BackgroundMigrationWorker.jobs[0]['at']).to eq(3.minutes.from_now.to_f)
- expect(BackgroundMigrationWorker.jobs[1]['args']).to eq([job_class_name, [3, 4]])
- expect(BackgroundMigrationWorker.jobs[1]['at']).to eq(13.minutes.from_now.to_f)
+ it 'returns the expected duration' do
+ expect(subject).to eq(20.minutes)
end
context 'when nothing is queued' do
@@ -259,195 +218,226 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do
expect(subject).to eq(0)
end
end
- end
- end
- describe '#perform_background_migration_inline?' do
- it 'returns true in a test environment' do
- stub_rails_env('test')
+ it 'queues pending jobs' do
+ subject
- expect(model.perform_background_migration_inline?).to eq(true)
- end
+ expect(worker_class.jobs[0]['args']).to eq([job_class_name, [1, 2]])
+ expect(worker_class.jobs[0]['at']).to be_nil
+ expect(worker_class.jobs[1]['args']).to eq([job_class_name, [3, 4]])
+ expect(worker_class.jobs[1]['at']).to eq(10.minutes.from_now.to_f)
+ end
- it 'returns true in a development environment' do
- stub_rails_env('development')
+ context 'with batch_size option' do
+ subject { model.requeue_background_migration_jobs_by_range_at_intervals(job_class_name, 10.minutes, batch_size: 1) }
- expect(model.perform_background_migration_inline?).to eq(true)
- end
+ it 'returns the expected duration' do
+ expect(subject).to eq(20.minutes)
+ end
- it 'returns false in a production environment' do
- stub_rails_env('production')
+ it 'queues pending jobs' do
+ subject
- expect(model.perform_background_migration_inline?).to eq(false)
- end
- end
+ expect(worker_class.jobs[0]['args']).to eq([job_class_name, [1, 2]])
+ expect(worker_class.jobs[0]['at']).to be_nil
+ expect(worker_class.jobs[1]['args']).to eq([job_class_name, [3, 4]])
+ expect(worker_class.jobs[1]['at']).to eq(10.minutes.from_now.to_f)
+ end
- describe '#migrate_async' do
- it 'calls BackgroundMigrationWorker.perform_async' do
- expect(BackgroundMigrationWorker).to receive(:perform_async).with("Class", "hello", "world")
+ it 'retrieve jobs in batches' do
+ jobs = double('jobs')
+ expect(Gitlab::Database::BackgroundMigrationJob).to receive(:pending) { jobs }
+ allow(jobs).to receive(:where).with(class_name: job_class_name) { jobs }
+ expect(jobs).to receive(:each_batch).with(of: 1)
- model.migrate_async("Class", "hello", "world")
- end
+ subject
+ end
+ end
- it 'pushes a context with the current class name as caller_id' do
- expect(Gitlab::ApplicationContext).to receive(:with_context).with(caller_id: model.class.to_s)
+ context 'with initial_delay option' do
+ let_it_be(:initial_delay) { 3.minutes }
- model.migrate_async('Class', 'hello', 'world')
- end
- end
+ subject { model.requeue_background_migration_jobs_by_range_at_intervals(job_class_name, 10.minutes, initial_delay: initial_delay) }
- describe '#migrate_in' do
- it 'calls BackgroundMigrationWorker.perform_in' do
- expect(BackgroundMigrationWorker).to receive(:perform_in).with(10.minutes, 'Class', 'Hello', 'World')
+ it 'returns the expected duration' do
+ expect(subject).to eq(23.minutes)
+ end
- model.migrate_in(10.minutes, 'Class', 'Hello', 'World')
- end
+ it 'queues pending jobs' do
+ subject
+
+ expect(worker_class.jobs[0]['args']).to eq([job_class_name, [1, 2]])
+ expect(worker_class.jobs[0]['at']).to eq(3.minutes.from_now.to_f)
+ expect(worker_class.jobs[1]['args']).to eq([job_class_name, [3, 4]])
+ expect(worker_class.jobs[1]['at']).to eq(13.minutes.from_now.to_f)
+ end
- it 'pushes a context with the current class name as caller_id' do
- expect(Gitlab::ApplicationContext).to receive(:with_context).with(caller_id: model.class.to_s)
+ context 'when nothing is queued' do
+ subject { model.requeue_background_migration_jobs_by_range_at_intervals('FakeJob', 10.minutes) }
- model.migrate_in(10.minutes, 'Class', 'Hello', 'World')
+ it 'returns expected duration of zero when nothing gets queued' do
+ expect(subject).to eq(0)
+ end
+ end
+ end
end
- end
- describe '#bulk_migrate_async' do
- it 'calls BackgroundMigrationWorker.bulk_perform_async' do
- expect(BackgroundMigrationWorker).to receive(:bulk_perform_async).with([%w(Class hello world)])
+ describe '#finalized_background_migration' do
+ let(:coordinator) { Gitlab::BackgroundMigration::JobCoordinator.new(worker_class) }
- model.bulk_migrate_async([%w(Class hello world)])
- end
+ let!(:tracked_pending_job) { create(:background_migration_job, class_name: job_class_name, status: :pending, arguments: [1]) }
+ let!(:tracked_successful_job) { create(:background_migration_job, class_name: job_class_name, status: :succeeded, arguments: [2]) }
+ let!(:job_class_name) { 'TestJob' }
- it 'pushes a context with the current class name as caller_id' do
- expect(Gitlab::ApplicationContext).to receive(:with_context).with(caller_id: model.class.to_s)
+ let!(:job_class) do
+ Class.new do
+ def perform(*arguments)
+ Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded('TestJob', arguments)
+ end
+ end
+ end
- model.bulk_migrate_async([%w(Class hello world)])
- end
- end
+ before do
+ allow(Gitlab::BackgroundMigration).to receive(:coordinator_for_database)
+ .with('main').and_return(coordinator)
- describe '#bulk_migrate_in' do
- it 'calls BackgroundMigrationWorker.bulk_perform_in_' do
- expect(BackgroundMigrationWorker).to receive(:bulk_perform_in).with(10.minutes, [%w(Class hello world)])
+ expect(coordinator).to receive(:migration_class_for)
+ .with(job_class_name).at_least(:once) { job_class }
- model.bulk_migrate_in(10.minutes, [%w(Class hello world)])
- end
+ Sidekiq::Testing.disable! do
+ worker_class.perform_async(job_class_name, [1, 2])
+ worker_class.perform_async(job_class_name, [3, 4])
+ worker_class.perform_in(10, job_class_name, [5, 6])
+ worker_class.perform_in(20, job_class_name, [7, 8])
+ end
+ end
- it 'pushes a context with the current class name as caller_id' do
- expect(Gitlab::ApplicationContext).to receive(:with_context).with(caller_id: model.class.to_s)
+ it_behaves_like 'finalized tracked background migration', worker_class do
+ before do
+ model.finalize_background_migration(job_class_name)
+ end
+ end
- model.bulk_migrate_in(10.minutes, [%w(Class hello world)])
- end
- end
+ context 'when removing all tracked job records' do
+ let!(:job_class) do
+ Class.new do
+ def perform(*arguments)
+ # Force pending jobs to remain pending
+ end
+ end
+ end
- describe '#delete_queued_jobs' do
- let(:job1) { double }
- let(:job2) { double }
+ before do
+ model.finalize_background_migration(job_class_name, delete_tracking_jobs: %w[pending succeeded])
+ end
- it 'deletes all queued jobs for the given background migration' do
- expect(Gitlab::BackgroundMigration).to receive(:steal).with('BackgroundMigrationClassName') do |&block|
- expect(block.call(job1)).to be(false)
- expect(block.call(job2)).to be(false)
+ it_behaves_like 'finalized tracked background migration', worker_class
+ it_behaves_like 'removed tracked jobs', 'pending'
+ it_behaves_like 'removed tracked jobs', 'succeeded'
end
- expect(job1).to receive(:delete)
- expect(job2).to receive(:delete)
+ context 'when retaining all tracked job records' do
+ before do
+ model.finalize_background_migration(job_class_name, delete_tracking_jobs: false)
+ end
- model.delete_queued_jobs('BackgroundMigrationClassName')
- end
- end
+ it_behaves_like 'finalized background migration', worker_class
+ include_examples 'retained tracked jobs', 'succeeded'
+ end
- describe '#finalized_background_migration' do
- let(:job_coordinator) { Gitlab::BackgroundMigration::JobCoordinator.new(BackgroundMigrationWorker) }
+ context 'during retry race condition' do
+ let!(:job_class) do
+ Class.new do
+ class << self
+ attr_accessor :worker_class
- let!(:job_class_name) { 'TestJob' }
- let!(:job_class) { Class.new }
- let!(:job_perform_method) do
- ->(*arguments) do
- Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded(
- # Value is 'TestJob' defined by :job_class_name in the let! above.
- # Scoping prohibits us from directly referencing job_class_name.
- RSpec.current_example.example_group_instance.job_class_name,
- arguments
- )
- end
- end
+ def queue_items_added
+ @queue_items_added ||= []
+ end
+ end
- let!(:tracked_pending_job) { create(:background_migration_job, class_name: job_class_name, status: :pending, arguments: [1]) }
- let!(:tracked_successful_job) { create(:background_migration_job, class_name: job_class_name, status: :succeeded, arguments: [2]) }
+ def worker_class
+ self.class.worker_class
+ end
- before do
- job_class.define_method(:perform, job_perform_method)
+ def queue_items_added
+ self.class.queue_items_added
+ end
- allow(Gitlab::BackgroundMigration).to receive(:coordinator_for_database)
- .with('main').and_return(job_coordinator)
+ def perform(*arguments)
+ Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded('TestJob', arguments)
- expect(job_coordinator).to receive(:migration_class_for)
- .with(job_class_name).at_least(:once) { job_class }
+ # Mock another process pushing queue jobs.
+ if self.class.queue_items_added.count < 10
+ Sidekiq::Testing.disable! do
+ queue_items_added << worker_class.perform_async('TestJob', [Time.current])
+ queue_items_added << worker_class.perform_in(10, 'TestJob', [Time.current])
+ end
+ end
+ end
+ end
+ end
- Sidekiq::Testing.disable! do
- BackgroundMigrationWorker.perform_async(job_class_name, [1, 2])
- BackgroundMigrationWorker.perform_async(job_class_name, [3, 4])
- BackgroundMigrationWorker.perform_in(10, job_class_name, [5, 6])
- BackgroundMigrationWorker.perform_in(20, job_class_name, [7, 8])
- end
- end
+ it_behaves_like 'finalized tracked background migration', worker_class do
+ before do
+ # deliberately set the worker class on our test job since it won't be pulled from the surrounding scope
+ job_class.worker_class = worker_class
- it_behaves_like 'finalized tracked background migration' do
- before do
- model.finalize_background_migration(job_class_name)
+ model.finalize_background_migration(job_class_name, delete_tracking_jobs: ['succeeded'])
+ end
+ end
end
end
- context 'when removing all tracked job records' do
- # Force pending jobs to remain pending.
- let!(:job_perform_method) { ->(*arguments) { } }
+ describe '#migrate_in' do
+ it 'calls perform_in for the correct worker' do
+ expect(worker_class).to receive(:perform_in).with(10.minutes, 'Class', 'Hello', 'World')
- before do
- model.finalize_background_migration(job_class_name, delete_tracking_jobs: %w[pending succeeded])
+ model.migrate_in(10.minutes, 'Class', 'Hello', 'World')
end
- it_behaves_like 'finalized tracked background migration'
- it_behaves_like 'removed tracked jobs', 'pending'
- it_behaves_like 'removed tracked jobs', 'succeeded'
- end
+ it 'pushes a context with the current class name as caller_id' do
+ expect(Gitlab::ApplicationContext).to receive(:with_context).with(caller_id: model.class.to_s)
- context 'when retaining all tracked job records' do
- before do
- model.finalize_background_migration(job_class_name, delete_tracking_jobs: false)
+ model.migrate_in(10.minutes, 'Class', 'Hello', 'World')
end
- it_behaves_like 'finalized background migration'
- include_examples 'retained tracked jobs', 'succeeded'
- end
+ context 'when a specific coordinator is given' do
+ let(:coordinator) { Gitlab::BackgroundMigration::JobCoordinator.for_tracking_database('main') }
- context 'during retry race condition' do
- let(:queue_items_added) { [] }
- let!(:job_perform_method) do
- ->(*arguments) do
- Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded(
- RSpec.current_example.example_group_instance.job_class_name,
- arguments
- )
-
- # Mock another process pushing queue jobs.
- queue_items_added = RSpec.current_example.example_group_instance.queue_items_added
- if queue_items_added.count < 10
- Sidekiq::Testing.disable! do
- job_class_name = RSpec.current_example.example_group_instance.job_class_name
- queue_items_added << BackgroundMigrationWorker.perform_async(job_class_name, [Time.current])
- queue_items_added << BackgroundMigrationWorker.perform_in(10, job_class_name, [Time.current])
- end
- end
+ it 'uses that coordinator' do
+ expect(coordinator).to receive(:perform_in).with(10.minutes, 'Class', 'Hello', 'World').and_call_original
+ expect(worker_class).to receive(:perform_in).with(10.minutes, 'Class', 'Hello', 'World')
+
+ model.migrate_in(10.minutes, 'Class', 'Hello', 'World', coordinator: coordinator)
end
end
+ end
- it_behaves_like 'finalized tracked background migration' do
- before do
- model.finalize_background_migration(job_class_name, delete_tracking_jobs: ['succeeded'])
+ describe '#delete_queued_jobs' do
+ let(:job1) { double }
+ let(:job2) { double }
+
+ it 'deletes all queued jobs for the given background migration' do
+ expect_next_instance_of(Gitlab::BackgroundMigration::JobCoordinator) do |coordinator|
+ expect(coordinator).to receive(:steal).with('BackgroundMigrationClassName') do |&block|
+ expect(block.call(job1)).to be(false)
+ expect(block.call(job2)).to be(false)
+ end
end
+
+ expect(job1).to receive(:delete)
+ expect(job2).to receive(:delete)
+
+ model.delete_queued_jobs('BackgroundMigrationClassName')
end
end
end
+ context 'when the migration is running against the main database' do
+ it_behaves_like 'helpers that enqueue background migrations', BackgroundMigrationWorker, 'main'
+ end
+
describe '#delete_job_tracking' do
let!(:job_class_name) { 'TestJob' }
diff --git a/spec/lib/gitlab/database/migrations/runner_spec.rb b/spec/lib/gitlab/database/migrations/runner_spec.rb
index 4616bd6941e..7dc965c84fa 100644
--- a/spec/lib/gitlab/database/migrations/runner_spec.rb
+++ b/spec/lib/gitlab/database/migrations/runner_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe Gitlab::Database::Migrations::Runner do
allow(ActiveRecord::Migrator).to receive(:new) do |dir, _all_migrations, _schema_migration_class, version_to_migrate|
migrator = double(ActiveRecord::Migrator)
expect(migrator).to receive(:run) do
- migration_runs << OpenStruct.new(dir: dir, version_to_migrate: version_to_migrate)
+ migration_runs << double('migrator', dir: dir, version_to_migrate: version_to_migrate)
end
migrator
end
diff --git a/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb b/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb
new file mode 100644
index 00000000000..e5a8143fcc3
--- /dev/null
+++ b/spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'cross-database foreign keys' do
+ # TODO: We are trying to empty out this list in
+ # https://gitlab.com/groups/gitlab-org/-/epics/7249 . Once we are done we can
+ # keep this test and assert that there are no cross-db foreign keys. We
+ # should not be adding anything to this list but should instead only add new
+ # loose foreign keys
+ # https://docs.gitlab.com/ee/development/database/loose_foreign_keys.html .
+ let(:allowed_cross_database_foreign_keys) do
+ %w(
+ ci_build_report_results.project_id
+ ci_builds.project_id
+ ci_builds_metadata.project_id
+ ci_daily_build_group_report_results.group_id
+ ci_daily_build_group_report_results.project_id
+ ci_freeze_periods.project_id
+ ci_job_artifacts.project_id
+ ci_job_token_project_scope_links.added_by_id
+ ci_job_token_project_scope_links.source_project_id
+ ci_job_token_project_scope_links.target_project_id
+ ci_pending_builds.namespace_id
+ ci_pending_builds.project_id
+ ci_pipeline_schedules.owner_id
+ ci_pipeline_schedules.project_id
+ ci_pipelines.merge_request_id
+ ci_pipelines.project_id
+ ci_project_monthly_usages.project_id
+ ci_refs.project_id
+ ci_resource_groups.project_id
+ ci_runner_namespaces.namespace_id
+ ci_runner_projects.project_id
+ ci_running_builds.project_id
+ ci_sources_pipelines.project_id
+ ci_sources_pipelines.source_project_id
+ ci_sources_projects.source_project_id
+ ci_stages.project_id
+ ci_subscriptions_projects.downstream_project_id
+ ci_subscriptions_projects.upstream_project_id
+ ci_triggers.owner_id
+ ci_triggers.project_id
+ ci_unit_tests.project_id
+ ci_variables.project_id
+ dast_profiles_pipelines.ci_pipeline_id
+ dast_scanner_profiles_builds.ci_build_id
+ dast_site_profiles_builds.ci_build_id
+ dast_site_profiles_pipelines.ci_pipeline_id
+ external_pull_requests.project_id
+ merge_requests.head_pipeline_id
+ merge_trains.pipeline_id
+ requirements_management_test_reports.build_id
+ security_scans.build_id
+ vulnerability_feedback.pipeline_id
+ vulnerability_occurrence_pipelines.pipeline_id
+ vulnerability_statistics.latest_pipeline_id
+ ).freeze
+ end
+
+ def foreign_keys_for(table_name)
+ ApplicationRecord.connection.foreign_keys(table_name)
+ end
+
+ def is_cross_db?(fk_record)
+ Gitlab::Database::GitlabSchema.table_schemas([fk_record.from_table, fk_record.to_table]).many?
+ end
+
+ it 'onlies have allowed list of cross-database foreign keys', :aggregate_failures do
+ all_tables = ApplicationRecord.connection.data_sources
+
+ all_tables.each do |table|
+ foreign_keys_for(table).each do |fk|
+ if is_cross_db?(fk)
+ column = "#{fk.from_table}.#{fk.column}"
+ expect(allowed_cross_database_foreign_keys).to include(column), "Found extra cross-database foreign key #{column} referencing #{fk.to_table} with constraint name #{fk.name}. When a foreign key references another database you must use a Loose Foreign Key instead https://docs.gitlab.com/ee/development/database/loose_foreign_keys.html ."
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb b/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb
index 5e107109fc9..64dcdb9628a 100644
--- a/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do
let(:model) { double(partitioning_strategy: partitioning_strategy, table_name: table, connection: connection) }
let(:partitioning_strategy) { double(missing_partitions: partitions, extra_partitions: [], after_adding_partitions: nil) }
let(:connection) { ActiveRecord::Base.connection }
- let(:table) { "some_table" }
+ let(:table) { "issues" }
before do
allow(connection).to receive(:table_exists?).and_call_original
@@ -36,6 +36,7 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do
end
it 'creates the partition' do
+ expect(connection).to receive(:execute).with("LOCK TABLE \"#{table}\" IN ACCESS EXCLUSIVE MODE")
expect(connection).to receive(:execute).with(partitions.first.to_sql)
expect(connection).to receive(:execute).with(partitions.second.to_sql)
diff --git a/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb b/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
index 636a09e5710..1cec0463055 100644
--- a/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::Partitioning::SlidingListStrategy do
let(:connection) { ActiveRecord::Base.connection }
let(:table_name) { :_test_partitioned_test }
- let(:model) { double('model', table_name: table_name, ignored_columns: %w[partition]) }
+ let(:model) { double('model', table_name: table_name, ignored_columns: %w[partition], connection: connection) }
let(:next_partition_if) { double('next_partition_if') }
let(:detach_partition_if) { double('detach_partition_if') }
@@ -94,7 +94,8 @@ RSpec.describe Gitlab::Database::Partitioning::SlidingListStrategy do
let(:detach_partition_if) { ->(p) { p != 5 } }
it 'is the leading set of partitions before that value' do
- expect(strategy.extra_partitions.map(&:value)).to contain_exactly(1, 2, 3, 4)
+ # should not contain partition 2 since it's the default value for the partition column
+ expect(strategy.extra_partitions.map(&:value)).to contain_exactly(1, 3, 4)
end
end
@@ -102,7 +103,7 @@ RSpec.describe Gitlab::Database::Partitioning::SlidingListStrategy do
let(:detach_partition_if) { proc { true } }
it 'is all but the most recent partition', :aggregate_failures do
- expect(strategy.extra_partitions.map(&:value)).to contain_exactly(1, 2, 3, 4, 5, 6, 7, 8, 9)
+ expect(strategy.extra_partitions.map(&:value)).to contain_exactly(1, 3, 4, 5, 6, 7, 8, 9)
expect(strategy.current_partitions.map(&:value).max).to eq(10)
end
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb
index c43b51e10a0..3072c413246 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb
@@ -3,14 +3,15 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::BackfillPartitionedTable, '#perform' do
- subject { described_class.new }
+ subject(:backfill_job) { described_class.new(connection: connection) }
+ let(:connection) { ActiveRecord::Base.connection }
let(:source_table) { '_test_partitioning_backfills' }
let(:destination_table) { "#{source_table}_part" }
let(:unique_key) { 'id' }
before do
- allow(subject).to receive(:transaction_open?).and_return(false)
+ allow(backfill_job).to receive(:transaction_open?).and_return(false)
end
context 'when the destination table exists' do
@@ -50,10 +51,9 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::BackfillPartition
stub_const("#{described_class}::SUB_BATCH_SIZE", 2)
stub_const("#{described_class}::PAUSE_SECONDS", pause_seconds)
- allow(subject).to receive(:sleep)
+ allow(backfill_job).to receive(:sleep)
end
- let(:connection) { ActiveRecord::Base.connection }
let(:source_model) { Class.new(ActiveRecord::Base) }
let(:destination_model) { Class.new(ActiveRecord::Base) }
let(:timestamp) { Time.utc(2020, 1, 2).round }
@@ -66,7 +66,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::BackfillPartition
it 'copies data into the destination table idempotently' do
expect(destination_model.count).to eq(0)
- subject.perform(source1.id, source3.id, source_table, destination_table, unique_key)
+ backfill_job.perform(source1.id, source3.id, source_table, destination_table, unique_key)
expect(destination_model.count).to eq(3)
@@ -76,7 +76,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::BackfillPartition
expect(destination_record.attributes).to eq(source_record.attributes)
end
- subject.perform(source1.id, source3.id, source_table, destination_table, unique_key)
+ backfill_job.perform(source1.id, source3.id, source_table, destination_table, unique_key)
expect(destination_model.count).to eq(3)
end
@@ -87,13 +87,13 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::BackfillPartition
expect(bulk_copy).to receive(:copy_between).with(source3.id, source3.id)
end
- subject.perform(source1.id, source3.id, source_table, destination_table, unique_key)
+ backfill_job.perform(source1.id, source3.id, source_table, destination_table, unique_key)
end
it 'pauses after copying each sub-batch' do
- expect(subject).to receive(:sleep).with(pause_seconds).twice
+ expect(backfill_job).to receive(:sleep).with(pause_seconds).twice
- subject.perform(source1.id, source3.id, source_table, destination_table, unique_key)
+ backfill_job.perform(source1.id, source3.id, source_table, destination_table, unique_key)
end
it 'marks each job record as succeeded after processing' do
@@ -103,7 +103,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::BackfillPartition
expect(::Gitlab::Database::BackgroundMigrationJob).to receive(:mark_all_as_succeeded).and_call_original
expect do
- subject.perform(source1.id, source3.id, source_table, destination_table, unique_key)
+ backfill_job.perform(source1.id, source3.id, source_table, destination_table, unique_key)
end.to change { ::Gitlab::Database::BackgroundMigrationJob.succeeded.count }.from(0).to(1)
end
@@ -111,24 +111,24 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::BackfillPartition
create(:background_migration_job, class_name: "::#{described_class.name}",
arguments: [source1.id, source3.id, source_table, destination_table, unique_key])
- jobs_updated = subject.perform(source1.id, source3.id, source_table, destination_table, unique_key)
+ jobs_updated = backfill_job.perform(source1.id, source3.id, source_table, destination_table, unique_key)
expect(jobs_updated).to eq(1)
end
context 'when the job is run within an explicit transaction block' do
- let(:mock_connection) { double('connection') }
+ subject(:backfill_job) { described_class.new(connection: mock_connection) }
- before do
- allow(subject).to receive(:connection).and_return(mock_connection)
- allow(subject).to receive(:transaction_open?).and_return(true)
- end
+ let(:mock_connection) { double('connection') }
it 'raises an error before copying data' do
+ expect(backfill_job).to receive(:transaction_open?).and_call_original
+
+ expect(mock_connection).to receive(:transaction_open?).and_return(true)
expect(mock_connection).not_to receive(:execute)
expect do
- subject.perform(1, 100, source_table, destination_table, unique_key)
+ backfill_job.perform(1, 100, source_table, destination_table, unique_key)
end.to raise_error(/Aborting job to backfill partitioned #{source_table}/)
expect(destination_model.count).to eq(0)
@@ -137,24 +137,25 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::BackfillPartition
end
context 'when the destination table does not exist' do
+ subject(:backfill_job) { described_class.new(connection: mock_connection) }
+
let(:mock_connection) { double('connection') }
let(:mock_logger) { double('logger') }
before do
- allow(subject).to receive(:connection).and_return(mock_connection)
- allow(subject).to receive(:logger).and_return(mock_logger)
-
- expect(mock_connection).to receive(:table_exists?).with(destination_table).and_return(false)
+ allow(backfill_job).to receive(:logger).and_return(mock_logger)
allow(mock_logger).to receive(:warn)
end
it 'exits without attempting to copy data' do
+ expect(mock_connection).to receive(:table_exists?).with(destination_table).and_return(false)
expect(mock_connection).not_to receive(:execute)
subject.perform(1, 100, source_table, destination_table, unique_key)
end
it 'logs a warning message that the job was skipped' do
+ expect(mock_connection).to receive(:table_exists?).with(destination_table).and_return(false)
expect(mock_logger).to receive(:warn).with(/#{destination_table} does not exist/)
subject.perform(1, 100, source_table, destination_table, unique_key)
diff --git a/spec/lib/gitlab/database/reflection_spec.rb b/spec/lib/gitlab/database/reflection_spec.rb
index 7c3d797817d..efc5bd1c1e1 100644
--- a/spec/lib/gitlab/database/reflection_spec.rb
+++ b/spec/lib/gitlab/database/reflection_spec.rb
@@ -259,6 +259,66 @@ RSpec.describe Gitlab::Database::Reflection do
end
end
+ describe '#flavor', :delete do
+ let(:result) { [double] }
+ let(:connection) { database.model.connection }
+
+ def stub_statements(statements)
+ statements = Array.wrap(statements)
+ execute = connection.method(:execute)
+
+ allow(connection).to receive(:execute) do |arg|
+ if statements.include?(arg)
+ result
+ else
+ execute.call(arg)
+ end
+ end
+ end
+
+ it 're-raises exceptions not matching expected messages' do
+ expect(database.model.connection)
+ .to receive(:execute)
+ .and_raise(ActiveRecord::StatementInvalid, 'Something else')
+
+ expect { database.flavor }.to raise_error ActiveRecord::StatementInvalid, /Something else/
+ end
+
+ it 'recognizes Amazon Aurora PostgreSQL' do
+ stub_statements(['SHOW rds.extensions', 'SELECT AURORA_VERSION()'])
+
+ expect(database.flavor).to eq('Amazon Aurora PostgreSQL')
+ end
+
+ it 'recognizes PostgreSQL on Amazon RDS' do
+ stub_statements('SHOW rds.extensions')
+
+ expect(database.flavor).to eq('PostgreSQL on Amazon RDS')
+ end
+
+ it 'recognizes CloudSQL for PostgreSQL' do
+ stub_statements('SHOW cloudsql.iam_authentication')
+
+ expect(database.flavor).to eq('Cloud SQL for PostgreSQL')
+ end
+
+ it 'recognizes Azure Database for PostgreSQL - Flexible Server' do
+ stub_statements(["SELECT datname FROM pg_database WHERE datname = 'azure_maintenance'", 'SHOW azure.extensions'])
+
+ expect(database.flavor).to eq('Azure Database for PostgreSQL - Flexible Server')
+ end
+
+ it 'recognizes Azure Database for PostgreSQL - Single Server' do
+ stub_statements("SELECT datname FROM pg_database WHERE datname = 'azure_maintenance'")
+
+ expect(database.flavor).to eq('Azure Database for PostgreSQL - Single Server')
+ end
+
+ it 'returns nil if can not recognize the flavor' do
+ expect(database.flavor).to be_nil
+ end
+ end
+
describe '#config' do
it 'returns a HashWithIndifferentAccess' do
expect(database.config)
diff --git a/spec/lib/gitlab/database/reindexing/coordinator_spec.rb b/spec/lib/gitlab/database/reindexing/coordinator_spec.rb
index 0afbe46b7f1..bb91617714a 100644
--- a/spec/lib/gitlab/database/reindexing/coordinator_spec.rb
+++ b/spec/lib/gitlab/database/reindexing/coordinator_spec.rb
@@ -6,30 +6,34 @@ RSpec.describe Gitlab::Database::Reindexing::Coordinator do
include Database::DatabaseHelpers
include ExclusiveLeaseHelpers
- describe '.perform' do
- subject { described_class.new(index, notifier).perform }
-
- let(:index) { create(:postgres_index) }
- let(:notifier) { instance_double(Gitlab::Database::Reindexing::GrafanaNotifier, notify_start: nil, notify_end: nil) }
- let(:reindexer) { instance_double(Gitlab::Database::Reindexing::ReindexConcurrently, perform: nil) }
- let(:action) { create(:reindex_action, index: index) }
+ let(:notifier) { instance_double(Gitlab::Database::Reindexing::GrafanaNotifier, notify_start: nil, notify_end: nil) }
+ let(:index) { create(:postgres_index) }
+ let(:connection) { index.connection }
- let!(:lease) { stub_exclusive_lease(lease_key, uuid, timeout: lease_timeout) }
- let(:lease_key) { "gitlab/database/reindexing/coordinator/#{Gitlab::Database::PRIMARY_DATABASE_NAME}" }
- let(:lease_timeout) { 1.day }
- let(:uuid) { 'uuid' }
+ let!(:lease) { stub_exclusive_lease(lease_key, uuid, timeout: lease_timeout) }
+ let(:lease_key) { "gitlab/database/reindexing/coordinator/#{Gitlab::Database::PRIMARY_DATABASE_NAME}" }
+ let(:lease_timeout) { 1.day }
+ let(:uuid) { 'uuid' }
- around do |example|
- model = Gitlab::Database.database_base_models[Gitlab::Database::PRIMARY_DATABASE_NAME]
+ around do |example|
+ model = Gitlab::Database.database_base_models[Gitlab::Database::PRIMARY_DATABASE_NAME]
- Gitlab::Database::SharedModel.using_connection(model.connection) do
- example.run
- end
+ Gitlab::Database::SharedModel.using_connection(model.connection) do
+ example.run
end
+ end
- before do
- swapout_view_for_table(:postgres_indexes)
+ before do
+ swapout_view_for_table(:postgres_indexes)
+ end
+ describe '#perform' do
+ subject { described_class.new(index, notifier).perform }
+
+ let(:reindexer) { instance_double(Gitlab::Database::Reindexing::ReindexConcurrently, perform: nil) }
+ let(:action) { create(:reindex_action, index: index) }
+
+ before do
allow(Gitlab::Database::Reindexing::ReindexConcurrently).to receive(:new).with(index).and_return(reindexer)
allow(Gitlab::Database::Reindexing::ReindexAction).to receive(:create_for).with(index).and_return(action)
end
@@ -87,4 +91,40 @@ RSpec.describe Gitlab::Database::Reindexing::Coordinator do
end
end
end
+
+ describe '#drop' do
+ let(:connection) { index.connection }
+
+ subject(:drop) { described_class.new(index, notifier).drop }
+
+ context 'when exclusive lease is granted' do
+ it 'drops the index with lock retries' do
+ expect(lease).to receive(:try_obtain).ordered.and_return(uuid)
+
+ expect_query("SET lock_timeout TO '60000ms'")
+ expect_query("DROP INDEX CONCURRENTLY IF EXISTS \"public\".\"#{index.name}\"")
+ expect_query("RESET idle_in_transaction_session_timeout; RESET lock_timeout")
+
+ expect(Gitlab::ExclusiveLease).to receive(:cancel).ordered.with(lease_key, uuid)
+
+ drop
+ end
+
+ def expect_query(sql)
+ expect(connection).to receive(:execute).ordered.with(sql).and_wrap_original do |method, sql|
+ method.call(sql.sub(/CONCURRENTLY/, ''))
+ end
+ end
+ end
+
+ context 'when exclusive lease is not granted' do
+ it 'does not drop the index' do
+ expect(lease).to receive(:try_obtain).ordered.and_return(false)
+ expect(Gitlab::Database::WithLockRetriesOutsideTransaction).not_to receive(:new)
+ expect(connection).not_to receive(:execute)
+
+ drop
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/email/failure_handler_spec.rb b/spec/lib/gitlab/email/failure_handler_spec.rb
new file mode 100644
index 00000000000..a912996e8f2
--- /dev/null
+++ b/spec/lib/gitlab/email/failure_handler_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Email::FailureHandler do
+ let(:raw_message) { fixture_file('emails/valid_reply.eml') }
+ let(:receiver) { Gitlab::Email::Receiver.new(raw_message) }
+
+ context 'email processing errors' do
+ where(:error, :message, :can_retry) do
+ [
+ [Gitlab::Email::UnknownIncomingEmail, "We couldn't figure out what the email is for", false],
+ [Gitlab::Email::SentNotificationNotFoundError, "We couldn't figure out what the email is in reply to", false],
+ [Gitlab::Email::ProjectNotFound, "We couldn't find the project", false],
+ [Gitlab::Email::EmptyEmailError, "It appears that the email is blank", true],
+ [Gitlab::Email::UserNotFoundError, "We couldn't figure out what user corresponds to the email", false],
+ [Gitlab::Email::UserBlockedError, "Your account has been blocked", false],
+ [Gitlab::Email::UserNotAuthorizedError, "You are not allowed to perform this action", false],
+ [Gitlab::Email::NoteableNotFoundError, "The thread you are replying to no longer exists", false],
+ [Gitlab::Email::InvalidAttachment, "Could not deal with that", false],
+ [Gitlab::Email::InvalidRecordError, "The note could not be created for the following reasons", true],
+ [Gitlab::Email::EmailTooLarge, "it is too large", false]
+ ]
+ end
+
+ with_them do
+ it "sends out a rejection email for #{params[:error]}" do
+ perform_enqueued_jobs do
+ described_class.handle(receiver, error.new(message))
+ end
+
+ email = ActionMailer::Base.deliveries.last
+ expect(email).not_to be_nil
+ expect(email.to).to match_array(["jake@adventuretime.ooo"])
+ expect(email.subject).to include("Rejected")
+ expect(email.body.parts.last.to_s).to include(message)
+ end
+
+ it 'strips out the body before passing to EmailRejectionMailer' do
+ mail = Mail.new(raw_message)
+ mail.body = nil
+
+ expect(EmailRejectionMailer).to receive(:rejection).with(match(message), mail.encoded, can_retry).and_call_original
+
+ described_class.handle(receiver, error.new(message))
+ end
+ end
+ end
+
+ context 'non-processing errors' do
+ where(:error) do
+ [
+ [Gitlab::Email::AutoGeneratedEmailError.new("")],
+ [ActiveRecord::StatementTimeout.new("StatementTimeout")],
+ [RateLimitedService::RateLimitedError.new(key: :issues_create, rate_limiter: nil)]
+ ]
+ end
+
+ with_them do
+ it "does not send a rejection email for #{params[:error]}" do
+ perform_enqueued_jobs do
+ described_class.handle(receiver, error)
+ end
+
+ expect(ActionMailer::Base.deliveries).to be_empty
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb b/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb
index af5f11c9362..3febc10831a 100644
--- a/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb
+++ b/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb
@@ -178,5 +178,14 @@ RSpec.describe Gitlab::ErrorTracking::Processor::SidekiqProcessor do
expect(result_hash.dig(:extra, :sidekiq)).to be_nil
end
end
+
+ context 'when there is Sidekiq data but no job' do
+ let(:value) { { other: 'foo' } }
+ let(:wrapped_value) { { extra: { sidekiq: value } } }
+
+ it 'does nothing' do
+ expect(result_hash.dig(:extra, :sidekiq)).to eq(value)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/event_store/event_spec.rb b/spec/lib/gitlab/event_store/event_spec.rb
new file mode 100644
index 00000000000..97f6870a5ec
--- /dev/null
+++ b/spec/lib/gitlab/event_store/event_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::EventStore::Event do
+ let(:event_class) { stub_const('TestEvent', Class.new(described_class)) }
+ let(:event) { event_class.new(data: data) }
+ let(:data) { { project_id: 123, project_path: 'org/the-project' } }
+
+ context 'when schema is not defined' do
+ it 'raises an error on initialization' do
+ expect { event }.to raise_error(NotImplementedError)
+ end
+ end
+
+ context 'when schema is defined' do
+ before do
+ event_class.class_eval do
+ def schema
+ {
+ 'required' => ['project_id'],
+ 'type' => 'object',
+ 'properties' => {
+ 'project_id' => { 'type' => 'integer' },
+ 'project_path' => { 'type' => 'string' }
+ }
+ }
+ end
+ end
+ end
+
+ describe 'schema validation' do
+ context 'when data matches the schema' do
+ it 'initializes the event correctly' do
+ expect(event.data).to eq(data)
+ end
+ end
+
+ context 'when required properties are present as well as unknown properties' do
+ let(:data) { { project_id: 123, unknown_key: 'unknown_value' } }
+
+ it 'initializes the event correctly' do
+ expect(event.data).to eq(data)
+ end
+ end
+
+ context 'when some properties are missing' do
+ let(:data) { { project_path: 'org/the-project' } }
+
+ it 'expects all properties to be present' do
+ expect { event }.to raise_error(Gitlab::EventStore::InvalidEvent, /does not match the defined schema/)
+ end
+ end
+
+ context 'when data is not a Hash' do
+ let(:data) { 123 }
+
+ it 'raises an error' do
+ expect { event }.to raise_error(Gitlab::EventStore::InvalidEvent, 'Event data must be a Hash')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/event_store/store_spec.rb b/spec/lib/gitlab/event_store/store_spec.rb
new file mode 100644
index 00000000000..711e1d5b4d5
--- /dev/null
+++ b/spec/lib/gitlab/event_store/store_spec.rb
@@ -0,0 +1,262 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::EventStore::Store do
+ let(:event_klass) { stub_const('TestEvent', Class.new(Gitlab::EventStore::Event)) }
+ let(:event) { event_klass.new(data: data) }
+ let(:another_event_klass) { stub_const('TestAnotherEvent', Class.new(Gitlab::EventStore::Event)) }
+
+ let(:worker) do
+ stub_const('EventSubscriber', Class.new).tap do |klass|
+ klass.class_eval do
+ include ApplicationWorker
+ include Gitlab::EventStore::Subscriber
+
+ def handle_event(event)
+ event.data
+ end
+ end
+ end
+ end
+
+ let(:another_worker) do
+ stub_const('AnotherEventSubscriber', Class.new).tap do |klass|
+ klass.class_eval do
+ include ApplicationWorker
+ include Gitlab::EventStore::Subscriber
+ end
+ end
+ end
+
+ let(:unrelated_worker) do
+ stub_const('UnrelatedEventSubscriber', Class.new).tap do |klass|
+ klass.class_eval do
+ include ApplicationWorker
+ include Gitlab::EventStore::Subscriber
+ end
+ end
+ end
+
+ before do
+ event_klass.class_eval do
+ def schema
+ {
+ 'required' => %w[name id],
+ 'type' => 'object',
+ 'properties' => {
+ 'name' => { 'type' => 'string' },
+ 'id' => { 'type' => 'integer' }
+ }
+ }
+ end
+ end
+ end
+
+ describe '#subscribe' do
+ it 'subscribes a worker to an event' do
+ store = described_class.new do |s|
+ s.subscribe worker, to: event_klass
+ end
+
+ subscriptions = store.subscriptions[event_klass]
+ expect(subscriptions.map(&:worker)).to contain_exactly(worker)
+ end
+
+ it 'subscribes multiple workers to an event' do
+ store = described_class.new do |s|
+ s.subscribe worker, to: event_klass
+ s.subscribe another_worker, to: event_klass
+ end
+
+ subscriptions = store.subscriptions[event_klass]
+ expect(subscriptions.map(&:worker)).to contain_exactly(worker, another_worker)
+ end
+
+ it 'subscribes a worker to multiple events in separate calls' do
+ store = described_class.new do |s|
+ s.subscribe worker, to: event_klass
+ s.subscribe worker, to: another_event_klass
+ end
+
+ subscriptions = store.subscriptions[event_klass]
+ expect(subscriptions.map(&:worker)).to contain_exactly(worker)
+
+ subscriptions = store.subscriptions[another_event_klass]
+ expect(subscriptions.map(&:worker)).to contain_exactly(worker)
+ end
+
+ it 'subscribes a worker to multiple events in a single call' do
+ store = described_class.new do |s|
+ s.subscribe worker, to: [event_klass, another_event_klass]
+ end
+
+ subscriptions = store.subscriptions[event_klass]
+ expect(subscriptions.map(&:worker)).to contain_exactly(worker)
+
+ subscriptions = store.subscriptions[another_event_klass]
+ expect(subscriptions.map(&:worker)).to contain_exactly(worker)
+ end
+
+ it 'subscribes a worker to an event with condition' do
+ store = described_class.new do |s|
+ s.subscribe worker, to: event_klass, if: ->(event) { event.data[:name] == 'Alice' }
+ end
+
+ subscriptions = store.subscriptions[event_klass]
+
+ expect(subscriptions.size).to eq(1)
+
+ subscription = subscriptions.first
+ expect(subscription).to be_an_instance_of(Gitlab::EventStore::Subscription)
+ expect(subscription.worker).to eq(worker)
+ expect(subscription.condition.call(double(data: { name: 'Bob' }))).to eq(false)
+ expect(subscription.condition.call(double(data: { name: 'Alice' }))).to eq(true)
+ end
+
+ it 'refuses the subscription if the target is not an Event object' do
+ expect do
+ described_class.new do |s|
+ s.subscribe worker, to: Integer
+ end
+ end.to raise_error(
+ Gitlab::EventStore::Error,
+ /Event being subscribed to is not a subclass of Gitlab::EventStore::Event/)
+ end
+
+ it 'refuses the subscription if the subscriber is not a worker' do
+ expect do
+ described_class.new do |s|
+ s.subscribe double, to: event_klass
+ end
+ end.to raise_error(
+ Gitlab::EventStore::Error,
+ /Subscriber is not an ApplicationWorker/)
+ end
+ end
+
+ describe '#publish' do
+ let(:data) { { name: 'Bob', id: 123 } }
+
+ context 'when event has a subscribed worker' do
+ let(:store) do
+ described_class.new do |store|
+ store.subscribe worker, to: event_klass
+ store.subscribe another_worker, to: another_event_klass
+ end
+ end
+
+ it 'dispatches the event to the subscribed worker' do
+ expect(worker).to receive(:perform_async).with('TestEvent', data)
+ expect(another_worker).not_to receive(:perform_async)
+
+ store.publish(event)
+ end
+
+ context 'when other workers subscribe to the same event' do
+ let(:store) do
+ described_class.new do |store|
+ store.subscribe worker, to: event_klass
+ store.subscribe another_worker, to: event_klass
+ store.subscribe unrelated_worker, to: another_event_klass
+ end
+ end
+
+ it 'dispatches the event to each subscribed worker' do
+ expect(worker).to receive(:perform_async).with('TestEvent', data)
+ expect(another_worker).to receive(:perform_async).with('TestEvent', data)
+ expect(unrelated_worker).not_to receive(:perform_async)
+
+ store.publish(event)
+ end
+ end
+
+ context 'when an error is raised' do
+ before do
+ allow(worker).to receive(:perform_async).and_raise(NoMethodError, 'the error message')
+ end
+
+ it 'is rescued and tracked' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_and_raise_for_dev_exception)
+ .with(kind_of(NoMethodError), event_class: event.class.name, event_data: event.data)
+ .and_call_original
+
+ expect { store.publish(event) }.to raise_error(NoMethodError, 'the error message')
+ end
+ end
+
+ it 'raises and tracks an error when event is published inside a database transaction' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_and_raise_for_dev_exception)
+ .at_least(:once)
+ .and_call_original
+
+ expect do
+ ApplicationRecord.transaction do
+ store.publish(event)
+ end
+ end.to raise_error(Sidekiq::Worker::EnqueueFromTransactionError)
+ end
+
+ it 'refuses publishing if the target is not an Event object' do
+ expect { store.publish(double(:event)) }
+ .to raise_error(
+ Gitlab::EventStore::Error,
+ /Event being published is not an instance of Gitlab::EventStore::Event/)
+ end
+ end
+
+ context 'when event has subscribed workers with condition' do
+ let(:store) do
+ described_class.new do |s|
+ s.subscribe worker, to: event_klass, if: -> (event) { event.data[:name] == 'Bob' }
+ s.subscribe another_worker, to: event_klass, if: -> (event) { event.data[:name] == 'Alice' }
+ end
+ end
+
+ let(:event) { event_klass.new(data: data) }
+
+ it 'dispatches the event to the workers satisfying the condition' do
+ expect(worker).to receive(:perform_async).with('TestEvent', data)
+ expect(another_worker).not_to receive(:perform_async)
+
+ store.publish(event)
+ end
+ end
+ end
+
+ describe 'subscriber' do
+ let(:data) { { name: 'Bob', id: 123 } }
+ let(:event_name) { event.class.name }
+ let(:worker_instance) { worker.new }
+
+ subject { worker_instance.perform(event_name, data) }
+
+ it 'handles the event' do
+ expect(worker_instance).to receive(:handle_event).with(instance_of(event.class))
+
+ expect_any_instance_of(event.class) do |event|
+ expect(event).to receive(:data).and_return(data)
+ end
+
+ subject
+ end
+
+ context 'when the event name does not exist' do
+ let(:event_name) { 'UnknownClass' }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(Gitlab::EventStore::InvalidEvent)
+ end
+ end
+
+ context 'when the worker does not define handle_event method' do
+ let(:worker_instance) { another_worker.new }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(NotImplementedError)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/exceptions_app_spec.rb b/spec/lib/gitlab/exceptions_app_spec.rb
new file mode 100644
index 00000000000..6b726a044a8
--- /dev/null
+++ b/spec/lib/gitlab/exceptions_app_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::ExceptionsApp, type: :request do
+ describe '.call' do
+ let(:exceptions_app) { described_class.new(Rails.public_path) }
+ let(:app) { ActionDispatch::ShowExceptions.new(error_raiser, exceptions_app) }
+
+ before do
+ @app = app
+ end
+
+ context 'for a 500 error' do
+ let(:error_raiser) { proc { raise 'an unhandled error' } }
+
+ context 'for an HTML request' do
+ it 'fills in the request ID' do
+ get '/', env: { 'action_dispatch.request_id' => 'foo' }
+
+ expect(response).to have_gitlab_http_status(:internal_server_error)
+ expect(response).to have_header('X-Gitlab-Custom-Error')
+ expect(response.body).to include('Request ID: <code>foo</code>')
+ end
+
+ it 'HTML-escapes the request ID' do
+ get '/', env: { 'action_dispatch.request_id' => '<b>foo</b>' }
+
+ expect(response).to have_gitlab_http_status(:internal_server_error)
+ expect(response).to have_header('X-Gitlab-Custom-Error')
+ expect(response.body).to include('Request ID: <code>&lt;b&gt;foo&lt;/b&gt;</code>')
+ end
+
+ it 'returns an empty 500 when the 500.html page cannot be found' do
+ allow(File).to receive(:exist?).and_return(false)
+
+ get '/', env: { 'action_dispatch.request_id' => 'foo' }
+
+ expect(response).to have_gitlab_http_status(:internal_server_error)
+ expect(response).not_to have_header('X-Gitlab-Custom-Error')
+ expect(response.body).to be_empty
+ end
+ end
+
+ context 'for a JSON request' do
+ it 'does not include the request ID' do
+ get '/', env: { 'action_dispatch.request_id' => 'foo' }, as: :json
+
+ expect(response).to have_gitlab_http_status(:internal_server_error)
+ expect(response).not_to have_header('X-Gitlab-Custom-Error')
+ expect(response.body).not_to include('foo')
+ end
+ end
+ end
+
+ context 'for a 404 error' do
+ let(:error_raiser) { proc { raise AbstractController::ActionNotFound } }
+
+ it 'returns a 404 response that does not include the request ID' do
+ get '/', env: { 'action_dispatch.request_id' => 'foo' }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(response).not_to have_header('X-Gitlab-Custom-Error')
+ expect(response.body).not_to include('foo')
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/gfm/reference_rewriter_spec.rb b/spec/lib/gitlab/gfm/reference_rewriter_spec.rb
index f4875aa0ebc..7d4a3655be6 100644
--- a/spec/lib/gitlab/gfm/reference_rewriter_spec.rb
+++ b/spec/lib/gitlab/gfm/reference_rewriter_spec.rb
@@ -92,7 +92,7 @@ RSpec.describe Gitlab::Gfm::ReferenceRewriter do
let!(:group_label) { create(:group_label, id: 321, name: 'group label', group: old_group) }
before do
- old_project.update(namespace: old_group)
+ old_project.update!(namespace: old_group)
end
context 'label referenced by id' do
diff --git a/spec/lib/gitlab/git_access_spec.rb b/spec/lib/gitlab/git_access_spec.rb
index bf2e3c7f5f8..4bf7994f4dd 100644
--- a/spec/lib/gitlab/git_access_spec.rb
+++ b/spec/lib/gitlab/git_access_spec.rb
@@ -96,7 +96,7 @@ RSpec.describe Gitlab::GitAccess do
context 'when the DeployKey has access to the project' do
before do
- deploy_key.deploy_keys_projects.create(project: project, can_push: true)
+ deploy_key.deploy_keys_projects.create!(project: project, can_push: true)
end
it 'allows push and pull access' do
@@ -820,7 +820,7 @@ RSpec.describe Gitlab::GitAccess do
project.add_role(user, role)
end
- protected_branch.save
+ protected_branch.save!
aggregate_failures do
matrix.each do |action, allowed|
@@ -1090,7 +1090,7 @@ RSpec.describe Gitlab::GitAccess do
context 'when deploy_key can push' do
context 'when project is authorized' do
before do
- key.deploy_keys_projects.create(project: project, can_push: true)
+ key.deploy_keys_projects.create!(project: project, can_push: true)
end
it { expect { push_access_check }.not_to raise_error }
@@ -1120,7 +1120,7 @@ RSpec.describe Gitlab::GitAccess do
context 'when deploy_key cannot push' do
context 'when project is authorized' do
before do
- key.deploy_keys_projects.create(project: project, can_push: false)
+ key.deploy_keys_projects.create!(project: project, can_push: false)
end
it { expect { push_access_check }.to raise_forbidden(described_class::ERROR_MESSAGES[:deploy_key_upload]) }
diff --git a/spec/lib/gitlab/gpg/commit_spec.rb b/spec/lib/gitlab/gpg/commit_spec.rb
index 20d5972bd88..9c399e78d80 100644
--- a/spec/lib/gitlab/gpg/commit_spec.rb
+++ b/spec/lib/gitlab/gpg/commit_spec.rb
@@ -233,30 +233,6 @@ RSpec.describe Gitlab::Gpg::Commit do
verification_status: 'multiple_signatures'
)
end
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(multiple_gpg_signatures: false)
- end
-
- it 'returns an valid signature' do
- verified_signature = double('verified-signature', fingerprint: GpgHelpers::User1.fingerprint, valid?: true)
- allow(GPGME::Crypto).to receive(:new).and_return(crypto)
- allow(crypto).to receive(:verify).and_yield(verified_signature).and_yield(verified_signature)
-
- signature = described_class.new(commit).signature
-
- expect(signature).to have_attributes(
- commit_sha: commit_sha,
- project: project,
- gpg_key: gpg_key,
- gpg_key_primary_keyid: GpgHelpers::User1.primary_keyid,
- gpg_key_user_name: GpgHelpers::User1.names.first,
- gpg_key_user_email: GpgHelpers::User1.emails.first,
- verification_status: 'verified'
- )
- end
- end
end
context 'commit signed with a subkey' do
diff --git a/spec/lib/gitlab/http_spec.rb b/spec/lib/gitlab/http_spec.rb
index d0aae2ac475..7d459f2d88a 100644
--- a/spec/lib/gitlab/http_spec.rb
+++ b/spec/lib/gitlab/http_spec.rb
@@ -29,14 +29,42 @@ RSpec.describe Gitlab::HTTP do
context 'when reading the response is too slow' do
before do
+ # Override Net::HTTP to add a delay between sending each response chunk
+ mocked_http = Class.new(Net::HTTP) do
+ def request(*)
+ super do |response|
+ response.instance_eval do
+ def read_body(*)
+ @body.each do |fragment|
+ sleep 0.002.seconds
+
+ yield fragment if block_given?
+ end
+ end
+ end
+
+ yield response if block_given?
+
+ response
+ end
+ end
+ end
+
+ @original_net_http = Net.send(:remove_const, :HTTP)
+ Net.send(:const_set, :HTTP, mocked_http)
+
stub_const("#{described_class}::DEFAULT_READ_TOTAL_TIMEOUT", 0.001.seconds)
WebMock.stub_request(:post, /.*/).to_return do |request|
- sleep 0.002.seconds
- { body: 'I\'m slow', status: 200 }
+ { body: %w(a b), status: 200 }
end
end
+ after do
+ Net.send(:remove_const, :HTTP)
+ Net.send(:const_set, :HTTP, @original_net_http)
+ end
+
let(:options) { {} }
subject(:request_slow_responder) { described_class.post('http://example.org', **options) }
@@ -51,7 +79,7 @@ RSpec.describe Gitlab::HTTP do
end
it 'still calls the block' do
- expect { |b| described_class.post('http://example.org', **options, &b) }.to yield_with_args
+ expect { |b| described_class.post('http://example.org', **options, &b) }.to yield_successive_args('a', 'b')
end
end
diff --git a/spec/lib/gitlab/import/set_async_jid_spec.rb b/spec/lib/gitlab/import/set_async_jid_spec.rb
index 016f7cac61a..6931a7a953d 100644
--- a/spec/lib/gitlab/import/set_async_jid_spec.rb
+++ b/spec/lib/gitlab/import/set_async_jid_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::Import::SetAsyncJid do
it 'sets the JID in Redis' do
expect(Gitlab::SidekiqStatus)
.to receive(:set)
- .with("async-import/project-import-state/#{project.id}", Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION, value: 2)
+ .with("async-import/project-import-state/#{project.id}", Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION)
.and_call_original
described_class.set_jid(project.import_state)
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 7ed80cbcf66..f4a112d35aa 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -58,6 +58,7 @@ issues:
- test_reports
- requirement
- incident_management_issuable_escalation_status
+- incident_management_timeline_events
- pending_escalations
- customer_relations_contacts
- issue_customer_relations_contacts
@@ -135,6 +136,7 @@ project_members:
- source
- project
- member_task
+- member_namespace
merge_requests:
- status_check_responses
- subscriptions
@@ -280,6 +282,7 @@ ci_pipelines:
- dast_site_profiles_pipeline
- package_build_infos
- package_file_build_infos
+- build_trace_chunks
ci_refs:
- project
- ci_pipelines
@@ -601,6 +604,7 @@ project:
- bulk_import_exports
- ci_project_mirror
- sync_events
+- secure_files
award_emoji:
- awardable
- user
diff --git a/spec/lib/gitlab/import_export/avatar_saver_spec.rb b/spec/lib/gitlab/import_export/avatar_saver_spec.rb
index 334d930c47c..d897ce76da0 100644
--- a/spec/lib/gitlab/import_export/avatar_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/avatar_saver_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe Gitlab::ImportExport::AvatarSaver do
end
it 'saves a project avatar' do
- described_class.new(project: project_with_avatar, shared: shared).save
+ described_class.new(project: project_with_avatar, shared: shared).save # rubocop:disable Rails/SaveBang
expect(File).to exist(Dir["#{shared.export_path}/avatar/**/dk.png"].first)
end
diff --git a/spec/lib/gitlab/import_export/base/relation_factory_spec.rb b/spec/lib/gitlab/import_export/base/relation_factory_spec.rb
index bd8873fe20e..b8999f608b1 100644
--- a/spec/lib/gitlab/import_export/base/relation_factory_spec.rb
+++ b/spec/lib/gitlab/import_export/base/relation_factory_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Gitlab::ImportExport::Base::RelationFactory do
let(:excluded_keys) { [] }
subject do
- described_class.create(relation_sym: relation_sym,
+ described_class.create(relation_sym: relation_sym, # rubocop:disable Rails/SaveBang
relation_hash: relation_hash,
relation_index: 1,
object_builder: Gitlab::ImportExport::Project::ObjectBuilder,
diff --git a/spec/lib/gitlab/import_export/design_repo_restorer_spec.rb b/spec/lib/gitlab/import_export/design_repo_restorer_spec.rb
index 6680f4e7a03..346f653acd4 100644
--- a/spec/lib/gitlab/import_export/design_repo_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/design_repo_restorer_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe Gitlab::ImportExport::DesignRepoRestorer do
allow(instance).to receive(:storage_path).and_return(export_path)
end
- bundler.save
+ bundler.save # rubocop:disable Rails/SaveBang
end
after do
diff --git a/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb b/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
index d5f31f235f5..adb613c3abc 100644
--- a/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
+++ b/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
@@ -258,7 +258,7 @@ RSpec.describe Gitlab::ImportExport::FastHashSerializer do
create(:resource_label_event, label: group_label, merge_request: merge_request)
create(:event, :created, target: milestone, project: project, author: user)
- create(:service, project: project, type: 'CustomIssueTrackerService', category: 'issue_tracker', properties: { one: 'value' })
+ create(:integration, project: project, type: 'CustomIssueTrackerService', category: 'issue_tracker', properties: { one: 'value' })
create(:project_custom_attribute, project: project)
create(:project_custom_attribute, project: project)
diff --git a/spec/lib/gitlab/import_export/group/relation_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/group/relation_tree_restorer_spec.rb
index ce6607f6a26..2f1e2dd2db4 100644
--- a/spec/lib/gitlab/import_export/group/relation_tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/group/relation_tree_restorer_spec.rb
@@ -48,41 +48,16 @@ RSpec.describe Gitlab::ImportExport::Group::RelationTreeRestorer do
subject { relation_tree_restorer.restore }
- shared_examples 'logging of relations creation' do
- context 'when log_import_export_relation_creation feature flag is enabled' do
- before do
- stub_feature_flags(log_import_export_relation_creation: group)
- end
-
- it 'logs top-level relation creation' do
- expect(shared.logger)
- .to receive(:info)
- .with(hash_including(message: '[Project/Group Import] Created new object relation'))
- .at_least(:once)
-
- subject
- end
- end
-
- context 'when log_import_export_relation_creation feature flag is disabled' do
- before do
- stub_feature_flags(log_import_export_relation_creation: false)
- end
-
- it 'does not log top-level relation creation' do
- expect(shared.logger)
- .to receive(:info)
- .with(hash_including(message: '[Project/Group Import] Created new object relation'))
- .never
-
- subject
- end
- end
- end
-
it 'restores group tree' do
expect(subject).to eq(true)
end
- include_examples 'logging of relations creation'
+ it 'logs top-level relation creation' do
+ expect(shared.logger)
+ .to receive(:info)
+ .with(hash_including(message: '[Project/Group Import] Created new object relation'))
+ .at_least(:once)
+
+ subject
+ end
end
diff --git a/spec/lib/gitlab/import_export/project/relation_factory_spec.rb b/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
index 80ba50976af..ea8b10675af 100644
--- a/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
+++ b/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
@@ -88,7 +88,7 @@ RSpec.describe Gitlab::ImportExport::Project::RelationFactory, :use_clean_rails_
end
context 'original service exists' do
- let(:service_id) { create(:service, project: project).id }
+ let(:service_id) { create(:integration, project: project).id }
it 'does not have the original service_id' do
expect(created_object.service_id).not_to eq(service_id)
diff --git a/spec/lib/gitlab/import_export/project/relation_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/relation_tree_restorer_spec.rb
index 577f1e46db6..b7b652005e9 100644
--- a/spec/lib/gitlab/import_export/project/relation_tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/project/relation_tree_restorer_spec.rb
@@ -54,38 +54,6 @@ RSpec.describe Gitlab::ImportExport::Project::RelationTreeRestorer do
end
end
- shared_examples 'logging of relations creation' do
- context 'when log_import_export_relation_creation feature flag is enabled' do
- before do
- stub_feature_flags(log_import_export_relation_creation: group)
- end
-
- it 'logs top-level relation creation' do
- expect(shared.logger)
- .to receive(:info)
- .with(hash_including(message: '[Project/Group Import] Created new object relation'))
- .at_least(:once)
-
- subject
- end
- end
-
- context 'when log_import_export_relation_creation feature flag is disabled' do
- before do
- stub_feature_flags(log_import_export_relation_creation: false)
- end
-
- it 'does not log top-level relation creation' do
- expect(shared.logger)
- .to receive(:info)
- .with(hash_including(message: '[Project/Group Import] Created new object relation'))
- .never
-
- subject
- end
- end
- end
-
context 'with legacy reader' do
let(:path) { 'spec/fixtures/lib/gitlab/import_export/complex/project.json' }
let(:relation_reader) do
@@ -106,7 +74,14 @@ RSpec.describe Gitlab::ImportExport::Project::RelationTreeRestorer do
create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project', group: group)
end
- include_examples 'logging of relations creation'
+ it 'logs top-level relation creation' do
+ expect(shared.logger)
+ .to receive(:info)
+ .with(hash_including(message: '[Project/Group Import] Created new object relation'))
+ .at_least(:once)
+
+ subject
+ end
end
end
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index 6ffe2187466..f019883a91e 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -692,6 +692,7 @@ Badge:
- type
ProjectCiCdSetting:
- group_runners_enabled
+- runner_token_expiration_interval
ProjectSetting:
- allow_merge_on_skipped_pipeline
- has_confluence
diff --git a/spec/lib/gitlab/import_export/uploads_saver_spec.rb b/spec/lib/gitlab/import_export/uploads_saver_spec.rb
index 8e9be209f89..bfb18c58806 100644
--- a/spec/lib/gitlab/import_export/uploads_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/uploads_saver_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe Gitlab::ImportExport::UploadsSaver do
end
it 'copies the uploads to the export path' do
- saver.save
+ saver.save # rubocop:disable Rails/SaveBang
uploads = Dir.glob(File.join(shared.export_path, 'uploads/**/*')).map { |file| File.basename(file) }
@@ -54,7 +54,7 @@ RSpec.describe Gitlab::ImportExport::UploadsSaver do
end
it 'copies the uploads to the export path' do
- saver.save
+ saver.save # rubocop:disable Rails/SaveBang
uploads = Dir.glob(File.join(shared.export_path, 'uploads/**/*')).map { |file| File.basename(file) }
diff --git a/spec/lib/gitlab/integrations/sti_type_spec.rb b/spec/lib/gitlab/integrations/sti_type_spec.rb
index 70b93d6a4b5..1205b74dc9d 100644
--- a/spec/lib/gitlab/integrations/sti_type_spec.rb
+++ b/spec/lib/gitlab/integrations/sti_type_spec.rb
@@ -46,11 +46,11 @@ RSpec.describe Gitlab::Integrations::StiType do
SQL
end
- let_it_be(:service) { create(:service) }
+ let_it_be(:integration) { create(:integration) }
it 'forms SQL UPDATE statements correctly' do
sql_statements = types.map do |type|
- record = ActiveRecord::QueryRecorder.new { service.update_column(:type, type) }
+ record = ActiveRecord::QueryRecorder.new { integration.update_column(:type, type) }
record.log.first
end
@@ -65,8 +65,6 @@ RSpec.describe Gitlab::Integrations::StiType do
SQL
end
- let(:service) { create(:service) }
-
it 'forms SQL DELETE statements correctly' do
sql_statements = types.map do |type|
record = ActiveRecord::QueryRecorder.new { Integration.delete_by(type: type) }
@@ -81,7 +79,7 @@ RSpec.describe Gitlab::Integrations::StiType do
describe '#deserialize' do
specify 'it deserializes type correctly', :aggregate_failures do
types.each do |type|
- service = create(:service, type: type)
+ service = create(:integration, type: type)
expect(service.type).to eq('AsanaService')
end
@@ -90,7 +88,7 @@ RSpec.describe Gitlab::Integrations::StiType do
describe '#cast' do
it 'casts type as model correctly', :aggregate_failures do
- create(:service, type: 'AsanaService')
+ create(:integration, type: 'AsanaService')
types.each do |type|
expect(Integration.find_by(type: type)).to be_kind_of(Integrations::Asana)
@@ -100,7 +98,7 @@ RSpec.describe Gitlab::Integrations::StiType do
describe '#changed?' do
it 'detects changes correctly', :aggregate_failures do
- service = create(:service, type: 'AsanaService')
+ service = create(:integration, type: 'AsanaService')
types.each do |type|
service.type = type
diff --git a/spec/lib/gitlab/jwt_authenticatable_spec.rb b/spec/lib/gitlab/jwt_authenticatable_spec.rb
index 36bb46cb250..92d5feceb75 100644
--- a/spec/lib/gitlab/jwt_authenticatable_spec.rb
+++ b/spec/lib/gitlab/jwt_authenticatable_spec.rb
@@ -14,17 +14,12 @@ RSpec.describe Gitlab::JwtAuthenticatable do
end
before do
- begin
- File.delete(test_class.secret_path)
- rescue Errno::ENOENT
- end
+ FileUtils.rm_f(test_class.secret_path)
test_class.write_secret
end
- describe '.secret' do
- subject(:secret) { test_class.secret }
-
+ shared_examples 'reading secret from the secret path' do
it 'returns 32 bytes' do
expect(secret).to be_a(String)
expect(secret.length).to eq(32)
@@ -32,62 +27,170 @@ RSpec.describe Gitlab::JwtAuthenticatable do
end
it 'accepts a trailing newline' do
- File.open(test_class.secret_path, 'a') { |f| f.write "\n" }
+ File.open(secret_path, 'a') { |f| f.write "\n" }
expect(secret.length).to eq(32)
end
it 'raises an exception if the secret file cannot be read' do
- File.delete(test_class.secret_path)
+ File.delete(secret_path)
expect { secret }.to raise_exception(Errno::ENOENT)
end
it 'raises an exception if the secret file contains the wrong number of bytes' do
- File.truncate(test_class.secret_path, 0)
+ File.truncate(secret_path, 0)
expect { secret }.to raise_exception(RuntimeError)
end
end
+ describe '.secret' do
+ it_behaves_like 'reading secret from the secret path' do
+ subject(:secret) { test_class.secret }
+
+ let(:secret_path) { test_class.secret_path }
+ end
+ end
+
+ describe '.read_secret' do
+ it_behaves_like 'reading secret from the secret path' do
+ subject(:secret) { test_class.read_secret(secret_path) }
+
+ let(:secret_path) { test_class.secret_path }
+ end
+ end
+
describe '.write_secret' do
- it 'uses mode 0600' do
- expect(File.stat(test_class.secret_path).mode & 0777).to eq(0600)
+ context 'without an input' do
+ it 'uses mode 0600' do
+ expect(File.stat(test_class.secret_path).mode & 0777).to eq(0600)
+ end
+
+ it 'writes base64 data' do
+ bytes = Base64.strict_decode64(File.read(test_class.secret_path))
+
+ expect(bytes).not_to be_empty
+ end
end
- it 'writes base64 data' do
- bytes = Base64.strict_decode64(File.read(test_class.secret_path))
+ context 'with an input' do
+ let(:another_path) do
+ Rails.root.join('tmp', 'tests', '.jwt_another_shared_secret')
+ end
- expect(bytes).not_to be_empty
+ after do
+ File.delete(another_path)
+ rescue Errno::ENOENT
+ end
+
+ it 'uses mode 0600' do
+ test_class.write_secret(another_path)
+ expect(File.stat(another_path).mode & 0777).to eq(0600)
+ end
+
+ it 'writes base64 data' do
+ test_class.write_secret(another_path)
+ bytes = Base64.strict_decode64(File.read(another_path))
+
+ expect(bytes).not_to be_empty
+ end
end
end
- describe '.decode_jwt_for_issuer' do
- let(:payload) { { 'iss' => 'test_issuer' } }
+ describe '.decode_jwt' do
+ let(:payload) { {} }
+
+ context 'use included class secret' do
+ it 'accepts a correct header' do
+ encoded_message = JWT.encode(payload, test_class.secret, 'HS256')
+
+ expect { test_class.decode_jwt(encoded_message) }.not_to raise_error
+ end
+
+ it 'raises an error when the JWT is not signed' do
+ encoded_message = JWT.encode(payload, nil, 'none')
+
+ expect { test_class.decode_jwt(encoded_message) }.to raise_error(JWT::DecodeError)
+ end
- it 'accepts a correct header' do
- encoded_message = JWT.encode(payload, test_class.secret, 'HS256')
+ it 'raises an error when the header is signed with the wrong secret' do
+ encoded_message = JWT.encode(payload, 'wrongsecret', 'HS256')
- expect { test_class.decode_jwt_for_issuer('test_issuer', encoded_message) }.not_to raise_error
+ expect { test_class.decode_jwt(encoded_message) }.to raise_error(JWT::DecodeError)
+ end
end
- it 'raises an error when the JWT is not signed' do
- encoded_message = JWT.encode(payload, nil, 'none')
+ context 'use an input secret' do
+ let(:another_secret) { 'another secret' }
+
+ it 'accepts a correct header' do
+ encoded_message = JWT.encode(payload, another_secret, 'HS256')
+
+ expect { test_class.decode_jwt(encoded_message, another_secret) }.not_to raise_error
+ end
- expect { test_class.decode_jwt_for_issuer('test_issuer', encoded_message) }.to raise_error(JWT::DecodeError)
+ it 'raises an error when the JWT is not signed' do
+ encoded_message = JWT.encode(payload, nil, 'none')
+
+ expect { test_class.decode_jwt(encoded_message, another_secret) }.to raise_error(JWT::DecodeError)
+ end
+
+ it 'raises an error when the header is signed with the wrong secret' do
+ encoded_message = JWT.encode(payload, 'wrongsecret', 'HS256')
+
+ expect { test_class.decode_jwt(encoded_message, another_secret) }.to raise_error(JWT::DecodeError)
+ end
end
- it 'raises an error when the header is signed with the wrong secret' do
- encoded_message = JWT.encode(payload, 'wrongsecret', 'HS256')
+ context 'issuer option' do
+ let(:payload) { { 'iss' => 'test_issuer' } }
+
+ it 'returns decoded payload if issuer is correct' do
+ encoded_message = JWT.encode(payload, test_class.secret, 'HS256')
+ payload = test_class.decode_jwt(encoded_message, issuer: 'test_issuer')
- expect { test_class.decode_jwt_for_issuer('test_issuer', encoded_message) }.to raise_error(JWT::DecodeError)
+ expect(payload[0]).to match a_hash_including('iss' => 'test_issuer')
+ end
+
+ it 'raises an error when the issuer is incorrect' do
+ payload['iss'] = 'somebody else'
+ encoded_message = JWT.encode(payload, test_class.secret, 'HS256')
+
+ expect { test_class.decode_jwt(encoded_message, issuer: 'test_issuer') }.to raise_error(JWT::DecodeError)
+ end
end
- it 'raises an error when the issuer is incorrect' do
- payload['iss'] = 'somebody else'
- encoded_message = JWT.encode(payload, test_class.secret, 'HS256')
+ context 'iat_after option' do
+ it 'returns decoded payload if iat is valid' do
+ freeze_time do
+ encoded_message = JWT.encode(payload.merge(iat: (Time.current - 10.seconds).to_i), test_class.secret, 'HS256')
+ payload = test_class.decode_jwt(encoded_message, iat_after: Time.current - 20.seconds)
+
+ expect(payload[0]).to match a_hash_including('iat' => be_a(Integer))
+ end
+ end
+
+ it 'raises an error if iat is invalid' do
+ encoded_message = JWT.encode(payload.merge(iat: 'wrong'), test_class.secret, 'HS256')
- expect { test_class.decode_jwt_for_issuer('test_issuer', encoded_message) }.to raise_error(JWT::DecodeError)
+ expect { test_class.decode_jwt(encoded_message, iat_after: true) }.to raise_error(JWT::DecodeError)
+ end
+
+ it 'raises an error if iat is absent' do
+ encoded_message = JWT.encode(payload, test_class.secret, 'HS256')
+
+ expect { test_class.decode_jwt(encoded_message, iat_after: true) }.to raise_error(JWT::DecodeError)
+ end
+
+ it 'raises an error if iat is too far in the past' do
+ freeze_time do
+ encoded_message = JWT.encode(payload.merge(iat: (Time.current - 30.seconds).to_i), test_class.secret, 'HS256')
+ expect do
+ test_class.decode_jwt(encoded_message, iat_after: Time.current - 20.seconds)
+ end.to raise_error(JWT::ExpiredSignature, 'Token has expired')
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/lets_encrypt/client_spec.rb b/spec/lib/gitlab/lets_encrypt/client_spec.rb
index f1284318687..1baf8749532 100644
--- a/spec/lib/gitlab/lets_encrypt/client_spec.rb
+++ b/spec/lib/gitlab/lets_encrypt/client_spec.rb
@@ -42,7 +42,7 @@ RSpec.describe ::Gitlab::LetsEncrypt::Client do
context 'when private key is saved in settings' do
let!(:saved_private_key) do
key = OpenSSL::PKey::RSA.new(4096).to_pem
- Gitlab::CurrentSettings.current_application_settings.update(lets_encrypt_private_key: key)
+ Gitlab::CurrentSettings.current_application_settings.update!(lets_encrypt_private_key: key)
key
end
diff --git a/spec/lib/gitlab/lfs/client_spec.rb b/spec/lib/gitlab/lfs/client_spec.rb
index 0f9637e8ca4..db450c79dfa 100644
--- a/spec/lib/gitlab/lfs/client_spec.rb
+++ b/spec/lib/gitlab/lfs/client_spec.rb
@@ -114,6 +114,52 @@ RSpec.describe Gitlab::Lfs::Client do
end
end
+ context 'server returns 200 OK with a chunked transfer request' do
+ before do
+ upload_action['header']['Transfer-Encoding'] = 'gzip, chunked'
+ end
+
+ it "makes an HTTP PUT with expected parameters" do
+ stub_upload(object: object, headers: upload_action['header'], chunked_transfer: true).to_return(status: 200)
+
+ lfs_client.upload!(object, upload_action, authenticated: true)
+ end
+ end
+
+ context 'server returns 200 OK with a username and password in the URL' do
+ let(:base_url) { "https://someuser:testpass@example.com" }
+
+ it "makes an HTTP PUT with expected parameters" do
+ stub_upload(
+ object: object,
+ headers: basic_auth_headers.merge(upload_action['header']),
+ url: "https://example.com/some/file"
+ ).to_return(status: 200)
+
+ lfs_client.upload!(object, upload_action, authenticated: true)
+ end
+ end
+
+ context 'no credentials in client' do
+ subject(:lfs_client) { described_class.new(base_url, credentials: {}) }
+
+ context 'server returns 200 OK with credentials in URL' do
+ let(:creds) { 'someuser:testpass' }
+ let(:base_url) { "https://#{creds}@example.com" }
+ let(:auth_headers) { { 'Authorization' => "Basic #{Base64.strict_encode64(creds)}" } }
+
+ it "makes an HTTP PUT with expected parameters" do
+ stub_upload(
+ object: object,
+ headers: auth_headers.merge(upload_action['header']),
+ url: "https://example.com/some/file"
+ ).to_return(status: 200)
+
+ lfs_client.upload!(object, upload_action, authenticated: true)
+ end
+ end
+ end
+
context 'server returns 200 OK to an unauthenticated request' do
it "makes an HTTP PUT with expected parameters" do
stub = stub_upload(
@@ -159,7 +205,7 @@ RSpec.describe Gitlab::Lfs::Client do
it 'raises an error' do
stub_upload(object: object, headers: upload_action['header']).to_return(status: 400)
- expect { lfs_client.upload!(object, upload_action, authenticated: true) }.to raise_error(/Failed/)
+ expect { lfs_client.upload!(object, upload_action, authenticated: true) }.to raise_error(/Failed to upload object: HTTP status 400/)
end
end
@@ -167,20 +213,25 @@ RSpec.describe Gitlab::Lfs::Client do
it 'raises an error' do
stub_upload(object: object, headers: upload_action['header']).to_return(status: 500)
- expect { lfs_client.upload!(object, upload_action, authenticated: true) }.to raise_error(/Failed/)
+ expect { lfs_client.upload!(object, upload_action, authenticated: true) }.to raise_error(/Failed to upload object: HTTP status 500/)
end
end
- def stub_upload(object:, headers:)
+ def stub_upload(object:, headers:, url: upload_action['href'], chunked_transfer: false)
headers = {
'Content-Type' => 'application/octet-stream',
- 'Content-Length' => object.size.to_s,
'User-Agent' => git_lfs_user_agent
}.merge(headers)
- stub_request(:put, upload_action['href']).with(
+ if chunked_transfer
+ headers['Transfer-Encoding'] = 'gzip, chunked'
+ else
+ headers['Content-Length'] = object.size.to_s
+ end
+
+ stub_request(:put, url).with(
body: object.file.read,
- headers: headers.merge('Content-Length' => object.size.to_s)
+ headers: headers
)
end
end
@@ -196,11 +247,25 @@ RSpec.describe Gitlab::Lfs::Client do
end
end
+ context 'server returns 200 OK with a username and password in the URL' do
+ let(:base_url) { "https://someuser:testpass@example.com" }
+
+ it "makes an HTTP PUT with expected parameters" do
+ stub_verify(
+ object: object,
+ headers: basic_auth_headers.merge(verify_action['header']),
+ url: "https://example.com/some/file/verify"
+ ).to_return(status: 200)
+
+ lfs_client.verify!(object, verify_action, authenticated: true)
+ end
+ end
+
context 'server returns 200 OK to an unauthenticated request' do
it "makes an HTTP POST with expected parameters" do
stub = stub_verify(
object: object,
- headers: basic_auth_headers.merge(upload_action['header'])
+ headers: basic_auth_headers.merge(verify_action['header'])
).to_return(status: 200)
lfs_client.verify!(object, verify_action, authenticated: false)
@@ -226,7 +291,7 @@ RSpec.describe Gitlab::Lfs::Client do
it 'raises an error' do
stub_verify(object: object, headers: verify_action['header']).to_return(status: 400)
- expect { lfs_client.verify!(object, verify_action, authenticated: true) }.to raise_error(/Failed/)
+ expect { lfs_client.verify!(object, verify_action, authenticated: true) }.to raise_error(/Failed to verify object: HTTP status 400/)
end
end
@@ -234,18 +299,18 @@ RSpec.describe Gitlab::Lfs::Client do
it 'raises an error' do
stub_verify(object: object, headers: verify_action['header']).to_return(status: 500)
- expect { lfs_client.verify!(object, verify_action, authenticated: true) }.to raise_error(/Failed/)
+ expect { lfs_client.verify!(object, verify_action, authenticated: true) }.to raise_error(/Failed to verify object: HTTP status 500/)
end
end
- def stub_verify(object:, headers:)
+ def stub_verify(object:, headers:, url: verify_action['href'])
headers = {
'Accept' => git_lfs_content_type,
'Content-Type' => git_lfs_content_type,
'User-Agent' => git_lfs_user_agent
}.merge(headers)
- stub_request(:post, verify_action['href']).with(
+ stub_request(:post, url).with(
body: object.to_json(only: [:oid, :size]),
headers: headers
)
diff --git a/spec/lib/gitlab/logger_spec.rb b/spec/lib/gitlab/logger_spec.rb
new file mode 100644
index 00000000000..ed22af8355f
--- /dev/null
+++ b/spec/lib/gitlab/logger_spec.rb
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Logger do
+ describe '.build' do
+ before do
+ allow(described_class).to receive(:file_name_noext).and_return('log')
+ end
+
+ subject { described_class.build }
+
+ it 'builds logger using Gitlab::Logger.log_level' do
+ expect(described_class).to receive(:log_level).and_return(:warn)
+
+ expect(subject.level).to eq(described_class::WARN)
+ end
+
+ it 'raises ArgumentError if invalid log level' do
+ allow(described_class).to receive(:log_level).and_return(:invalid)
+
+ expect { subject.level }.to raise_error(ArgumentError, 'invalid log level: invalid')
+ end
+
+ using RSpec::Parameterized::TableSyntax
+
+ where(:env_value, :resulting_level) do
+ 0 | described_class::DEBUG
+ :debug | described_class::DEBUG
+ 'debug' | described_class::DEBUG
+ 'DEBUG' | described_class::DEBUG
+ 'DeBuG' | described_class::DEBUG
+ 1 | described_class::INFO
+ :info | described_class::INFO
+ 'info' | described_class::INFO
+ 'INFO' | described_class::INFO
+ 'InFo' | described_class::INFO
+ 2 | described_class::WARN
+ :warn | described_class::WARN
+ 'warn' | described_class::WARN
+ 'WARN' | described_class::WARN
+ 'WaRn' | described_class::WARN
+ 3 | described_class::ERROR
+ :error | described_class::ERROR
+ 'error' | described_class::ERROR
+ 'ERROR' | described_class::ERROR
+ 'ErRoR' | described_class::ERROR
+ 4 | described_class::FATAL
+ :fatal | described_class::FATAL
+ 'fatal' | described_class::FATAL
+ 'FATAL' | described_class::FATAL
+ 'FaTaL' | described_class::FATAL
+ 5 | described_class::UNKNOWN
+ :unknown | described_class::UNKNOWN
+ 'unknown' | described_class::UNKNOWN
+ 'UNKNOWN' | described_class::UNKNOWN
+ 'UnKnOwN' | described_class::UNKNOWN
+ end
+
+ with_them do
+ it 'builds logger if valid log level' do
+ stub_env('GITLAB_LOG_LEVEL', env_value)
+
+ expect(subject.level).to eq(resulting_level)
+ end
+ end
+ end
+
+ describe '.log_level' do
+ context 'if GITLAB_LOG_LEVEL is set' do
+ before do
+ stub_env('GITLAB_LOG_LEVEL', described_class::ERROR)
+ end
+
+ it 'returns value of GITLAB_LOG_LEVEL' do
+ expect(described_class.log_level).to eq(described_class::ERROR)
+ end
+
+ it 'ignores fallback' do
+ expect(described_class.log_level(fallback: described_class::FATAL)).to eq(described_class::ERROR)
+ end
+ end
+
+ context 'if GITLAB_LOG_LEVEL is not set' do
+ it 'returns default fallback DEBUG' do
+ expect(described_class.log_level).to eq(described_class::DEBUG)
+ end
+
+ it 'returns passed fallback' do
+ expect(described_class.log_level(fallback: described_class::FATAL)).to eq(described_class::FATAL)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/mail_room/authenticator_spec.rb b/spec/lib/gitlab/mail_room/authenticator_spec.rb
new file mode 100644
index 00000000000..44120902661
--- /dev/null
+++ b/spec/lib/gitlab/mail_room/authenticator_spec.rb
@@ -0,0 +1,188 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::MailRoom::Authenticator do
+ let(:yml_config) do
+ {
+ enabled: true,
+ address: 'address@example.com'
+ }
+ end
+
+ let(:incoming_email_secret_path) { '/path/to/incoming_email_secret' }
+ let(:incoming_email_config) { yml_config.merge(secret_file: incoming_email_secret_path) }
+
+ let(:service_desk_email_secret_path) { '/path/to/service_desk_email_secret' }
+ let(:service_desk_email_config) { yml_config.merge(secret_file: service_desk_email_secret_path) }
+
+ let(:configs) do
+ {
+ incoming_email: incoming_email_config,
+ service_desk_email: service_desk_email_config
+ }
+ end
+
+ before do
+ allow(Gitlab::MailRoom).to receive(:enabled_configs).and_return(configs)
+
+ described_class.clear_memoization(:jwt_secret_incoming_email)
+ described_class.clear_memoization(:jwt_secret_service_desk_email)
+ end
+
+ after do
+ described_class.clear_memoization(:jwt_secret_incoming_email)
+ described_class.clear_memoization(:jwt_secret_service_desk_email)
+ end
+
+ around do |example|
+ freeze_time do
+ example.run
+ end
+ end
+
+ describe '#verify_api_request' do
+ let(:incoming_email_secret) { SecureRandom.hex(16) }
+ let(:service_desk_email_secret) { SecureRandom.hex(16) }
+ let(:payload) { { iss: described_class::INTERNAL_API_REQUEST_JWT_ISSUER, iat: (Time.current - 5.minutes + 1.second).to_i } }
+
+ before do
+ allow(described_class).to receive(:secret).with(:incoming_email).and_return(incoming_email_secret)
+ allow(described_class).to receive(:secret).with(:service_desk_email).and_return(service_desk_email_secret)
+ end
+
+ context 'verify a valid token' do
+ it 'returns the decoded payload' do
+ encoded_token = JWT.encode(payload, incoming_email_secret, 'HS256')
+ headers = { described_class::INTERNAL_API_REQUEST_HEADER => encoded_token }
+
+ expect(described_class.verify_api_request(headers, 'incoming_email')[0]).to match a_hash_including(
+ "iss" => "gitlab-mailroom",
+ "iat" => be_a(Integer)
+ )
+
+ encoded_token = JWT.encode(payload, service_desk_email_secret, 'HS256')
+ headers = { described_class::INTERNAL_API_REQUEST_HEADER => encoded_token }
+
+ expect(described_class.verify_api_request(headers, 'service_desk_email')[0]).to match a_hash_including(
+ "iss" => "gitlab-mailroom",
+ "iat" => be_a(Integer)
+ )
+ end
+ end
+
+ context 'verify an invalid token' do
+ it 'returns false' do
+ encoded_token = JWT.encode(payload, 'wrong secret', 'HS256')
+ headers = { described_class::INTERNAL_API_REQUEST_HEADER => encoded_token }
+
+ expect(described_class.verify_api_request(headers, 'incoming_email')).to eq(false)
+ end
+ end
+
+ context 'verify a valid token but wrong mailbox type' do
+ it 'returns false' do
+ encoded_token = JWT.encode(payload, incoming_email_secret, 'HS256')
+ headers = { described_class::INTERNAL_API_REQUEST_HEADER => encoded_token }
+
+ expect(described_class.verify_api_request(headers, 'service_desk_email')).to eq(false)
+ end
+ end
+
+ context 'verify a valid token but wrong issuer' do
+ let(:payload) { { iss: 'invalid_issuer' } }
+
+ it 'returns false' do
+ encoded_token = JWT.encode(payload, incoming_email_secret, 'HS256')
+ headers = { described_class::INTERNAL_API_REQUEST_HEADER => encoded_token }
+
+ expect(described_class.verify_api_request(headers, 'incoming_email')).to eq(false)
+ end
+ end
+
+ context 'verify a valid token but expired' do
+ let(:payload) { { iss: described_class::INTERNAL_API_REQUEST_JWT_ISSUER, iat: (Time.current - 5.minutes - 1.second).to_i } }
+
+ it 'returns false' do
+ encoded_token = JWT.encode(payload, incoming_email_secret, 'HS256')
+ headers = { described_class::INTERNAL_API_REQUEST_HEADER => encoded_token }
+
+ expect(described_class.verify_api_request(headers, 'incoming_email')).to eq(false)
+ end
+ end
+
+ context 'verify a valid token but wrong header field' do
+ it 'returns false' do
+ encoded_token = JWT.encode(payload, incoming_email_secret, 'HS256')
+ headers = { 'a-wrong-header' => encoded_token }
+
+ expect(described_class.verify_api_request(headers, 'incoming_email')).to eq(false)
+ end
+ end
+
+ context 'verify headers for a disabled mailbox type' do
+ let(:configs) { { service_desk_email: service_desk_email_config } }
+
+ it 'returns false' do
+ encoded_token = JWT.encode(payload, incoming_email_secret, 'HS256')
+ headers = { described_class::INTERNAL_API_REQUEST_HEADER => encoded_token }
+
+ expect(described_class.verify_api_request(headers, 'incoming_email')).to eq(false)
+ end
+ end
+
+ context 'verify headers for a non-existing mailbox type' do
+ it 'returns false' do
+ headers = { described_class::INTERNAL_API_REQUEST_HEADER => 'something' }
+
+ expect(described_class.verify_api_request(headers, 'invalid_mailbox_type')).to eq(false)
+ end
+ end
+ end
+
+ describe '#secret' do
+ let(:incoming_email_secret) { SecureRandom.hex(16) }
+ let(:service_desk_email_secret) { SecureRandom.hex(16) }
+
+ context 'the secret is valid' do
+ before do
+ allow(described_class).to receive(:read_secret).with(incoming_email_secret_path).and_return(incoming_email_secret).once
+ allow(described_class).to receive(:read_secret).with(service_desk_email_secret_path).and_return(service_desk_email_secret).once
+ end
+
+ it 'returns the memorized secret from a file' do
+ expect(described_class.secret(:incoming_email)).to eql(incoming_email_secret)
+ # The second call does not trigger secret read again
+ expect(described_class.secret(:incoming_email)).to eql(incoming_email_secret)
+ expect(described_class).to have_received(:read_secret).with(incoming_email_secret_path).once
+
+ expect(described_class.secret(:service_desk_email)).to eql(service_desk_email_secret)
+ # The second call does not trigger secret read again
+ expect(described_class.secret(:service_desk_email)).to eql(service_desk_email_secret)
+ expect(described_class).to have_received(:read_secret).with(service_desk_email_secret_path).once
+ end
+ end
+
+ context 'the secret file is not configured' do
+ let(:incoming_email_config) { yml_config }
+
+ it 'raises a SecretConfigurationError exception' do
+ expect do
+ described_class.secret(:incoming_email)
+ end.to raise_error(described_class::SecretConfigurationError, "incoming_email's secret_file configuration is missing")
+ end
+ end
+
+ context 'the secret file not found' do
+ before do
+ allow(described_class).to receive(:read_secret).with(incoming_email_secret_path).and_raise(Errno::ENOENT)
+ end
+
+ it 'raises a SecretConfigurationError exception' do
+ expect do
+ described_class.secret(:incoming_email)
+ end.to raise_error(described_class::SecretConfigurationError, "Fail to read incoming_email's secret: No such file or directory")
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/mail_room/mail_room_spec.rb b/spec/lib/gitlab/mail_room/mail_room_spec.rb
index 0bd1a27c65e..a4fcf71a012 100644
--- a/spec/lib/gitlab/mail_room/mail_room_spec.rb
+++ b/spec/lib/gitlab/mail_room/mail_room_spec.rb
@@ -30,6 +30,7 @@ RSpec.describe Gitlab::MailRoom do
end
before do
+ allow(described_class).to receive(:load_yaml).and_return(configs)
described_class.instance_variable_set(:@enabled_configs, nil)
end
@@ -38,10 +39,6 @@ RSpec.describe Gitlab::MailRoom do
end
describe '#enabled_configs' do
- before do
- allow(described_class).to receive(:load_yaml).and_return(configs)
- end
-
context 'when both email and address is set' do
it 'returns email configs' do
expect(described_class.enabled_configs.size).to eq(2)
@@ -79,7 +76,7 @@ RSpec.describe Gitlab::MailRoom do
let(:custom_config) { { enabled: true, address: 'address@example.com' } }
it 'overwrites missing values with the default' do
- expect(described_class.enabled_configs.first[:port]).to eq(Gitlab::MailRoom::DEFAULT_CONFIG[:port])
+ expect(described_class.enabled_configs.each_value.first[:port]).to eq(Gitlab::MailRoom::DEFAULT_CONFIG[:port])
end
end
@@ -88,7 +85,7 @@ RSpec.describe Gitlab::MailRoom do
it 'returns only encoming_email' do
expect(described_class.enabled_configs.size).to eq(1)
- expect(described_class.enabled_configs.first[:worker]).to eq('EmailReceiverWorker')
+ expect(described_class.enabled_configs.each_value.first[:worker]).to eq('EmailReceiverWorker')
end
end
@@ -100,11 +97,12 @@ RSpec.describe Gitlab::MailRoom do
end
it 'sets redis config' do
- config = described_class.enabled_configs.first
-
- expect(config[:redis_url]).to eq('localhost')
- expect(config[:redis_db]).to eq(99)
- expect(config[:sentinels]).to eq('yes, them')
+ config = described_class.enabled_configs.each_value.first
+ expect(config).to include(
+ redis_url: 'localhost',
+ redis_db: 99,
+ sentinels: 'yes, them'
+ )
end
end
@@ -113,7 +111,7 @@ RSpec.describe Gitlab::MailRoom do
let(:custom_config) { { log_path: 'tiny_log.log' } }
it 'expands the log path to an absolute value' do
- new_path = Pathname.new(described_class.enabled_configs.first[:log_path])
+ new_path = Pathname.new(described_class.enabled_configs.each_value.first[:log_path])
expect(new_path.absolute?).to be_truthy
end
end
@@ -122,9 +120,48 @@ RSpec.describe Gitlab::MailRoom do
let(:custom_config) { { log_path: '/dev/null' } }
it 'leaves the path as-is' do
- expect(described_class.enabled_configs.first[:log_path]).to eq '/dev/null'
+ expect(described_class.enabled_configs.each_value.first[:log_path]).to eq '/dev/null'
end
end
end
end
+
+ describe '#enabled_mailbox_types' do
+ context 'when all mailbox types are enabled' do
+ it 'returns the mailbox types' do
+ expect(described_class.enabled_mailbox_types).to match(%w[incoming_email service_desk_email])
+ end
+ end
+
+ context 'when an mailbox_types is disabled' do
+ let(:incoming_email_config) { yml_config.merge(enabled: false) }
+
+ it 'returns the mailbox types' do
+ expect(described_class.enabled_mailbox_types).to match(%w[service_desk_email])
+ end
+ end
+
+ context 'when email is disabled' do
+ let(:custom_config) { { enabled: false } }
+
+ it 'returns an empty array' do
+ expect(described_class.enabled_mailbox_types).to match_array([])
+ end
+ end
+ end
+
+ describe '#worker_for' do
+ context 'matched mailbox types' do
+ it 'returns the constantized worker class' do
+ expect(described_class.worker_for('incoming_email')).to eql(EmailReceiverWorker)
+ expect(described_class.worker_for('service_desk_email')).to eql(ServiceDeskEmailReceiverWorker)
+ end
+ end
+
+ context 'non-existing mailbox_type' do
+ it 'returns nil' do
+ expect(described_class.worker_for('another_mailbox_type')).to be(nil)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/merge_requests/commit_message_generator_spec.rb b/spec/lib/gitlab/merge_requests/commit_message_generator_spec.rb
index 65c76aac10c..2407b497249 100644
--- a/spec/lib/gitlab/merge_requests/commit_message_generator_spec.rb
+++ b/spec/lib/gitlab/merge_requests/commit_message_generator_spec.rb
@@ -15,7 +15,8 @@ RSpec.describe Gitlab::MergeRequests::CommitMessageGenerator do
)
end
- let(:user) { project.creator }
+ let(:current_user) { create(:user, name: 'John Doe', email: 'john.doe@example.com') }
+ let(:author) { project.creator }
let(:source_branch) { 'feature' }
let(:merge_request_description) { "Merge Request Description\nNext line" }
let(:merge_request_title) { 'Bugfix' }
@@ -27,13 +28,13 @@ RSpec.describe Gitlab::MergeRequests::CommitMessageGenerator do
target_project: project,
target_branch: 'master',
source_branch: source_branch,
- author: user,
+ author: author,
description: merge_request_description,
title: merge_request_title
)
end
- subject { described_class.new(merge_request: merge_request) }
+ subject { described_class.new(merge_request: merge_request, current_user: current_user) }
shared_examples_for 'commit message with template' do |message_template_name|
it 'returns nil when template is not set in target project' do
@@ -56,6 +57,19 @@ RSpec.describe Gitlab::MergeRequests::CommitMessageGenerator do
end
end
+ context 'when project has commit template with only the title' do
+ let(:merge_request) do
+ double(:merge_request, title: 'Fixes', target_project: project, to_reference: '!123', metrics: nil, merge_user: nil)
+ end
+
+ let(message_template_name) { '%{title}' }
+
+ it 'evaluates only necessary variables' do
+ expect(result_message).to eq 'Fixes'
+ expect(merge_request).not_to have_received(:to_reference)
+ end
+ end
+
context 'when project has commit template with closed issues' do
let(message_template_name) { <<~MSG.rstrip }
Merge branch '%{source_branch}' into '%{target_branch}'
@@ -274,17 +288,319 @@ RSpec.describe Gitlab::MergeRequests::CommitMessageGenerator do
end
end
end
+
+ context 'when project has merge commit template with approvers' do
+ let(:user1) { create(:user) }
+ let(:user2) { create(:user) }
+ let(message_template_name) { <<~MSG.rstrip }
+ Merge branch '%{source_branch}' into '%{target_branch}'
+
+ %{approved_by}
+ MSG
+
+ context 'and mr has no approval' do
+ before do
+ merge_request.approved_by_users = []
+ end
+
+ it 'removes variable and blank line' do
+ expect(result_message).to eq <<~MSG.rstrip
+ Merge branch 'feature' into 'master'
+ MSG
+ end
+
+ context 'when there is blank line after approved_by' do
+ let(message_template_name) { <<~MSG.rstrip }
+ Merge branch '%{source_branch}' into '%{target_branch}'
+
+ %{approved_by}
+
+ Type: merge
+ MSG
+
+ it 'removes blank line before it' do
+ expect(result_message).to eq <<~MSG.rstrip
+ Merge branch 'feature' into 'master'
+
+ Type: merge
+ MSG
+ end
+ end
+
+ context 'when there is no blank line after approved_by' do
+ let(message_template_name) { <<~MSG.rstrip }
+ Merge branch '%{source_branch}' into '%{target_branch}'
+
+ %{approved_by}
+ Type: merge
+ MSG
+
+ it 'does not remove blank line before it' do
+ expect(result_message).to eq <<~MSG.rstrip
+ Merge branch 'feature' into 'master'
+
+ Type: merge
+ MSG
+ end
+ end
+ end
+
+ context 'and mr has one approval' do
+ before do
+ merge_request.approved_by_users = [user1]
+ end
+
+ it 'returns user name and email' do
+ expect(result_message).to eq <<~MSG.rstrip
+ Merge branch 'feature' into 'master'
+
+ Approved-by: #{user1.name} <#{user1.email}>
+ MSG
+ end
+ end
+
+ context 'and mr has multiple approvals' do
+ before do
+ merge_request.approved_by_users = [user1, user2]
+ end
+
+ it 'returns users names and emails' do
+ expect(result_message).to eq <<~MSG.rstrip
+ Merge branch 'feature' into 'master'
+
+ Approved-by: #{user1.name} <#{user1.email}>
+ Approved-by: #{user2.name} <#{user2.email}>
+ MSG
+ end
+ end
+ end
+
+ context 'when project has merge commit template with url' do
+ let(message_template_name) do
+ "Merge Request URL is '%{url}'"
+ end
+
+ context "and merge request has url" do
+ it "returns mr url" do
+ expect(result_message).to eq <<~MSG.rstrip
+ Merge Request URL is '#{Gitlab::UrlBuilder.build(merge_request)}'
+ MSG
+ end
+ end
+ end
+
+ context 'when project has merge commit template with merged_by' do
+ let(message_template_name) do
+ "Merge Request merged by '%{merged_by}'"
+ end
+
+ context "and current_user is passed" do
+ it "returns user name and email" do
+ expect(result_message).to eq <<~MSG.rstrip
+ Merge Request merged by '#{current_user.name} <#{current_user.email}>'
+ MSG
+ end
+ end
+ end
+
+ context 'user' do
+ subject { described_class.new(merge_request: merge_request, current_user: nil) }
+
+ let(:user1) { create(:user) }
+ let(:user2) { create(:user) }
+ let(message_template_name) do
+ "Merge Request merged by '%{merged_by}'"
+ end
+
+ context 'comes from metrics' do
+ before do
+ merge_request.metrics.merged_by = user1
+ end
+
+ it "returns user name and email" do
+ expect(result_message).to eq <<~MSG.rstrip
+ Merge Request merged by '#{user1.name} <#{user1.email}>'
+ MSG
+ end
+ end
+
+ context 'comes from merge_user' do
+ before do
+ merge_request.merge_user = user2
+ end
+
+ it "returns user name and email" do
+ expect(result_message).to eq <<~MSG.rstrip
+ Merge Request merged by '#{user2.name} <#{user2.email}>'
+ MSG
+ end
+ end
+ end
+
+ context 'when project has commit template with the same variable used twice' do
+ let(message_template_name) { '%{title} %{title}' }
+
+ it 'uses custom template' do
+ expect(result_message).to eq 'Bugfix Bugfix'
+ end
+ end
+
+ context 'when project has commit template without any variable' do
+ let(message_template_name) { 'static text' }
+
+ it 'uses custom template' do
+ expect(result_message).to eq 'static text'
+ end
+ end
+
+ context 'when project has template with all variables' do
+ let(message_template_name) { <<~MSG.rstrip }
+ source_branch:%{source_branch}
+ target_branch:%{target_branch}
+ title:%{title}
+ issues:%{issues}
+ description:%{description}
+ first_commit:%{first_commit}
+ first_multiline_commit:%{first_multiline_commit}
+ url:%{url}
+ approved_by:%{approved_by}
+ merged_by:%{merged_by}
+ co_authored_by:%{co_authored_by}
+ MSG
+
+ it 'uses custom template' do
+ expect(result_message).to eq <<~MSG.rstrip
+ source_branch:feature
+ target_branch:master
+ title:Bugfix
+ issues:
+ description:Merge Request Description
+ Next line
+ first_commit:Feature added
+
+ Signed-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>
+ first_multiline_commit:Feature added
+
+ Signed-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>
+ url:#{Gitlab::UrlBuilder.build(merge_request)}
+ approved_by:
+ merged_by:#{current_user.name} <#{current_user.commit_email_or_default}>
+ co_authored_by:Co-authored-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>
+ MSG
+ end
+ end
+
+ context 'when project has merge commit template with co_authored_by' do
+ let(:source_branch) { 'signed-commits' }
+ let(message_template_name) { <<~MSG.rstrip }
+ %{title}
+
+ %{co_authored_by}
+ MSG
+
+ it 'uses custom template' do
+ expect(result_message).to eq <<~MSG.rstrip
+ Bugfix
+
+ Co-authored-by: Nannie Bernhard <nannie.bernhard@example.com>
+ Co-authored-by: Winnie Hellmann <winnie@gitlab.com>
+ MSG
+ end
+
+ context 'when author and merging user is one of the commit authors' do
+ let(:author) { create(:user, email: 'nannie.bernhard@example.com') }
+
+ before do
+ merge_request.merge_user = author
+ end
+
+ it 'skips his mail in coauthors' do
+ expect(result_message).to eq <<~MSG.rstrip
+ Bugfix
+
+ Co-authored-by: Winnie Hellmann <winnie@gitlab.com>
+ MSG
+ end
+ end
+
+ context 'when author and merging user is the only author of commits' do
+ let(:author) { create(:user, email: 'dmitriy.zaporozhets@gmail.com') }
+ let(:source_branch) { 'feature' }
+
+ before do
+ merge_request.merge_user = author
+ end
+
+ it 'skips coauthors and empty lines before it' do
+ expect(result_message).to eq <<~MSG.rstrip
+ Bugfix
+ MSG
+ end
+ end
+ end
end
describe '#merge_message' do
let(:result_message) { subject.merge_message }
it_behaves_like 'commit message with template', :merge_commit_template
+
+ context 'when project has merge commit template with co_authored_by' do
+ let(:source_branch) { 'signed-commits' }
+ let(:merge_commit_template) { <<~MSG.rstrip }
+ %{title}
+
+ %{co_authored_by}
+ MSG
+
+ context 'when author and merging user are one of the commit authors' do
+ let(:author) { create(:user, email: 'nannie.bernhard@example.com') }
+ let(:merge_user) { create(:user, email: 'winnie@gitlab.com') }
+
+ before do
+ merge_request.merge_user = merge_user
+ end
+
+ it 'skips merging user, but does not skip merge request author' do
+ expect(result_message).to eq <<~MSG.rstrip
+ Bugfix
+
+ Co-authored-by: Nannie Bernhard <nannie.bernhard@example.com>
+ MSG
+ end
+ end
+ end
end
describe '#squash_message' do
let(:result_message) { subject.squash_message }
it_behaves_like 'commit message with template', :squash_commit_template
+
+ context 'when project has merge commit template with co_authored_by' do
+ let(:source_branch) { 'signed-commits' }
+ let(:squash_commit_template) { <<~MSG.rstrip }
+ %{title}
+
+ %{co_authored_by}
+ MSG
+
+ context 'when author and merging user are one of the commit authors' do
+ let(:author) { create(:user, email: 'nannie.bernhard@example.com') }
+ let(:merge_user) { create(:user, email: 'winnie@gitlab.com') }
+
+ before do
+ merge_request.merge_user = merge_user
+ end
+
+ it 'skips merge request author, but does not skip merging user' do
+ expect(result_message).to eq <<~MSG.rstrip
+ Bugfix
+
+ Co-authored-by: Winnie Hellmann <winnie@gitlab.com>
+ MSG
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/metrics/exporter/base_exporter_spec.rb b/spec/lib/gitlab/metrics/exporter/base_exporter_spec.rb
index 9cd1ef4094e..c7afc02f0af 100644
--- a/spec/lib/gitlab/metrics/exporter/base_exporter_spec.rb
+++ b/spec/lib/gitlab/metrics/exporter/base_exporter_spec.rb
@@ -4,13 +4,8 @@ require 'spec_helper'
RSpec.describe Gitlab::Metrics::Exporter::BaseExporter do
let(:settings) { double('settings') }
- let(:exporter) { described_class.new(settings) }
- let(:log_filename) { File.join(Rails.root, 'log', 'sidekiq_exporter.log') }
-
- before do
- allow_any_instance_of(described_class).to receive(:log_filename).and_return(log_filename)
- allow_any_instance_of(described_class).to receive(:settings).and_return(settings)
- end
+ let(:log_enabled) { false }
+ let(:exporter) { described_class.new(settings, log_enabled: log_enabled, log_file: 'test_exporter.log') }
describe 'when exporter is enabled' do
before do
@@ -61,6 +56,38 @@ RSpec.describe Gitlab::Metrics::Exporter::BaseExporter do
exporter.start.join
end
+
+ context 'logging enabled' do
+ let(:log_enabled) { true }
+ let(:logger) { instance_double(WEBrick::Log) }
+
+ before do
+ allow(logger).to receive(:time_format=)
+ allow(logger).to receive(:info)
+ end
+
+ it 'configures a WEBrick logger with the given file' do
+ expect(WEBrick::Log).to receive(:new).with(end_with('test_exporter.log')).and_return(logger)
+
+ exporter
+ end
+
+ it 'logs any errors during startup' do
+ expect(::WEBrick::Log).to receive(:new).and_return(logger)
+ expect(::WEBrick::HTTPServer).to receive(:new).and_raise 'fail'
+ expect(logger).to receive(:error)
+
+ exporter.start
+ end
+ end
+
+ context 'logging disabled' do
+ it 'configures a WEBrick logger with the null device' do
+ expect(WEBrick::Log).to receive(:new).with(File::NULL).and_call_original
+
+ exporter
+ end
+ end
end
describe 'when thread is not alive' do
@@ -111,6 +138,18 @@ RSpec.describe Gitlab::Metrics::Exporter::BaseExporter do
describe 'request handling' do
using RSpec::Parameterized::TableSyntax
+ let(:fake_collector) do
+ Class.new do
+ def initialize(app, ...)
+ @app = app
+ end
+
+ def call(env)
+ @app.call(env)
+ end
+ end
+ end
+
where(:method_class, :path, :http_status) do
Net::HTTP::Get | '/metrics' | 200
Net::HTTP::Get | '/liveness' | 200
@@ -123,6 +162,8 @@ RSpec.describe Gitlab::Metrics::Exporter::BaseExporter do
allow(settings).to receive(:port).and_return(0)
allow(settings).to receive(:address).and_return('127.0.0.1')
+ stub_const('Gitlab::Metrics::Exporter::MetricsMiddleware', fake_collector)
+
# We want to wrap original method
# and run handling of requests
# in separate thread
@@ -134,8 +175,6 @@ RSpec.describe Gitlab::Metrics::Exporter::BaseExporter do
# is raised as we close listeners
end
end
-
- exporter.start.join
end
after do
@@ -146,12 +185,25 @@ RSpec.describe Gitlab::Metrics::Exporter::BaseExporter do
let(:config) { exporter.server.config }
let(:request) { method_class.new(path) }
- it 'responds with proper http_status' do
+ subject(:response) do
http = Net::HTTP.new(config[:BindAddress], config[:Port])
- response = http.request(request)
+ http.request(request)
+ end
+
+ it 'responds with proper http_status' do
+ exporter.start.join
expect(response.code).to eq(http_status.to_s)
end
+
+ it 'collects request metrics' do
+ expect_next_instance_of(fake_collector) do |instance|
+ expect(instance).to receive(:call).and_call_original
+ end
+
+ exporter.start.join
+ response
+ end
end
end
diff --git a/spec/lib/gitlab/metrics/exporter/gc_request_middleware_spec.rb b/spec/lib/gitlab/metrics/exporter/gc_request_middleware_spec.rb
new file mode 100644
index 00000000000..0c70a5de701
--- /dev/null
+++ b/spec/lib/gitlab/metrics/exporter/gc_request_middleware_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Metrics::Exporter::GcRequestMiddleware do
+ let(:app) { double(:app) }
+ let(:env) { {} }
+
+ subject(:middleware) { described_class.new(app) }
+
+ describe '#call' do
+ it 'runs a major GC after the next middleware is called' do
+ expect(app).to receive(:call).with(env).ordered.and_return([200, {}, []])
+ expect(GC).to receive(:start).ordered
+
+ response = middleware.call(env)
+
+ expect(response).to eq([200, {}, []])
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/exporter/health_checks_middleware_spec.rb b/spec/lib/gitlab/metrics/exporter/health_checks_middleware_spec.rb
new file mode 100644
index 00000000000..9ee46a45e7a
--- /dev/null
+++ b/spec/lib/gitlab/metrics/exporter/health_checks_middleware_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Metrics::Exporter::HealthChecksMiddleware do
+ let(:app) { double(:app) }
+ let(:env) { { 'PATH_INFO' => path } }
+
+ let(:readiness_probe) { double(:readiness_probe) }
+ let(:liveness_probe) { double(:liveness_probe) }
+ let(:probe_result) { Gitlab::HealthChecks::Probes::Status.new(200, { status: 'ok' }) }
+
+ subject(:middleware) { described_class.new(app, readiness_probe, liveness_probe) }
+
+ describe '#call' do
+ context 'handling /readiness requests' do
+ let(:path) { '/readiness' }
+
+ it 'handles the request' do
+ expect(readiness_probe).to receive(:execute).and_return(probe_result)
+
+ response = middleware.call(env)
+
+ expect(response).to eq([200, { 'Content-Type' => 'application/json; charset=utf-8' }, ['{"status":"ok"}']])
+ end
+ end
+
+ context 'handling /liveness requests' do
+ let(:path) { '/liveness' }
+
+ it 'handles the request' do
+ expect(liveness_probe).to receive(:execute).and_return(probe_result)
+
+ response = middleware.call(env)
+
+ expect(response).to eq([200, { 'Content-Type' => 'application/json; charset=utf-8' }, ['{"status":"ok"}']])
+ end
+ end
+
+ context 'handling other requests' do
+ let(:path) { '/other_path' }
+
+ it 'forwards them to the next middleware' do
+ expect(app).to receive(:call).with(env).and_return([201, {}, []])
+
+ response = middleware.call(env)
+
+ expect(response).to eq([201, {}, []])
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/exporter/metrics_middleware_spec.rb b/spec/lib/gitlab/metrics/exporter/metrics_middleware_spec.rb
new file mode 100644
index 00000000000..ac5721f5974
--- /dev/null
+++ b/spec/lib/gitlab/metrics/exporter/metrics_middleware_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Metrics::Exporter::MetricsMiddleware do
+ let(:app) { double(:app) }
+ let(:pid) { 'fake_exporter' }
+ let(:env) { { 'PATH_INFO' => '/path', 'REQUEST_METHOD' => 'GET' } }
+
+ subject(:middleware) { described_class.new(app, pid) }
+
+ def metric(name, method, path, status)
+ metric = ::Prometheus::Client.registry.get(name)
+ return unless metric
+
+ values = metric.values.transform_keys { |k| k.slice(:method, :path, :pid, :code) }
+ values[{ method: method, path: path, pid: pid, code: status.to_s }]&.get
+ end
+
+ before do
+ expect(app).to receive(:call).with(env).and_return([200, {}, []])
+ end
+
+ describe '#call', :prometheus do
+ it 'records a total requests metric' do
+ response = middleware.call(env)
+
+ expect(response).to eq([200, {}, []])
+ expect(metric(:exporter_http_requests_total, 'get', '/path', 200)).to eq(1.0)
+ end
+
+ it 'records a request duration histogram' do
+ response = middleware.call(env)
+
+ expect(response).to eq([200, {}, []])
+ expect(metric(:exporter_http_request_duration_seconds, 'get', '/path', 200)).to be_a(Hash)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/exporter/sidekiq_exporter_spec.rb b/spec/lib/gitlab/metrics/exporter/sidekiq_exporter_spec.rb
deleted file mode 100644
index 75bc3ba9626..00000000000
--- a/spec/lib/gitlab/metrics/exporter/sidekiq_exporter_spec.rb
+++ /dev/null
@@ -1,53 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Metrics::Exporter::SidekiqExporter do
- let(:exporter) { described_class.new(Settings.monitoring.sidekiq_exporter) }
-
- after do
- exporter.stop
- end
-
- context 'with valid config' do
- before do
- stub_config(
- monitoring: {
- sidekiq_exporter: {
- enabled: true,
- log_enabled: false,
- port: 0,
- address: '127.0.0.1'
- }
- }
- )
- end
-
- it 'does start thread' do
- expect(exporter.start).not_to be_nil
- end
-
- it 'does not enable logging by default' do
- expect(exporter.log_filename).to eq(File::NULL)
- end
- end
-
- context 'with logging enabled' do
- before do
- stub_config(
- monitoring: {
- sidekiq_exporter: {
- enabled: true,
- log_enabled: true,
- port: 0,
- address: '127.0.0.1'
- }
- }
- )
- end
-
- it 'returns a valid log filename' do
- expect(exporter.log_filename).to end_with('sidekiq_exporter.log')
- end
- end
-end
diff --git a/spec/lib/gitlab/metrics/exporter/web_exporter_spec.rb b/spec/lib/gitlab/metrics/exporter/web_exporter_spec.rb
index 9deaecbf41b..0531bccf4b4 100644
--- a/spec/lib/gitlab/metrics/exporter/web_exporter_spec.rb
+++ b/spec/lib/gitlab/metrics/exporter/web_exporter_spec.rb
@@ -24,14 +24,14 @@ RSpec.describe Gitlab::Metrics::Exporter::WebExporter do
exporter.stop
end
- context 'when running server' do
+ context 'when running server', :prometheus do
it 'readiness probe returns succesful status' do
expect(readiness_probe.http_status).to eq(200)
expect(readiness_probe.json).to include(status: 'ok')
expect(readiness_probe.json).to include('web_exporter' => [{ 'status': 'ok' }])
end
- it 'initializes request metrics', :prometheus do
+ it 'initializes request metrics' do
expect(Gitlab::Metrics::RailsSlis).to receive(:initialize_request_slis_if_needed!).and_call_original
http = Net::HTTP.new(exporter.server.config[:BindAddress], exporter.server.config[:Port])
@@ -42,7 +42,7 @@ RSpec.describe Gitlab::Metrics::Exporter::WebExporter do
end
describe '#mark_as_not_running!' do
- it 'readiness probe returns a failure status' do
+ it 'readiness probe returns a failure status', :prometheus do
exporter.mark_as_not_running!
expect(readiness_probe.http_status).to eq(503)
diff --git a/spec/lib/gitlab/metrics/samplers/action_cable_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/action_cable_sampler_spec.rb
index d834b796179..e1e4877cd50 100644
--- a/spec/lib/gitlab/metrics/samplers/action_cable_sampler_spec.rb
+++ b/spec/lib/gitlab/metrics/samplers/action_cable_sampler_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Metrics::Samplers::ActionCableSampler do
let(:action_cable) { instance_double(ActionCable::Server::Base) }
- subject { described_class.new(action_cable: action_cable) }
+ subject { described_class.new(action_cable: action_cable, logger: double) }
it_behaves_like 'metrics sampler', 'ACTION_CABLE_SAMPLER'
diff --git a/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb
index e8f8947c9e8..c88d8c17eac 100644
--- a/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb
+++ b/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb
@@ -62,7 +62,7 @@ RSpec.describe Gitlab::Metrics::Samplers::DatabaseSampler do
end
context 'when replica hosts are configured' do
- let(:main_load_balancer) { ActiveRecord::Base.load_balancer } # rubocop:disable Database/MultipleDatabases
+ let(:main_load_balancer) { ApplicationRecord.load_balancer }
let(:main_replica_host) { main_load_balancer.host }
let(:ci_load_balancer) { double(:load_balancer, host_list: ci_host_list, configuration: configuration) }
@@ -117,7 +117,7 @@ RSpec.describe Gitlab::Metrics::Samplers::DatabaseSampler do
end
context 'when the base model has replica connections' do
- let(:main_load_balancer) { ActiveRecord::Base.load_balancer } # rubocop:disable Database/MultipleDatabases
+ let(:main_load_balancer) { ApplicationRecord.load_balancer }
let(:main_replica_host) { main_load_balancer.host }
let(:ci_load_balancer) { double(:load_balancer, host_list: ci_host_list, configuration: configuration) }
diff --git a/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb
index 6f1e0480197..a4877208bcf 100644
--- a/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb
+++ b/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb
@@ -84,7 +84,7 @@ RSpec.describe Gitlab::Metrics::Samplers::RubySampler do
end
describe '#sample_gc' do
- let!(:sampler) { described_class.new(5) }
+ let!(:sampler) { described_class.new }
let(:gc_reports) { [{ GC_TIME: 0.1 }, { GC_TIME: 0.2 }, { GC_TIME: 0.3 }] }
diff --git a/spec/lib/gitlab/middleware/go_spec.rb b/spec/lib/gitlab/middleware/go_spec.rb
index 1ef548ab29b..bc1d53b2ccb 100644
--- a/spec/lib/gitlab/middleware/go_spec.rb
+++ b/spec/lib/gitlab/middleware/go_spec.rb
@@ -100,7 +100,7 @@ RSpec.describe Gitlab::Middleware::Go do
context 'without access to the project', :sidekiq_inline do
before do
- project.team.find_member(current_user).destroy
+ project.team.find_member(current_user).destroy!
end
it_behaves_like 'unauthorized'
diff --git a/spec/lib/gitlab/middleware/webhook_recursion_detection_spec.rb b/spec/lib/gitlab/middleware/webhook_recursion_detection_spec.rb
new file mode 100644
index 00000000000..c8dbc990f8c
--- /dev/null
+++ b/spec/lib/gitlab/middleware/webhook_recursion_detection_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'action_dispatch'
+require 'rack'
+require 'request_store'
+
+RSpec.describe Gitlab::Middleware::WebhookRecursionDetection do
+ let(:app) { double(:app) }
+ let(:middleware) { described_class.new(app) }
+ let(:env) { Rack::MockRequest.env_for("/").merge(headers) }
+
+ around do |example|
+ Gitlab::WithRequestStore.with_request_store { example.run }
+ end
+
+ describe '#call' do
+ subject(:call) { described_class.new(app).call(env) }
+
+ context 'when the recursion detection header is present' do
+ let(:new_uuid) { SecureRandom.uuid }
+ let(:headers) { { 'HTTP_X_GITLAB_EVENT_UUID' => new_uuid } }
+
+ it 'sets the request UUID from the header' do
+ expect(app).to receive(:call)
+ expect { call }.to change { Gitlab::WebHooks::RecursionDetection::UUID.instance.request_uuid }.to(new_uuid)
+ end
+ end
+
+ context 'when recursion headers are not present' do
+ let(:headers) { {} }
+
+ it 'works without errors' do
+ expect(app).to receive(:call)
+
+ call
+
+ expect(Gitlab::WebHooks::RecursionDetection::UUID.instance.request_uuid).to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/pagination/keyset/in_operator_optimization/order_by_column_data_spec.rb b/spec/lib/gitlab/pagination/keyset/in_operator_optimization/order_by_column_data_spec.rb
new file mode 100644
index 00000000000..b4869f49081
--- /dev/null
+++ b/spec/lib/gitlab/pagination/keyset/in_operator_optimization/order_by_column_data_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::OrderByColumnData do
+ let(:arel_table) { Issue.arel_table }
+
+ let(:column) do
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: :id,
+ column_expression: arel_table[:id],
+ order_expression: arel_table[:id].desc
+ )
+ end
+
+ subject(:column_data) { described_class.new(column, 'column_alias', arel_table) }
+
+ describe '#arel_column' do
+ it 'delegates to column_expression' do
+ expect(column_data.arel_column).to eq(column.column_expression)
+ end
+ end
+
+ describe '#column_for_projection' do
+ it 'returns the expression with AS using the original column name' do
+ expect(column_data.column_for_projection.to_sql).to eq('"issues"."id" AS id')
+ end
+ end
+
+ describe '#projection' do
+ it 'returns the expression with AS using the specified column alias' do
+ expect(column_data.projection.to_sql).to eq('"issues"."id" AS column_alias')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/pagination/keyset/in_operator_optimization/query_builder_spec.rb b/spec/lib/gitlab/pagination/keyset/in_operator_optimization/query_builder_spec.rb
index 00beacd4b35..58db22e5a9c 100644
--- a/spec/lib/gitlab/pagination/keyset/in_operator_optimization/query_builder_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset/in_operator_optimization/query_builder_spec.rb
@@ -33,14 +33,14 @@ RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::QueryBuilder
]
end
- shared_examples 'correct ordering examples' do
- let(:iterator) do
- Gitlab::Pagination::Keyset::Iterator.new(
- scope: scope.limit(batch_size),
- in_operator_optimization_options: in_operator_optimization_options
- )
- end
+ let(:iterator) do
+ Gitlab::Pagination::Keyset::Iterator.new(
+ scope: scope.limit(batch_size),
+ in_operator_optimization_options: in_operator_optimization_options
+ )
+ end
+ shared_examples 'correct ordering examples' do |opts = {}|
let(:all_records) do
all_records = []
iterator.each_batch(of: batch_size) do |records|
@@ -49,8 +49,10 @@ RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::QueryBuilder
all_records
end
- it 'returns records in correct order' do
- expect(all_records).to eq(expected_order)
+ unless opts[:skip_finder_query_test]
+ it 'returns records in correct order' do
+ expect(all_records).to eq(expected_order)
+ end
end
context 'when not passing the finder query' do
@@ -248,4 +250,57 @@ RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::QueryBuilder
expect { described_class.new(**options).execute }.to raise_error(/The order on the scope does not support keyset pagination/)
end
+
+ context 'when ordering by SQL expression' do
+ let(:order) do
+ # ORDER BY (id * 10), id
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id_multiplied_by_ten',
+ order_expression: Arel.sql('(id * 10)').asc,
+ sql_type: 'integer'
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: :id,
+ order_expression: Issue.arel_table[:id].asc
+ )
+ ])
+ end
+
+ let(:scope) { Issue.reorder(order) }
+ let(:expected_order) { issues.sort_by(&:id) }
+
+ let(:in_operator_optimization_options) do
+ {
+ array_scope: Project.where(namespace_id: top_level_group.self_and_descendants.select(:id)).select(:id),
+ array_mapping_scope: -> (id_expression) { Issue.where(Issue.arel_table[:project_id].eq(id_expression)) }
+ }
+ end
+
+ context 'when iterating records one by one' do
+ let(:batch_size) { 1 }
+
+ it_behaves_like 'correct ordering examples', skip_finder_query_test: true
+ end
+
+ context 'when iterating records with LIMIT 3' do
+ let(:batch_size) { 3 }
+
+ it_behaves_like 'correct ordering examples', skip_finder_query_test: true
+ end
+
+ context 'when passing finder query' do
+ let(:batch_size) { 3 }
+
+ it 'raises error, loading complete rows are not supported with SQL expressions' do
+ in_operator_optimization_options[:finder_query] = -> (_, _) { Issue.select(:id, '(id * 10)').where(id: -1) }
+
+ expect(in_operator_optimization_options[:finder_query]).not_to receive(:call)
+
+ expect do
+ iterator.each_batch(of: batch_size) { |records| records.to_a }
+ end.to raise_error /The "RecordLoaderStrategy" does not support/
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/pagination/keyset/in_operator_optimization/strategies/order_values_loader_strategy_spec.rb b/spec/lib/gitlab/pagination/keyset/in_operator_optimization/strategies/order_values_loader_strategy_spec.rb
index fe95d5406dd..ab1037b318b 100644
--- a/spec/lib/gitlab/pagination/keyset/in_operator_optimization/strategies/order_values_loader_strategy_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset/in_operator_optimization/strategies/order_values_loader_strategy_spec.rb
@@ -31,4 +31,41 @@ RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::Strategies::O
])
end
end
+
+ context 'when an SQL expression is given' do
+ context 'when the sql_type attribute is missing' do
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id_times_ten',
+ order_expression: Arel.sql('id * 10').asc
+ )
+ ])
+ end
+
+ let(:keyset_scope) { Project.order(order) }
+
+ it 'raises error' do
+ expect { strategy.initializer_columns }.to raise_error(Gitlab::Pagination::Keyset::SqlTypeMissingError)
+ end
+ end
+
+ context 'when the sql_type_attribute is present' do
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id_times_ten',
+ order_expression: Arel.sql('id * 10').asc,
+ sql_type: 'integer'
+ )
+ ])
+ end
+
+ let(:keyset_scope) { Project.order(order) }
+
+ it 'returns the initializer columns' do
+ expect(strategy.initializer_columns).to eq(['NULL::integer AS id_times_ten'])
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/redis/multi_store_spec.rb b/spec/lib/gitlab/redis/multi_store_spec.rb
deleted file mode 100644
index 76731bb916c..00000000000
--- a/spec/lib/gitlab/redis/multi_store_spec.rb
+++ /dev/null
@@ -1,676 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Redis::MultiStore do
- using RSpec::Parameterized::TableSyntax
-
- let_it_be(:redis_store_class) do
- Class.new(Gitlab::Redis::Wrapper) do
- def config_file_name
- config_file_name = "spec/fixtures/config/redis_new_format_host.yml"
- Rails.root.join(config_file_name).to_s
- end
-
- def self.name
- 'Sessions'
- end
- end
- end
-
- let_it_be(:primary_db) { 1 }
- let_it_be(:secondary_db) { 2 }
- let_it_be(:primary_store) { create_redis_store(redis_store_class.params, db: primary_db, serializer: nil) }
- let_it_be(:secondary_store) { create_redis_store(redis_store_class.params, db: secondary_db, serializer: nil) }
- let_it_be(:instance_name) { 'TestStore' }
- let_it_be(:multi_store) { described_class.new(primary_store, secondary_store, instance_name)}
-
- subject { multi_store.send(name, *args) }
-
- before do
- skip_feature_flags_yaml_validation
- skip_default_enabled_yaml_check
- end
-
- after(:all) do
- primary_store.flushdb
- secondary_store.flushdb
- end
-
- context 'when primary_store is nil' do
- let(:multi_store) { described_class.new(nil, secondary_store, instance_name)}
-
- it 'fails with exception' do
- expect { multi_store }.to raise_error(ArgumentError, /primary_store is required/)
- end
- end
-
- context 'when secondary_store is nil' do
- let(:multi_store) { described_class.new(primary_store, nil, instance_name)}
-
- it 'fails with exception' do
- expect { multi_store }.to raise_error(ArgumentError, /secondary_store is required/)
- end
- end
-
- context 'when instance_name is nil' do
- let(:instance_name) { nil }
- let(:multi_store) { described_class.new(primary_store, secondary_store, instance_name)}
-
- it 'fails with exception' do
- expect { multi_store }.to raise_error(ArgumentError, /instance_name is required/)
- end
- end
-
- context 'when primary_store is not a ::Redis instance' do
- before do
- allow(primary_store).to receive(:is_a?).with(::Redis).and_return(false)
- end
-
- it 'fails with exception' do
- expect { described_class.new(primary_store, secondary_store, instance_name) }.to raise_error(ArgumentError, /invalid primary_store/)
- end
- end
-
- context 'when secondary_store is not a ::Redis instance' do
- before do
- allow(secondary_store).to receive(:is_a?).with(::Redis).and_return(false)
- end
-
- it 'fails with exception' do
- expect { described_class.new(primary_store, secondary_store, instance_name) }.to raise_error(ArgumentError, /invalid secondary_store/)
- end
- end
-
- context 'with READ redis commands' do
- let_it_be(:key1) { "redis:{1}:key_a" }
- let_it_be(:key2) { "redis:{1}:key_b" }
- let_it_be(:value1) { "redis_value1"}
- let_it_be(:value2) { "redis_value2"}
- let_it_be(:skey) { "redis:set:key" }
- let_it_be(:keys) { [key1, key2] }
- let_it_be(:values) { [value1, value2] }
- let_it_be(:svalues) { [value2, value1] }
-
- where(:case_name, :name, :args, :value, :block) do
- 'execute :get command' | :get | ref(:key1) | ref(:value1) | nil
- 'execute :mget command' | :mget | ref(:keys) | ref(:values) | nil
- 'execute :mget with block' | :mget | ref(:keys) | ref(:values) | ->(value) { value }
- 'execute :smembers command' | :smembers | ref(:skey) | ref(:svalues) | nil
- 'execute :scard command' | :scard | ref(:skey) | 2 | nil
- end
-
- before(:all) do
- primary_store.multi do |multi|
- multi.set(key1, value1)
- multi.set(key2, value2)
- multi.sadd(skey, value1)
- multi.sadd(skey, value2)
- end
-
- secondary_store.multi do |multi|
- multi.set(key1, value1)
- multi.set(key2, value2)
- multi.sadd(skey, value1)
- multi.sadd(skey, value2)
- end
- end
-
- RSpec.shared_examples_for 'reads correct value' do
- it 'returns the correct value' do
- if value.is_a?(Array)
- # :smembers does not guarantee the order it will return the values (unsorted set)
- is_expected.to match_array(value)
- else
- is_expected.to eq(value)
- end
- end
- end
-
- RSpec.shared_examples_for 'fallback read from the secondary store' do
- let(:counter) { Gitlab::Metrics::NullMetric.instance }
-
- before do
- allow(Gitlab::Metrics).to receive(:counter).and_return(counter)
- end
-
- it 'fallback and execute on secondary instance' do
- expect(secondary_store).to receive(name).with(*args).and_call_original
-
- subject
- end
-
- it 'logs the ReadFromPrimaryError' do
- expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(Gitlab::Redis::MultiStore::ReadFromPrimaryError),
- hash_including(command_name: name, extra: hash_including(instance_name: instance_name)))
-
- subject
- end
-
- it 'increment read fallback count metrics' do
- expect(counter).to receive(:increment).with(command: name, instance_name: instance_name)
-
- subject
- end
-
- include_examples 'reads correct value'
-
- context 'when fallback read from the secondary instance raises an exception' do
- before do
- allow(secondary_store).to receive(name).with(*args).and_raise(StandardError)
- end
-
- it 'fails with exception' do
- expect { subject }.to raise_error(StandardError)
- end
- end
- end
-
- RSpec.shared_examples_for 'secondary store' do
- it 'execute on the secondary instance' do
- expect(secondary_store).to receive(name).with(*args).and_call_original
-
- subject
- end
-
- include_examples 'reads correct value'
-
- it 'does not execute on the primary store' do
- expect(primary_store).not_to receive(name)
-
- subject
- end
- end
-
- with_them do
- describe "#{name}" do
- before do
- allow(primary_store).to receive(name).and_call_original
- allow(secondary_store).to receive(name).and_call_original
- end
-
- context 'with feature flag :use_primary_and_secondary_stores_for_test_store' do
- before do
- stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
- end
-
- context 'when reading from the primary is successful' do
- it 'returns the correct value' do
- expect(primary_store).to receive(name).with(*args).and_call_original
-
- subject
- end
-
- it 'does not execute on the secondary store' do
- expect(secondary_store).not_to receive(name)
-
- subject
- end
-
- include_examples 'reads correct value'
- end
-
- context 'when reading from primary instance is raising an exception' do
- before do
- allow(primary_store).to receive(name).with(*args).and_raise(StandardError)
- allow(Gitlab::ErrorTracking).to receive(:log_exception)
- end
-
- it 'logs the exception' do
- expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(StandardError),
- hash_including(extra: hash_including(:multi_store_error_message, instance_name: instance_name),
- command_name: name))
-
- subject
- end
-
- include_examples 'fallback read from the secondary store'
- end
-
- context 'when reading from primary instance return no value' do
- before do
- allow(primary_store).to receive(name).and_return(nil)
- end
-
- include_examples 'fallback read from the secondary store'
- end
-
- context 'when the command is executed within pipelined block' do
- subject do
- multi_store.pipelined do
- multi_store.send(name, *args)
- end
- end
-
- it 'is executed only 1 time on primary instance' do
- expect(primary_store).to receive(name).with(*args).once
-
- subject
- end
- end
-
- if params[:block]
- subject do
- multi_store.send(name, *args, &block)
- end
-
- context 'when block is provided' do
- it 'yields to the block' do
- expect(primary_store).to receive(name).and_yield(value)
-
- subject
- end
-
- include_examples 'reads correct value'
- end
- end
- end
-
- context 'with feature flag :use_primary_and_secondary_stores_for_test_store' do
- before do
- stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
- end
-
- context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
- before do
- stub_feature_flags(use_primary_store_as_default_for_test_store: false)
- end
-
- it_behaves_like 'secondary store'
- end
-
- context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
- before do
- stub_feature_flags(use_primary_store_as_default_for_test_store: true)
- end
-
- it 'execute on the primary instance' do
- expect(primary_store).to receive(name).with(*args).and_call_original
-
- subject
- end
-
- include_examples 'reads correct value'
-
- it 'does not execute on the secondary store' do
- expect(secondary_store).not_to receive(name)
-
- subject
- end
- end
- end
-
- context 'with both primary and secondary store using same redis instance' do
- let(:primary_store) { create_redis_store(redis_store_class.params, db: primary_db, serializer: nil) }
- let(:secondary_store) { create_redis_store(redis_store_class.params, db: primary_db, serializer: nil) }
- let(:multi_store) { described_class.new(primary_store, secondary_store, instance_name)}
-
- it_behaves_like 'secondary store'
- end
- end
- end
- end
-
- context 'with WRITE redis commands' do
- let_it_be(:key1) { "redis:{1}:key_a" }
- let_it_be(:key2) { "redis:{1}:key_b" }
- let_it_be(:value1) { "redis_value1"}
- let_it_be(:value2) { "redis_value2"}
- let_it_be(:key1_value1) { [key1, value1] }
- let_it_be(:key1_value2) { [key1, value2] }
- let_it_be(:ttl) { 10 }
- let_it_be(:key1_ttl_value1) { [key1, ttl, value1] }
- let_it_be(:skey) { "redis:set:key" }
- let_it_be(:svalues1) { [value2, value1] }
- let_it_be(:svalues2) { [value1] }
- let_it_be(:skey_value1) { [skey, value1] }
- let_it_be(:skey_value2) { [skey, value2] }
-
- where(:case_name, :name, :args, :expected_value, :verification_name, :verification_args) do
- 'execute :set command' | :set | ref(:key1_value1) | ref(:value1) | :get | ref(:key1)
- 'execute :setnx command' | :setnx | ref(:key1_value2) | ref(:value1) | :get | ref(:key2)
- 'execute :setex command' | :setex | ref(:key1_ttl_value1) | ref(:ttl) | :ttl | ref(:key1)
- 'execute :sadd command' | :sadd | ref(:skey_value2) | ref(:svalues1) | :smembers | ref(:skey)
- 'execute :srem command' | :srem | ref(:skey_value1) | [] | :smembers | ref(:skey)
- 'execute :del command' | :del | ref(:key2) | nil | :get | ref(:key2)
- 'execute :flushdb command' | :flushdb | nil | 0 | :dbsize | nil
- end
-
- before do
- primary_store.flushdb
- secondary_store.flushdb
-
- primary_store.multi do |multi|
- multi.set(key2, value1)
- multi.sadd(skey, value1)
- end
-
- secondary_store.multi do |multi|
- multi.set(key2, value1)
- multi.sadd(skey, value1)
- end
- end
-
- RSpec.shared_examples_for 'verify that store contains values' do |store|
- it "#{store} redis store contains correct values", :aggregate_errors do
- subject
-
- redis_store = multi_store.send(store)
-
- if expected_value.is_a?(Array)
- # :smembers does not guarantee the order it will return the values
- expect(redis_store.send(verification_name, *verification_args)).to match_array(expected_value)
- else
- expect(redis_store.send(verification_name, *verification_args)).to eq(expected_value)
- end
- end
- end
-
- with_them do
- describe "#{name}" do
- let(:expected_args) {args || no_args }
-
- before do
- allow(primary_store).to receive(name).and_call_original
- allow(secondary_store).to receive(name).and_call_original
- end
-
- context 'with feature flag :use_primary_and_secondary_stores_for_test_store' do
- before do
- stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
- end
-
- context 'when executing on primary instance is successful' do
- it 'executes on both primary and secondary redis store', :aggregate_errors do
- expect(primary_store).to receive(name).with(*expected_args).and_call_original
- expect(secondary_store).to receive(name).with(*expected_args).and_call_original
-
- subject
- end
-
- include_examples 'verify that store contains values', :primary_store
- include_examples 'verify that store contains values', :secondary_store
- end
-
- context 'when executing on the primary instance is raising an exception' do
- before do
- allow(primary_store).to receive(name).with(*expected_args).and_raise(StandardError)
- allow(Gitlab::ErrorTracking).to receive(:log_exception)
- end
-
- it 'logs the exception and execute on secondary instance', :aggregate_errors do
- expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(StandardError),
- hash_including(extra: hash_including(:multi_store_error_message), command_name: name))
- expect(secondary_store).to receive(name).with(*expected_args).and_call_original
-
- subject
- end
-
- include_examples 'verify that store contains values', :secondary_store
- end
-
- context 'when the command is executed within pipelined block' do
- subject do
- multi_store.pipelined do
- multi_store.send(name, *args)
- end
- end
-
- it 'is executed only 1 time on each instance', :aggregate_errors do
- expect(primary_store).to receive(name).with(*expected_args).once
- expect(secondary_store).to receive(name).with(*expected_args).once
-
- subject
- end
-
- include_examples 'verify that store contains values', :primary_store
- include_examples 'verify that store contains values', :secondary_store
- end
- end
-
- context 'with feature flag :use_primary_and_secondary_stores_for_test_store is disabled' do
- before do
- stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
- end
-
- context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
- before do
- stub_feature_flags(use_primary_store_as_default_for_test_store: false)
- end
-
- it 'executes only on the secondary redis store', :aggregate_errors do
- expect(secondary_store).to receive(name).with(*expected_args)
- expect(primary_store).not_to receive(name).with(*expected_args)
-
- subject
- end
-
- include_examples 'verify that store contains values', :secondary_store
- end
-
- context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
- before do
- stub_feature_flags(use_primary_store_as_default_for_test_store: true)
- end
-
- it 'executes only on the primary_redis redis store', :aggregate_errors do
- expect(primary_store).to receive(name).with(*expected_args)
- expect(secondary_store).not_to receive(name).with(*expected_args)
-
- subject
- end
-
- include_examples 'verify that store contains values', :primary_store
- end
- end
- end
- end
- end
-
- context 'with unsupported command' do
- let(:counter) { Gitlab::Metrics::NullMetric.instance }
-
- before do
- primary_store.flushdb
- secondary_store.flushdb
- allow(Gitlab::Metrics).to receive(:counter).and_return(counter)
- end
-
- let_it_be(:key) { "redis:counter" }
-
- subject { multi_store.incr(key) }
-
- it 'executes method missing' do
- expect(multi_store).to receive(:method_missing)
-
- subject
- end
-
- context 'when command is not in SKIP_LOG_METHOD_MISSING_FOR_COMMANDS' do
- it 'logs MethodMissingError' do
- expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(Gitlab::Redis::MultiStore::MethodMissingError),
- hash_including(command_name: :incr, extra: hash_including(instance_name: instance_name)))
-
- subject
- end
-
- it 'increments method missing counter' do
- expect(counter).to receive(:increment).with(command: :incr, instance_name: instance_name)
-
- subject
- end
- end
-
- context 'when command is in SKIP_LOG_METHOD_MISSING_FOR_COMMANDS' do
- subject { multi_store.info }
-
- it 'does not log MethodMissingError' do
- expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
-
- subject
- end
-
- it 'does not increment method missing counter' do
- expect(counter).not_to receive(:increment)
-
- subject
- end
- end
-
- context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
- before do
- stub_feature_flags(use_primary_store_as_default_for_test_store: true)
- end
-
- it 'fallback and executes only on the secondary store', :aggregate_errors do
- expect(primary_store).to receive(:incr).with(key).and_call_original
- expect(secondary_store).not_to receive(:incr)
-
- subject
- end
-
- it 'correct value is stored on the secondary store', :aggregate_errors do
- subject
-
- expect(secondary_store.get(key)).to be_nil
- expect(primary_store.get(key)).to eq('1')
- end
- end
-
- context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
- before do
- stub_feature_flags(use_primary_store_as_default_for_test_store: false)
- end
-
- it 'fallback and executes only on the secondary store', :aggregate_errors do
- expect(secondary_store).to receive(:incr).with(key).and_call_original
- expect(primary_store).not_to receive(:incr)
-
- subject
- end
-
- it 'correct value is stored on the secondary store', :aggregate_errors do
- subject
-
- expect(primary_store.get(key)).to be_nil
- expect(secondary_store.get(key)).to eq('1')
- end
- end
-
- context 'when the command is executed within pipelined block' do
- subject do
- multi_store.pipelined do
- multi_store.incr(key)
- end
- end
-
- it 'is executed only 1 time on each instance', :aggregate_errors do
- expect(primary_store).to receive(:incr).with(key).once
- expect(secondary_store).to receive(:incr).with(key).once
-
- subject
- end
-
- it "both redis stores are containing correct values", :aggregate_errors do
- subject
-
- expect(primary_store.get(key)).to eq('1')
- expect(secondary_store.get(key)).to eq('1')
- end
- end
- end
-
- describe '#to_s' do
- subject { multi_store.to_s }
-
- context 'with feature flag :use_primary_and_secondary_stores_for_test_store is enabled' do
- before do
- stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
- end
-
- it 'returns same value as primary_store' do
- is_expected.to eq(primary_store.to_s)
- end
- end
-
- context 'with feature flag :use_primary_and_secondary_stores_for_test_store is disabled' do
- before do
- stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
- end
-
- context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
- before do
- stub_feature_flags(use_primary_store_as_default_for_test_store: true)
- end
-
- it 'returns same value as primary_store' do
- is_expected.to eq(primary_store.to_s)
- end
- end
-
- context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
- before do
- stub_feature_flags(use_primary_store_as_default_for_test_store: false)
- end
-
- it 'returns same value as primary_store' do
- is_expected.to eq(secondary_store.to_s)
- end
- end
- end
- end
-
- describe '#is_a?' do
- it 'returns true for ::Redis::Store' do
- expect(multi_store.is_a?(::Redis::Store)).to be true
- end
- end
-
- describe '#use_primary_and_secondary_stores?' do
- context 'with feature flag :use_primary_and_secondary_stores_for_test_store is enabled' do
- before do
- stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
- end
-
- it 'multi store is disabled' do
- expect(multi_store.use_primary_and_secondary_stores?).to be true
- end
- end
-
- context 'with feature flag :use_primary_and_secondary_stores_for_test_store is disabled' do
- before do
- stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
- end
-
- it 'multi store is disabled' do
- expect(multi_store.use_primary_and_secondary_stores?).to be false
- end
- end
- end
-
- describe '#use_primary_store_as_default?' do
- context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
- before do
- stub_feature_flags(use_primary_store_as_default_for_test_store: true)
- end
-
- it 'multi store is disabled' do
- expect(multi_store.use_primary_store_as_default?).to be true
- end
- end
-
- context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
- before do
- stub_feature_flags(use_primary_store_as_default_for_test_store: false)
- end
-
- it 'multi store is disabled' do
- expect(multi_store.use_primary_store_as_default?).to be false
- end
- end
- end
-
- def create_redis_store(options, extras = {})
- ::Redis::Store.new(options.merge(extras))
- end
-end
diff --git a/spec/lib/gitlab/redis/sessions_spec.rb b/spec/lib/gitlab/redis/sessions_spec.rb
index 6ecbbf3294d..b02864cb73d 100644
--- a/spec/lib/gitlab/redis/sessions_spec.rb
+++ b/spec/lib/gitlab/redis/sessions_spec.rb
@@ -6,31 +6,16 @@ RSpec.describe Gitlab::Redis::Sessions do
it_behaves_like "redis_new_instance_shared_examples", 'sessions', Gitlab::Redis::SharedState
describe 'redis instance used in connection pool' do
- before do
+ around do |example|
clear_pool
- end
-
- after do
+ example.run
+ ensure
clear_pool
end
- context 'when redis.sessions configuration is not provided' do
- it 'uses ::Redis instance' do
- expect(described_class).to receive(:config_fallback?).and_return(true)
-
- described_class.pool.with do |redis_instance|
- expect(redis_instance).to be_instance_of(::Redis)
- end
- end
- end
-
- context 'when redis.sessions configuration is provided' do
- it 'instantiates an instance of MultiStore' do
- expect(described_class).to receive(:config_fallback?).and_return(false)
-
- described_class.pool.with do |redis_instance|
- expect(redis_instance).to be_instance_of(::Gitlab::Redis::MultiStore)
- end
+ it 'uses ::Redis instance' do
+ described_class.pool.with do |redis_instance|
+ expect(redis_instance).to be_instance_of(::Redis)
end
end
@@ -44,49 +29,9 @@ RSpec.describe Gitlab::Redis::Sessions do
describe '#store' do
subject(:store) { described_class.store(namespace: described_class::SESSION_NAMESPACE) }
- context 'when redis.sessions configuration is NOT provided' do
- it 'instantiates ::Redis instance' do
- expect(described_class).to receive(:config_fallback?).and_return(true)
- expect(store).to be_instance_of(::Redis::Store)
- end
- end
-
- context 'when redis.sessions configuration is provided' do
- let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" }
- let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" }
-
- before do
- redis_clear_raw_config!(Gitlab::Redis::Sessions)
- redis_clear_raw_config!(Gitlab::Redis::SharedState)
- allow(described_class).to receive(:config_fallback?).and_return(false)
- end
-
- after do
- redis_clear_raw_config!(Gitlab::Redis::Sessions)
- redis_clear_raw_config!(Gitlab::Redis::SharedState)
- end
-
- # Check that Gitlab::Redis::Sessions is configured as MultiStore with proper attrs.
- it 'instantiates an instance of MultiStore', :aggregate_failures do
- expect(described_class).to receive(:config_file_name).and_return(config_new_format_host)
- expect(::Gitlab::Redis::SharedState).to receive(:config_file_name).and_return(config_new_format_socket)
-
- expect(store).to be_instance_of(::Gitlab::Redis::MultiStore)
-
- expect(store.primary_store.to_s).to eq("Redis Client connected to test-host:6379 against DB 99 with namespace session:gitlab")
- expect(store.secondary_store.to_s).to eq("Redis Client connected to /path/to/redis.sock against DB 0 with namespace session:gitlab")
-
- expect(store.instance_name).to eq('Sessions')
- end
-
- context 'when MultiStore correctly configured' do
- before do
- allow(described_class).to receive(:config_file_name).and_return(config_new_format_host)
- allow(::Gitlab::Redis::SharedState).to receive(:config_file_name).and_return(config_new_format_socket)
- end
-
- it_behaves_like 'multi store feature flags', :use_primary_and_secondary_stores_for_sessions, :use_primary_store_as_default_for_sessions
- end
+ # Check that Gitlab::Redis::Sessions is configured as RedisStore.
+ it 'instantiates an instance of Redis::Store' do
+ expect(store).to be_instance_of(::Redis::Store)
end
end
end
diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb
index 83f85cc73d0..8d67350f0f3 100644
--- a/spec/lib/gitlab/regex_spec.rb
+++ b/spec/lib/gitlab/regex_spec.rb
@@ -433,6 +433,7 @@ RSpec.describe Gitlab::Regex do
describe '.nuget_version_regex' do
subject { described_class.nuget_version_regex }
+ it { is_expected.to match('1.2') }
it { is_expected.to match('1.2.3') }
it { is_expected.to match('1.2.3.4') }
it { is_expected.to match('1.2.3.4-stable.1') }
@@ -440,7 +441,6 @@ RSpec.describe Gitlab::Regex do
it { is_expected.to match('1.2.3-alpha.3') }
it { is_expected.to match('1.0.7+r3456') }
it { is_expected.not_to match('1') }
- it { is_expected.not_to match('1.2') }
it { is_expected.not_to match('1./2.3') }
it { is_expected.not_to match('../../../../../1.2.3') }
it { is_expected.not_to match('%2e%2e%2f1.2.3') }
diff --git a/spec/lib/gitlab/search/params_spec.rb b/spec/lib/gitlab/search/params_spec.rb
index 6d15337b872..13770e550ec 100644
--- a/spec/lib/gitlab/search/params_spec.rb
+++ b/spec/lib/gitlab/search/params_spec.rb
@@ -133,4 +133,12 @@ RSpec.describe Gitlab::Search::Params do
end
end
end
+
+ describe '#email_lookup?' do
+ it 'is true if at least 1 word in search is an email' do
+ expect(described_class.new({ search: 'email@example.com' })).to be_email_lookup
+ expect(described_class.new({ search: 'foo email@example.com bar' })).to be_email_lookup
+ expect(described_class.new({ search: 'foo bar' })).not_to be_email_lookup
+ end
+ end
end
diff --git a/spec/lib/gitlab/shard_health_cache_spec.rb b/spec/lib/gitlab/shard_health_cache_spec.rb
index 5c47ac7e9a0..0c25cc7dab5 100644
--- a/spec/lib/gitlab/shard_health_cache_spec.rb
+++ b/spec/lib/gitlab/shard_health_cache_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::ShardHealthCache, :clean_gitlab_redis_cache do
let(:shards) { %w(foo bar) }
before do
- described_class.update(shards)
+ described_class.update(shards) # rubocop:disable Rails/SaveBang
end
describe '.clear' do
@@ -24,7 +24,7 @@ RSpec.describe Gitlab::ShardHealthCache, :clean_gitlab_redis_cache do
it 'replaces the existing set' do
new_set = %w(test me more)
- described_class.update(new_set)
+ described_class.update(new_set) # rubocop:disable Rails/SaveBang
expect(described_class.cached_healthy_shards).to match_array(new_set)
end
@@ -36,7 +36,7 @@ RSpec.describe Gitlab::ShardHealthCache, :clean_gitlab_redis_cache do
end
it 'returns 0 if no shards are available' do
- described_class.update([])
+ described_class.update([]) # rubocop:disable Rails/SaveBang
expect(described_class.healthy_shard_count).to eq(0)
end
diff --git a/spec/lib/gitlab/sherlock/collection_spec.rb b/spec/lib/gitlab/sherlock/collection_spec.rb
deleted file mode 100644
index fcf8e6638f8..00000000000
--- a/spec/lib/gitlab/sherlock/collection_spec.rb
+++ /dev/null
@@ -1,84 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Sherlock::Collection do
- let(:collection) { described_class.new }
-
- let(:transaction) do
- Gitlab::Sherlock::Transaction.new('POST', '/cat_pictures')
- end
-
- describe '#add' do
- it 'adds a new transaction' do
- collection.add(transaction)
-
- expect(collection).not_to be_empty
- end
-
- it 'is aliased as <<' do
- collection << transaction
-
- expect(collection).not_to be_empty
- end
- end
-
- describe '#each' do
- it 'iterates over every transaction' do
- collection.add(transaction)
-
- expect { |b| collection.each(&b) }.to yield_with_args(transaction)
- end
- end
-
- describe '#clear' do
- it 'removes all transactions' do
- collection.add(transaction)
-
- collection.clear
-
- expect(collection).to be_empty
- end
- end
-
- describe '#empty?' do
- it 'returns true for an empty collection' do
- expect(collection).to be_empty
- end
-
- it 'returns false for a collection with a transaction' do
- collection.add(transaction)
-
- expect(collection).not_to be_empty
- end
- end
-
- describe '#find_transaction' do
- it 'returns the transaction for the given ID' do
- collection.add(transaction)
-
- expect(collection.find_transaction(transaction.id)).to eq(transaction)
- end
-
- it 'returns nil when no transaction could be found' do
- collection.add(transaction)
-
- expect(collection.find_transaction('cats')).to be_nil
- end
- end
-
- describe '#newest_first' do
- it 'returns transactions sorted from new to old' do
- trans1 = Gitlab::Sherlock::Transaction.new('POST', '/cat_pictures')
- trans2 = Gitlab::Sherlock::Transaction.new('POST', '/more_cat_pictures')
-
- allow(trans1).to receive(:finished_at).and_return(Time.utc(2015, 1, 1))
- allow(trans2).to receive(:finished_at).and_return(Time.utc(2015, 1, 2))
-
- collection.add(trans1)
- collection.add(trans2)
-
- expect(collection.newest_first).to eq([trans2, trans1])
- end
- end
-end
diff --git a/spec/lib/gitlab/sherlock/file_sample_spec.rb b/spec/lib/gitlab/sherlock/file_sample_spec.rb
deleted file mode 100644
index 8a1aa51e2d4..00000000000
--- a/spec/lib/gitlab/sherlock/file_sample_spec.rb
+++ /dev/null
@@ -1,56 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Sherlock::FileSample do
- let(:sample) { described_class.new(__FILE__, [], 150.4, 2) }
-
- describe '#id' do
- it 'returns the ID' do
- expect(sample.id).to be_an_instance_of(String)
- end
- end
-
- describe '#file' do
- it 'returns the file path' do
- expect(sample.file).to eq(__FILE__)
- end
- end
-
- describe '#line_samples' do
- it 'returns the line samples' do
- expect(sample.line_samples).to eq([])
- end
- end
-
- describe '#events' do
- it 'returns the total number of events' do
- expect(sample.events).to eq(2)
- end
- end
-
- describe '#duration' do
- it 'returns the total execution time' do
- expect(sample.duration).to eq(150.4)
- end
- end
-
- describe '#relative_path' do
- it 'returns the relative path' do
- expect(sample.relative_path)
- .to eq('spec/lib/gitlab/sherlock/file_sample_spec.rb')
- end
- end
-
- describe '#to_param' do
- it 'returns the sample ID' do
- expect(sample.to_param).to eq(sample.id)
- end
- end
-
- describe '#source' do
- it 'returns the contents of the file' do
- expect(sample.source).to eq(File.read(__FILE__))
- end
- end
-end
diff --git a/spec/lib/gitlab/sherlock/line_profiler_spec.rb b/spec/lib/gitlab/sherlock/line_profiler_spec.rb
deleted file mode 100644
index 2220a2cafc8..00000000000
--- a/spec/lib/gitlab/sherlock/line_profiler_spec.rb
+++ /dev/null
@@ -1,75 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Sherlock::LineProfiler do
- let(:profiler) { described_class.new }
-
- describe '#profile' do
- it 'runs the profiler when using MRI' do
- allow(profiler).to receive(:mri?).and_return(true)
- allow(profiler).to receive(:profile_mri)
-
- profiler.profile { 'cats' }
- end
-
- it 'raises NotImplementedError when profiling an unsupported platform' do
- allow(profiler).to receive(:mri?).and_return(false)
-
- expect { profiler.profile { 'cats' } }.to raise_error(NotImplementedError)
- end
- end
-
- describe '#profile_mri' do
- it 'returns an Array containing the return value and profiling samples' do
- allow(profiler).to receive(:lineprof)
- .and_yield
- .and_return({ __FILE__ => [[0, 0, 0, 0]] })
-
- retval, samples = profiler.profile_mri { 42 }
-
- expect(retval).to eq(42)
- expect(samples).to eq([])
- end
- end
-
- describe '#aggregate_rblineprof' do
- let(:raw_samples) do
- { __FILE__ => [[30000, 30000, 5, 0], [15000, 15000, 4, 0]] }
- end
-
- it 'returns an Array of FileSample objects' do
- samples = profiler.aggregate_rblineprof(raw_samples)
-
- expect(samples).to be_an_instance_of(Array)
- expect(samples[0]).to be_an_instance_of(Gitlab::Sherlock::FileSample)
- end
-
- describe 'the first FileSample object' do
- let(:file_sample) do
- profiler.aggregate_rblineprof(raw_samples)[0]
- end
-
- it 'uses the correct file path' do
- expect(file_sample.file).to eq(__FILE__)
- end
-
- it 'contains a list of line samples' do
- line_sample = file_sample.line_samples[0]
-
- expect(line_sample).to be_an_instance_of(Gitlab::Sherlock::LineSample)
-
- expect(line_sample.duration).to eq(15.0)
- expect(line_sample.events).to eq(4)
- end
-
- it 'contains the total file execution time' do
- expect(file_sample.duration).to eq(30.0)
- end
-
- it 'contains the total amount of file events' do
- expect(file_sample.events).to eq(5)
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/sherlock/line_sample_spec.rb b/spec/lib/gitlab/sherlock/line_sample_spec.rb
deleted file mode 100644
index db031377787..00000000000
--- a/spec/lib/gitlab/sherlock/line_sample_spec.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Sherlock::LineSample do
- let(:sample) { described_class.new(150.0, 4) }
-
- describe '#duration' do
- it 'returns the duration' do
- expect(sample.duration).to eq(150.0)
- end
- end
-
- describe '#events' do
- it 'returns the amount of events' do
- expect(sample.events).to eq(4)
- end
- end
-
- describe '#percentage_of' do
- it 'returns the percentage of 1500.0' do
- expect(sample.percentage_of(1500.0)).to be_within(0.1).of(10.0)
- end
- end
-
- describe '#majority_of' do
- it 'returns true if the sample takes up the majority of the given duration' do
- expect(sample.majority_of?(500.0)).to eq(true)
- end
-
- it "returns false if the sample doesn't take up the majority of the given duration" do
- expect(sample.majority_of?(1500.0)).to eq(false)
- end
- end
-end
diff --git a/spec/lib/gitlab/sherlock/location_spec.rb b/spec/lib/gitlab/sherlock/location_spec.rb
deleted file mode 100644
index 4a8b5dffba2..00000000000
--- a/spec/lib/gitlab/sherlock/location_spec.rb
+++ /dev/null
@@ -1,42 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Sherlock::Location do
- let(:location) { described_class.new(__FILE__, 1) }
-
- describe 'from_ruby_location' do
- it 'creates a Location from a Thread::Backtrace::Location' do
- input = caller_locations[0]
- output = described_class.from_ruby_location(input)
-
- expect(output).to be_an_instance_of(described_class)
- expect(output.path).to eq(input.path)
- expect(output.line).to eq(input.lineno)
- end
- end
-
- describe '#path' do
- it 'returns the file path' do
- expect(location.path).to eq(__FILE__)
- end
- end
-
- describe '#line' do
- it 'returns the line number' do
- expect(location.line).to eq(1)
- end
- end
-
- describe '#application?' do
- it 'returns true for an application frame' do
- expect(location.application?).to eq(true)
- end
-
- it 'returns false for a non application frame' do
- loc = described_class.new('/tmp/cats.rb', 1)
-
- expect(loc.application?).to eq(false)
- end
- end
-end
diff --git a/spec/lib/gitlab/sherlock/middleware_spec.rb b/spec/lib/gitlab/sherlock/middleware_spec.rb
deleted file mode 100644
index 645bde6681d..00000000000
--- a/spec/lib/gitlab/sherlock/middleware_spec.rb
+++ /dev/null
@@ -1,81 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Sherlock::Middleware do
- let(:app) { double(:app) }
- let(:middleware) { described_class.new(app) }
-
- describe '#call' do
- describe 'when instrumentation is enabled' do
- it 'instruments a request' do
- allow(middleware).to receive(:instrument?).and_return(true)
- allow(middleware).to receive(:call_with_instrumentation)
-
- middleware.call({})
- end
- end
-
- describe 'when instrumentation is disabled' do
- it "doesn't instrument a request" do
- allow(middleware).to receive(:instrument).and_return(false)
- allow(app).to receive(:call)
-
- middleware.call({})
- end
- end
- end
-
- describe '#call_with_instrumentation' do
- it 'instruments a request' do
- trans = double(:transaction)
- retval = 'cats are amazing'
- env = {}
-
- allow(app).to receive(:call).with(env).and_return(retval)
- allow(middleware).to receive(:transaction_from_env).and_return(trans)
- allow(trans).to receive(:run).and_yield.and_return(retval)
- allow(Gitlab::Sherlock.collection).to receive(:add).with(trans)
-
- middleware.call_with_instrumentation(env)
- end
- end
-
- describe '#instrument?' do
- it 'returns false for a text/css request' do
- env = { 'HTTP_ACCEPT' => 'text/css', 'REQUEST_URI' => '/' }
-
- expect(middleware.instrument?(env)).to eq(false)
- end
-
- it 'returns false for a request to a Sherlock route' do
- env = {
- 'HTTP_ACCEPT' => 'text/html',
- 'REQUEST_URI' => '/sherlock/transactions'
- }
-
- expect(middleware.instrument?(env)).to eq(false)
- end
-
- it 'returns true for a request that should be instrumented' do
- env = {
- 'HTTP_ACCEPT' => 'text/html',
- 'REQUEST_URI' => '/cats'
- }
-
- expect(middleware.instrument?(env)).to eq(true)
- end
- end
-
- describe '#transaction_from_env' do
- it 'returns a Transaction' do
- env = {
- 'HTTP_ACCEPT' => 'text/html',
- 'REQUEST_URI' => '/cats'
- }
-
- expect(middleware.transaction_from_env(env))
- .to be_an_instance_of(Gitlab::Sherlock::Transaction)
- end
- end
-end
diff --git a/spec/lib/gitlab/sherlock/query_spec.rb b/spec/lib/gitlab/sherlock/query_spec.rb
deleted file mode 100644
index b8dfd082c37..00000000000
--- a/spec/lib/gitlab/sherlock/query_spec.rb
+++ /dev/null
@@ -1,115 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Sherlock::Query do
- let(:started_at) { Time.utc(2015, 1, 1) }
- let(:finished_at) { started_at + 5 }
-
- let(:query) do
- described_class.new('SELECT COUNT(*) FROM users', started_at, finished_at)
- end
-
- describe 'new_with_bindings' do
- it 'returns a Query' do
- sql = 'SELECT COUNT(*) FROM users WHERE id = $1'
- bindings = [[double(:column), 10]]
-
- query = described_class
- .new_with_bindings(sql, bindings, started_at, finished_at)
-
- expect(query.query).to eq('SELECT COUNT(*) FROM users WHERE id = 10;')
- end
- end
-
- describe '#id' do
- it 'returns a String' do
- expect(query.id).to be_an_instance_of(String)
- end
- end
-
- describe '#query' do
- it 'returns the query with a trailing semi-colon' do
- expect(query.query).to eq('SELECT COUNT(*) FROM users;')
- end
- end
-
- describe '#started_at' do
- it 'returns the start time' do
- expect(query.started_at).to eq(started_at)
- end
- end
-
- describe '#finished_at' do
- it 'returns the completion time' do
- expect(query.finished_at).to eq(finished_at)
- end
- end
-
- describe '#backtrace' do
- it 'returns the backtrace' do
- expect(query.backtrace).to be_an_instance_of(Array)
- end
- end
-
- describe '#duration' do
- it 'returns the duration in milliseconds' do
- expect(query.duration).to be_within(0.1).of(5000.0)
- end
- end
-
- describe '#to_param' do
- it 'returns the query ID' do
- expect(query.to_param).to eq(query.id)
- end
- end
-
- describe '#formatted_query' do
- it 'returns a formatted version of the query' do
- expect(query.formatted_query).to eq(<<-EOF.strip)
-SELECT COUNT(*)
-FROM users;
- EOF
- end
- end
-
- describe '#last_application_frame' do
- it 'returns the last application frame' do
- frame = query.last_application_frame
-
- expect(frame).to be_an_instance_of(Gitlab::Sherlock::Location)
- expect(frame.path).to eq(__FILE__)
- end
- end
-
- describe '#application_backtrace' do
- it 'returns an Array of application frames' do
- frames = query.application_backtrace
-
- expect(frames).to be_an_instance_of(Array)
- expect(frames).not_to be_empty
-
- frames.each do |frame|
- expect(frame.path).to start_with(Rails.root.to_s)
- end
- end
- end
-
- describe '#explain' do
- it 'returns the query plan as a String' do
- lines = [
- ['Aggregate (cost=123 rows=1)'],
- [' -> Index Only Scan using index_cats_are_amazing']
- ]
-
- result = double(:result, values: lines)
-
- allow(query).to receive(:raw_explain).and_return(result)
-
- expect(query.explain).to eq(<<-EOF.strip)
-Aggregate (cost=123 rows=1)
- -> Index Only Scan using index_cats_are_amazing
- EOF
- end
- end
-end
diff --git a/spec/lib/gitlab/sherlock/transaction_spec.rb b/spec/lib/gitlab/sherlock/transaction_spec.rb
deleted file mode 100644
index 535b0ad4d8a..00000000000
--- a/spec/lib/gitlab/sherlock/transaction_spec.rb
+++ /dev/null
@@ -1,238 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Sherlock::Transaction do
- let(:transaction) { described_class.new('POST', '/cat_pictures') }
-
- describe '#id' do
- it 'returns the transaction ID' do
- expect(transaction.id).to be_an_instance_of(String)
- end
- end
-
- describe '#type' do
- it 'returns the type' do
- expect(transaction.type).to eq('POST')
- end
- end
-
- describe '#path' do
- it 'returns the path' do
- expect(transaction.path).to eq('/cat_pictures')
- end
- end
-
- describe '#queries' do
- it 'returns an Array of queries' do
- expect(transaction.queries).to be_an_instance_of(Array)
- end
- end
-
- describe '#file_samples' do
- it 'returns an Array of file samples' do
- expect(transaction.file_samples).to be_an_instance_of(Array)
- end
- end
-
- describe '#started_at' do
- it 'returns the start time' do
- allow(transaction).to receive(:profile_lines).and_yield
-
- transaction.run { 'cats are amazing' }
-
- expect(transaction.started_at).to be_an_instance_of(Time)
- end
- end
-
- describe '#finished_at' do
- it 'returns the completion time' do
- allow(transaction).to receive(:profile_lines).and_yield
-
- transaction.run { 'cats are amazing' }
-
- expect(transaction.finished_at).to be_an_instance_of(Time)
- end
- end
-
- describe '#view_counts' do
- it 'returns a Hash' do
- expect(transaction.view_counts).to be_an_instance_of(Hash)
- end
-
- it 'sets the default value of a key to 0' do
- expect(transaction.view_counts['cats.rb']).to be_zero
- end
- end
-
- describe '#run' do
- it 'runs the transaction' do
- allow(transaction).to receive(:profile_lines).and_yield
-
- retval = transaction.run { 'cats are amazing' }
-
- expect(retval).to eq('cats are amazing')
- end
- end
-
- describe '#duration' do
- it 'returns the duration in seconds' do
- start_time = Time.now
-
- allow(transaction).to receive(:started_at).and_return(start_time)
- allow(transaction).to receive(:finished_at).and_return(start_time + 5)
-
- expect(transaction.duration).to be_within(0.1).of(5.0)
- end
- end
-
- describe '#query_duration' do
- it 'returns the total query duration in seconds' do
- time = Time.now
- query1 = Gitlab::Sherlock::Query.new('SELECT 1', time, time + 5)
- query2 = Gitlab::Sherlock::Query.new('SELECT 2', time, time + 2)
-
- transaction.queries << query1
- transaction.queries << query2
-
- expect(transaction.query_duration).to be_within(0.1).of(7.0)
- end
- end
-
- describe '#to_param' do
- it 'returns the transaction ID' do
- expect(transaction.to_param).to eq(transaction.id)
- end
- end
-
- describe '#sorted_queries' do
- it 'returns the queries in descending order' do
- start_time = Time.now
-
- query1 = Gitlab::Sherlock::Query.new('SELECT 1', start_time, start_time)
-
- query2 = Gitlab::Sherlock::Query
- .new('SELECT 2', start_time, start_time + 5)
-
- transaction.queries << query1
- transaction.queries << query2
-
- expect(transaction.sorted_queries).to eq([query2, query1])
- end
- end
-
- describe '#sorted_file_samples' do
- it 'returns the file samples in descending order' do
- sample1 = Gitlab::Sherlock::FileSample.new(__FILE__, [], 10.0, 1)
- sample2 = Gitlab::Sherlock::FileSample.new(__FILE__, [], 15.0, 1)
-
- transaction.file_samples << sample1
- transaction.file_samples << sample2
-
- expect(transaction.sorted_file_samples).to eq([sample2, sample1])
- end
- end
-
- describe '#find_query' do
- it 'returns a Query when found' do
- query = Gitlab::Sherlock::Query.new('SELECT 1', Time.now, Time.now)
-
- transaction.queries << query
-
- expect(transaction.find_query(query.id)).to eq(query)
- end
-
- it 'returns nil when no query could be found' do
- expect(transaction.find_query('cats')).to be_nil
- end
- end
-
- describe '#find_file_sample' do
- it 'returns a FileSample when found' do
- sample = Gitlab::Sherlock::FileSample.new(__FILE__, [], 10.0, 1)
-
- transaction.file_samples << sample
-
- expect(transaction.find_file_sample(sample.id)).to eq(sample)
- end
-
- it 'returns nil when no file sample could be found' do
- expect(transaction.find_file_sample('cats')).to be_nil
- end
- end
-
- describe '#profile_lines' do
- describe 'when line profiling is enabled' do
- it 'yields the block using the line profiler' do
- allow(Gitlab::Sherlock).to receive(:enable_line_profiler?)
- .and_return(true)
-
- allow_next_instance_of(Gitlab::Sherlock::LineProfiler) do |instance|
- allow(instance).to receive(:profile).and_return('cats are amazing', [])
- end
-
- retval = transaction.profile_lines { 'cats are amazing' }
-
- expect(retval).to eq('cats are amazing')
- end
- end
-
- describe 'when line profiling is disabled' do
- it 'yields the block' do
- allow(Gitlab::Sherlock).to receive(:enable_line_profiler?)
- .and_return(false)
-
- retval = transaction.profile_lines { 'cats are amazing' }
-
- expect(retval).to eq('cats are amazing')
- end
- end
- end
-
- describe '#subscribe_to_active_record' do
- let(:subscription) { transaction.subscribe_to_active_record }
- let(:time) { Time.now }
- let(:query_data) { { sql: 'SELECT 1', binds: [] } }
-
- after do
- ActiveSupport::Notifications.unsubscribe(subscription)
- end
-
- it 'tracks executed queries' do
- expect(transaction).to receive(:track_query)
- .with('SELECT 1', [], time, time)
-
- subscription.publish('test', time, time, nil, query_data)
- end
-
- it 'only tracks queries triggered from the transaction thread' do
- expect(transaction).not_to receive(:track_query)
-
- Thread.new { subscription.publish('test', time, time, nil, query_data) }
- .join
- end
- end
-
- describe '#subscribe_to_action_view' do
- let(:subscription) { transaction.subscribe_to_action_view }
- let(:time) { Time.now }
- let(:view_data) { { identifier: 'foo.rb' } }
-
- after do
- ActiveSupport::Notifications.unsubscribe(subscription)
- end
-
- it 'tracks rendered views' do
- expect(transaction).to receive(:track_view).with('foo.rb')
-
- subscription.publish('test', time, time, nil, view_data)
- end
-
- it 'only tracks views rendered from the transaction thread' do
- expect(transaction).not_to receive(:track_view)
-
- Thread.new { subscription.publish('test', time, time, nil, view_data) }
- .join
- end
- end
-end
diff --git a/spec/lib/gitlab/sidekiq_status/client_middleware_spec.rb b/spec/lib/gitlab/sidekiq_status/client_middleware_spec.rb
index 2f2499753b9..9affc3d5146 100644
--- a/spec/lib/gitlab/sidekiq_status/client_middleware_spec.rb
+++ b/spec/lib/gitlab/sidekiq_status/client_middleware_spec.rb
@@ -2,11 +2,11 @@
require 'fast_spec_helper'
-RSpec.describe Gitlab::SidekiqStatus::ClientMiddleware do
+RSpec.describe Gitlab::SidekiqStatus::ClientMiddleware, :clean_gitlab_redis_queues do
describe '#call' do
context 'when the job has status_expiration set' do
- it 'tracks the job in Redis with a value of 2' do
- expect(Gitlab::SidekiqStatus).to receive(:set).with('123', 1.hour.to_i, value: 2)
+ it 'tracks the job in Redis' do
+ expect(Gitlab::SidekiqStatus).to receive(:set).with('123', 1.hour.to_i)
described_class.new
.call('Foo', { 'jid' => '123', 'status_expiration' => 1.hour.to_i }, double(:queue), double(:pool)) { nil }
@@ -14,8 +14,8 @@ RSpec.describe Gitlab::SidekiqStatus::ClientMiddleware do
end
context 'when the job does not have status_expiration set' do
- it 'tracks the job in Redis with a value of 1' do
- expect(Gitlab::SidekiqStatus).to receive(:set).with('123', Gitlab::SidekiqStatus::DEFAULT_EXPIRATION, value: 1)
+ it 'does not track the job in Redis' do
+ expect(Gitlab::SidekiqStatus).to receive(:set).with('123', nil)
described_class.new
.call('Foo', { 'jid' => '123' }, double(:queue), double(:pool)) { nil }
diff --git a/spec/lib/gitlab/sidekiq_status_spec.rb b/spec/lib/gitlab/sidekiq_status_spec.rb
index 1e7b52471b0..c94deb8e008 100644
--- a/spec/lib/gitlab/sidekiq_status_spec.rb
+++ b/spec/lib/gitlab/sidekiq_status_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues, :clean_gitlab_
Sidekiq.redis do |redis|
expect(redis.exists(key)).to eq(true)
expect(redis.ttl(key) > 0).to eq(true)
- expect(redis.get(key)).to eq(described_class::DEFAULT_VALUE.to_s)
+ expect(redis.get(key)).to eq('1')
end
end
@@ -24,19 +24,17 @@ RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues, :clean_gitlab_
Sidekiq.redis do |redis|
expect(redis.exists(key)).to eq(true)
expect(redis.ttl(key) > described_class::DEFAULT_EXPIRATION).to eq(true)
- expect(redis.get(key)).to eq(described_class::DEFAULT_VALUE.to_s)
+ expect(redis.get(key)).to eq('1')
end
end
- it 'allows overriding the default value' do
- described_class.set('123', value: 2)
+ it 'does not store anything with a nil expiry' do
+ described_class.set('123', nil)
key = described_class.key_for('123')
Sidekiq.redis do |redis|
- expect(redis.exists(key)).to eq(true)
- expect(redis.ttl(key) > 0).to eq(true)
- expect(redis.get(key)).to eq('2')
+ expect(redis.exists(key)).to eq(false)
end
end
end
@@ -138,33 +136,5 @@ RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues, :clean_gitlab_
it 'handles an empty array' do
expect(described_class.job_status([])).to eq([])
end
-
- context 'when log_implicit_sidekiq_status_calls is enabled' do
- it 'logs keys that contained the default value' do
- described_class.set('123', value: 2)
- described_class.set('456')
- described_class.set('012')
-
- expect(Sidekiq.logger).to receive(:info).with(message: described_class::DEFAULT_VALUE_MESSAGE,
- keys: [described_class.key_for('456'), described_class.key_for('012')])
-
- expect(described_class.job_status(%w(123 456 789 012))).to eq([true, true, false, true])
- end
- end
-
- context 'when log_implicit_sidekiq_status_calls is disabled' do
- before do
- stub_feature_flags(log_implicit_sidekiq_status_calls: false)
- end
-
- it 'does not perform any logging' do
- described_class.set('123', value: 2)
- described_class.set('456')
-
- expect(Sidekiq.logger).not_to receive(:info)
-
- expect(described_class.job_status(%w(123 456 789))).to eq([true, true, false])
- end
- end
end
end
diff --git a/spec/lib/gitlab/sourcegraph_spec.rb b/spec/lib/gitlab/sourcegraph_spec.rb
index 6bebd1ca3e6..e2c1e959cbf 100644
--- a/spec/lib/gitlab/sourcegraph_spec.rb
+++ b/spec/lib/gitlab/sourcegraph_spec.rb
@@ -37,6 +37,12 @@ RSpec.describe Gitlab::Sourcegraph do
it { is_expected.to be_truthy }
end
+
+ context 'when feature is disabled' do
+ let(:feature_scope) { false }
+
+ it { is_expected.to be_falsey }
+ end
end
describe '.feature_enabled?' do
diff --git a/spec/lib/gitlab/ssh_public_key_spec.rb b/spec/lib/gitlab/ssh_public_key_spec.rb
index e1a588a4b7d..38486b313cb 100644
--- a/spec/lib/gitlab/ssh_public_key_spec.rb
+++ b/spec/lib/gitlab/ssh_public_key_spec.rb
@@ -21,6 +21,14 @@ RSpec.describe Gitlab::SSHPublicKey, lib: true do
end
end
+ describe '.supported_types' do
+ it 'returns array with the names of supported technologies' do
+ expect(described_class.supported_types).to eq(
+ [:rsa, :dsa, :ecdsa, :ed25519]
+ )
+ end
+ end
+
describe '.supported_sizes(name)' do
where(:name, :sizes) do
[
@@ -31,14 +39,43 @@ RSpec.describe Gitlab::SSHPublicKey, lib: true do
]
end
- subject { described_class.supported_sizes(name) }
-
with_them do
it { expect(described_class.supported_sizes(name)).to eq(sizes) }
it { expect(described_class.supported_sizes(name.to_s)).to eq(sizes) }
end
end
+ describe '.supported_algorithms' do
+ it 'returns all supported algorithms' do
+ expect(described_class.supported_algorithms).to eq(
+ %w(
+ ssh-rsa
+ ssh-dss
+ ecdsa-sha2-nistp256 ecdsa-sha2-nistp384 ecdsa-sha2-nistp521
+ ssh-ed25519
+ )
+ )
+ end
+ end
+
+ describe '.supported_algorithms_for_name' do
+ where(:name, :algorithms) do
+ [
+ [:rsa, %w(ssh-rsa)],
+ [:dsa, %w(ssh-dss)],
+ [:ecdsa, %w(ecdsa-sha2-nistp256 ecdsa-sha2-nistp384 ecdsa-sha2-nistp521)],
+ [:ed25519, %w(ssh-ed25519)]
+ ]
+ end
+
+ with_them do
+ it "returns all supported algorithms for #{params[:name]}" do
+ expect(described_class.supported_algorithms_for_name(name)).to eq(algorithms)
+ expect(described_class.supported_algorithms_for_name(name.to_s)).to eq(algorithms)
+ end
+ end
+ end
+
describe '.sanitize(key_content)' do
let(:content) { build(:key).key }
diff --git a/spec/lib/gitlab/themes_spec.rb b/spec/lib/gitlab/themes_spec.rb
index 6d03cf496b8..c9dc23d7c14 100644
--- a/spec/lib/gitlab/themes_spec.rb
+++ b/spec/lib/gitlab/themes_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe Gitlab::Themes, lib: true do
it 'prevents an infinite loop when configuration default is invalid' do
default = described_class::APPLICATION_DEFAULT
- themes = described_class::THEMES
+ themes = described_class.available_themes
config = double(default_theme: 0).as_null_object
allow(Gitlab).to receive(:config).and_return(config)
diff --git a/spec/lib/gitlab/tracking/standard_context_spec.rb b/spec/lib/gitlab/tracking/standard_context_spec.rb
index 7d678db5ec8..c88b0af30f6 100644
--- a/spec/lib/gitlab/tracking/standard_context_spec.rb
+++ b/spec/lib/gitlab/tracking/standard_context_spec.rb
@@ -58,6 +58,10 @@ RSpec.describe Gitlab::Tracking::StandardContext do
expect(snowplow_context.to_json.dig(:data, :source)).to eq(described_class::GITLAB_RAILS_SOURCE)
end
+ it 'contains context_generated_at timestamp', :freeze_time do
+ expect(snowplow_context.to_json.dig(:data, :context_generated_at)).to eq(Time.current)
+ end
+
context 'plan' do
context 'when namespace is not available' do
it 'is nil' do
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb
index 0a32bdb95d3..4d84423cde4 100644
--- a/spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::DatabaseMetric do
let_it_be(:issues) { Issue.all }
before do
- allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
+ allow(Issue.connection).to receive(:transaction_open?).and_return(false)
end
it 'calculates a correct result' do
@@ -82,7 +82,7 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::DatabaseMetric do
end.new(time_frame: 'all')
end
- it 'calculates a correct result' do
+ it 'calculates a correct result', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/349762' do
expect(subject.value).to be_within(Gitlab::Database::PostgresHll::BatchDistinctCounter::ERROR_RATE).percent_of(3)
end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/generic_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/generic_metric_spec.rb
index c8cb1bb4373..cc4df696b37 100644
--- a/spec/lib/gitlab/usage/metrics/instrumentations/generic_metric_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/generic_metric_spec.rb
@@ -17,9 +17,25 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::GenericMetric do
end
context 'when raising an exception' do
- it 'return the custom fallback' do
+ before do
+ allow(Gitlab::ErrorTracking).to receive(:should_raise_for_dev?).and_return(should_raise_for_dev)
expect(ApplicationRecord.database).to receive(:version).and_raise('Error')
- expect(subject.value).to eq(custom_fallback)
+ end
+
+ context 'with should_raise_for_dev? false' do
+ let(:should_raise_for_dev) { false }
+
+ it 'return the custom fallback' do
+ expect(subject.value).to eq(custom_fallback)
+ end
+ end
+
+ context 'with should_raise_for_dev? true' do
+ let(:should_raise_for_dev) { true }
+
+ it 'raises an error' do
+ expect { subject.value }.to raise_error('Error')
+ end
end
end
end
@@ -38,9 +54,25 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::GenericMetric do
end
context 'when raising an exception' do
- it 'return the default fallback' do
+ before do
+ allow(Gitlab::ErrorTracking).to receive(:should_raise_for_dev?).and_return(should_raise_for_dev)
expect(ApplicationRecord.database).to receive(:version).and_raise('Error')
- expect(subject.value).to eq(described_class::FALLBACK)
+ end
+
+ context 'with should_raise_for_dev? false' do
+ let(:should_raise_for_dev) { false }
+
+ it 'returns the default fallback' do
+ expect(subject.value).to eq(described_class::FALLBACK)
+ end
+ end
+
+ context 'with should_raise_for_dev? true' do
+ let(:should_raise_for_dev) { true }
+
+ it 'raises an error' do
+ expect { subject.value }.to raise_error('Error')
+ end
end
end
end
diff --git a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
index 0ec805714e3..f7ff68af8a2 100644
--- a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
@@ -48,7 +48,8 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
'epic_boards_usage',
'secure',
'importer',
- 'network_policies'
+ 'network_policies',
+ 'geo'
)
end
end
diff --git a/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb
index 6f201b43390..1ac344d9250 100644
--- a/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb
@@ -13,10 +13,6 @@ RSpec.describe Gitlab::UsageDataCounters::PackageEventCounter, :clean_gitlab_red
end
end
- it 'includes the right events' do
- expect(described_class::KNOWN_EVENTS.size).to eq 63
- end
-
described_class::KNOWN_EVENTS.each do |event|
it_behaves_like 'usage counter with totals', event
end
@@ -24,8 +20,8 @@ RSpec.describe Gitlab::UsageDataCounters::PackageEventCounter, :clean_gitlab_red
describe '.fetch_supported_event' do
subject { described_class.fetch_supported_event(event_name) }
- let(:event_name) { 'package_events_i_package_composer_push_package' }
+ let(:event_name) { 'package_events_i_package_conan_push_package' }
- it { is_expected.to eq 'i_package_composer_push_package' }
+ it { is_expected.to eq 'i_package_conan_push_package' }
end
end
diff --git a/spec/lib/gitlab/usage_data_queries_spec.rb b/spec/lib/gitlab/usage_data_queries_spec.rb
index 64eff76a9f2..a8cf87d9364 100644
--- a/spec/lib/gitlab/usage_data_queries_spec.rb
+++ b/spec/lib/gitlab/usage_data_queries_spec.rb
@@ -3,10 +3,6 @@
require 'spec_helper'
RSpec.describe Gitlab::UsageDataQueries do
- before do
- allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
- end
-
describe '#add_metric' do
let(:metric) { 'CountBoardsMetric' }
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index 015ecd1671e..427e8e67090 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
stub_usage_data_connections
stub_object_store_settings
clear_memoized_values(described_class::CE_MEMOIZED_VALUES)
+ stub_database_flavor_check('Cloud SQL for PostgreSQL')
end
describe '.uncached_data' do
@@ -160,7 +161,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
another_project = create(:project, :repository, creator: another_user)
create(:remote_mirror, project: another_project, enabled: false)
create(:snippet, author: user)
- create(:suggestion, note: create(:note, project: project))
end
expect(described_class.usage_activity_by_stage_create({})).to include(
@@ -170,8 +170,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
projects_with_disable_overriding_approvers_per_merge_request: 2,
projects_without_disable_overriding_approvers_per_merge_request: 6,
remote_mirrors: 2,
- snippets: 2,
- suggestions: 2
+ snippets: 2
)
expect(described_class.usage_activity_by_stage_create(described_class.monthly_time_range_db_params)).to include(
deploy_keys: 1,
@@ -180,8 +179,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
projects_with_disable_overriding_approvers_per_merge_request: 1,
projects_without_disable_overriding_approvers_per_merge_request: 3,
remote_mirrors: 1,
- snippets: 1,
- suggestions: 1
+ snippets: 1
)
end
end
@@ -278,8 +276,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(described_class.usage_activity_by_stage_manage({})).to include(
{
bulk_imports: {
- gitlab_v1: 2,
- gitlab: Gitlab::UsageData::DEPRECATED_VALUE
+ gitlab_v1: 2
},
project_imports: {
bitbucket: 2,
@@ -302,32 +299,13 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
group_imports: {
group_import: 2,
gitlab_migration: 2
- },
- projects_imported: {
- total: Gitlab::UsageData::DEPRECATED_VALUE,
- gitlab_project: Gitlab::UsageData::DEPRECATED_VALUE,
- gitlab: Gitlab::UsageData::DEPRECATED_VALUE,
- github: Gitlab::UsageData::DEPRECATED_VALUE,
- bitbucket: Gitlab::UsageData::DEPRECATED_VALUE,
- bitbucket_server: Gitlab::UsageData::DEPRECATED_VALUE,
- gitea: Gitlab::UsageData::DEPRECATED_VALUE,
- git: Gitlab::UsageData::DEPRECATED_VALUE,
- manifest: Gitlab::UsageData::DEPRECATED_VALUE
- },
- issues_imported: {
- jira: Gitlab::UsageData::DEPRECATED_VALUE,
- fogbugz: Gitlab::UsageData::DEPRECATED_VALUE,
- phabricator: Gitlab::UsageData::DEPRECATED_VALUE,
- csv: Gitlab::UsageData::DEPRECATED_VALUE
- },
- groups_imported: Gitlab::UsageData::DEPRECATED_VALUE
+ }
}
)
expect(described_class.usage_activity_by_stage_manage(described_class.monthly_time_range_db_params)).to include(
{
bulk_imports: {
- gitlab_v1: 1,
- gitlab: Gitlab::UsageData::DEPRECATED_VALUE
+ gitlab_v1: 1
},
project_imports: {
bitbucket: 1,
@@ -350,25 +328,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
group_imports: {
group_import: 1,
gitlab_migration: 1
- },
- projects_imported: {
- total: Gitlab::UsageData::DEPRECATED_VALUE,
- gitlab_project: Gitlab::UsageData::DEPRECATED_VALUE,
- gitlab: Gitlab::UsageData::DEPRECATED_VALUE,
- github: Gitlab::UsageData::DEPRECATED_VALUE,
- bitbucket: Gitlab::UsageData::DEPRECATED_VALUE,
- bitbucket_server: Gitlab::UsageData::DEPRECATED_VALUE,
- gitea: Gitlab::UsageData::DEPRECATED_VALUE,
- git: Gitlab::UsageData::DEPRECATED_VALUE,
- manifest: Gitlab::UsageData::DEPRECATED_VALUE
- },
- issues_imported: {
- jira: Gitlab::UsageData::DEPRECATED_VALUE,
- fogbugz: Gitlab::UsageData::DEPRECATED_VALUE,
- phabricator: Gitlab::UsageData::DEPRECATED_VALUE,
- csv: Gitlab::UsageData::DEPRECATED_VALUE
- },
- groups_imported: Gitlab::UsageData::DEPRECATED_VALUE
+ }
}
)
end
@@ -920,6 +880,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(subject[:database][:adapter]).to eq(ApplicationRecord.database.adapter_name)
expect(subject[:database][:version]).to eq(ApplicationRecord.database.version)
expect(subject[:database][:pg_system_id]).to eq(ApplicationRecord.database.system_id)
+ expect(subject[:database][:flavor]).to eq('Cloud SQL for PostgreSQL')
expect(subject[:mail][:smtp_server]).to eq(ActionMailer::Base.smtp_settings[:address])
expect(subject[:gitaly][:version]).to be_present
expect(subject[:gitaly][:servers]).to be >= 1
@@ -964,10 +925,25 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
context 'when retrieve component setting meets exception' do
- it 'returns -1 for component enable status' do
+ before do
+ allow(Gitlab::ErrorTracking).to receive(:should_raise_for_dev?).and_return(should_raise_for_dev)
allow(Settings).to receive(:[]).with(component).and_raise(StandardError)
+ end
+
+ context 'with should_raise_for_dev? false' do
+ let(:should_raise_for_dev) { false }
+
+ it 'returns -1 for component enable status' do
+ expect(subject).to eq({ enabled: -1 })
+ end
+ end
+
+ context 'with should_raise_for_dev? true' do
+ let(:should_raise_for_dev) { true }
- expect(subject).to eq({ enabled: -1 })
+ it 'raises an error' do
+ expect { subject.value }.to raise_error(StandardError)
+ end
end
end
end
@@ -1328,6 +1304,8 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
let(:categories) { ::Gitlab::UsageDataCounters::HLLRedisCounter.categories }
+ let(:ignored_metrics) { ["i_package_composer_deploy_token_weekly"] }
+
it 'has all known_events' do
expect(subject).to have_key(:redis_hll_counters)
@@ -1337,6 +1315,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
keys = ::Gitlab::UsageDataCounters::HLLRedisCounter.events_for_category(category)
metrics = keys.map { |key| "#{key}_weekly" } + keys.map { |key| "#{key}_monthly" }
+ metrics -= ignored_metrics
if ::Gitlab::UsageDataCounters::HLLRedisCounter::CATEGORIES_FOR_TOTALS.include?(category)
metrics.append("#{category}_total_unique_counts_weekly", "#{category}_total_unique_counts_monthly")
diff --git a/spec/lib/gitlab/utils/usage_data_spec.rb b/spec/lib/gitlab/utils/usage_data_spec.rb
index 325ace6fbbf..b44c6565538 100644
--- a/spec/lib/gitlab/utils/usage_data_spec.rb
+++ b/spec/lib/gitlab/utils/usage_data_spec.rb
@@ -5,11 +5,13 @@ require 'spec_helper'
RSpec.describe Gitlab::Utils::UsageData do
include Database::DatabaseHelpers
- shared_examples 'failing hardening method' do
+ shared_examples 'failing hardening method' do |raised_exception|
+ let(:exception) { raised_exception || ActiveRecord::StatementInvalid }
+
before do
allow(Gitlab::ErrorTracking).to receive(:should_raise_for_dev?).and_return(should_raise_for_dev)
stub_const("Gitlab::Utils::UsageData::FALLBACK", fallback)
- allow(failing_class).to receive(failing_method).and_raise(ActiveRecord::StatementInvalid)
+ allow(failing_class).to receive(failing_method).and_raise(exception) unless failing_class.nil?
end
context 'with should_raise_for_dev? false' do
@@ -24,7 +26,7 @@ RSpec.describe Gitlab::Utils::UsageData do
let(:should_raise_for_dev) { true }
it 'raises an error' do
- expect { subject }.to raise_error(ActiveRecord::StatementInvalid)
+ expect { subject }.to raise_error(exception)
end
end
end
@@ -366,8 +368,13 @@ RSpec.describe Gitlab::Utils::UsageData do
expect(described_class.add).to eq(0)
end
- it 'returns the fallback value when adding fails' do
- expect(described_class.add(nil, 3)).to eq(-1)
+ context 'when adding fails' do
+ subject { described_class.add(nil, 3) }
+
+ let(:fallback) { -1 }
+ let(:failing_class) { nil }
+
+ it_behaves_like 'failing hardening method', StandardError
end
it 'returns the fallback value one of the arguments is negative' do
@@ -376,8 +383,13 @@ RSpec.describe Gitlab::Utils::UsageData do
end
describe '#alt_usage_data' do
- it 'returns the fallback when it gets an error' do
- expect(described_class.alt_usage_data { raise StandardError } ).to eq(-1)
+ context 'when method fails' do
+ subject { described_class.alt_usage_data { raise StandardError } }
+
+ let(:fallback) { -1 }
+ let(:failing_class) { nil }
+
+ it_behaves_like 'failing hardening method', StandardError
end
it 'returns the evaluated block when give' do
@@ -391,14 +403,22 @@ RSpec.describe Gitlab::Utils::UsageData do
describe '#redis_usage_data' do
context 'with block given' do
- it 'returns the fallback when it gets an error' do
- expect(described_class.redis_usage_data { raise ::Redis::CommandError } ).to eq(-1)
+ context 'when method fails' do
+ subject { described_class.redis_usage_data { raise ::Redis::CommandError } }
+
+ let(:fallback) { -1 }
+ let(:failing_class) { nil }
+
+ it_behaves_like 'failing hardening method', ::Redis::CommandError
end
- it 'returns the fallback when Redis HLL raises any error' do
- stub_const("Gitlab::Utils::UsageData::FALLBACK", 15)
+ context 'when Redis HLL raises any error' do
+ subject { described_class.redis_usage_data { raise Gitlab::UsageDataCounters::HLLRedisCounter::CategoryMismatch } }
+
+ let(:fallback) { 15 }
+ let(:failing_class) { nil }
- expect(described_class.redis_usage_data { raise Gitlab::UsageDataCounters::HLLRedisCounter::CategoryMismatch } ).to eq(15)
+ it_behaves_like 'failing hardening method', Gitlab::UsageDataCounters::HLLRedisCounter::CategoryMismatch
end
it 'returns the evaluated block when given' do
@@ -407,9 +427,14 @@ RSpec.describe Gitlab::Utils::UsageData do
end
context 'with counter given' do
- it 'returns the falback values for all counter keys when it gets an error' do
- allow(::Gitlab::UsageDataCounters::WikiPageCounter).to receive(:totals).and_raise(::Redis::CommandError)
- expect(described_class.redis_usage_data(::Gitlab::UsageDataCounters::WikiPageCounter)).to eql(::Gitlab::UsageDataCounters::WikiPageCounter.fallback_totals)
+ context 'when gets an error' do
+ subject { described_class.redis_usage_data(::Gitlab::UsageDataCounters::WikiPageCounter) }
+
+ let(:fallback) { ::Gitlab::UsageDataCounters::WikiPageCounter.fallback_totals }
+ let(:failing_class) { ::Gitlab::UsageDataCounters::WikiPageCounter }
+ let(:failing_method) { :totals }
+
+ it_behaves_like 'failing hardening method', ::Redis::CommandError
end
it 'returns the totals when couter is given' do
diff --git a/spec/lib/gitlab/web_hooks/recursion_detection_spec.rb b/spec/lib/gitlab/web_hooks/recursion_detection_spec.rb
new file mode 100644
index 00000000000..45170864967
--- /dev/null
+++ b/spec/lib/gitlab/web_hooks/recursion_detection_spec.rb
@@ -0,0 +1,221 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::WebHooks::RecursionDetection, :clean_gitlab_redis_shared_state, :request_store do
+ let_it_be(:web_hook) { create(:project_hook) }
+
+ let!(:uuid_class) { described_class::UUID }
+
+ describe '.set_from_headers' do
+ let(:old_uuid) { SecureRandom.uuid }
+ let(:rack_headers) { Rack::MockRequest.env_for("/").merge(headers) }
+
+ subject(:set_from_headers) { described_class.set_from_headers(rack_headers) }
+
+ # Note, having a previous `request_uuid` value set before `.set_from_headers` is
+ # called is contrived and should not normally happen. However, testing with this scenario
+ # allows us to assert the ideal outcome if it ever were to happen.
+ before do
+ uuid_class.instance.request_uuid = old_uuid
+ end
+
+ context 'when the detection header is present' do
+ let(:new_uuid) { SecureRandom.uuid }
+
+ let(:headers) do
+ { uuid_class::HEADER => new_uuid }
+ end
+
+ it 'sets the request UUID value from the headers' do
+ set_from_headers
+
+ expect(uuid_class.instance.request_uuid).to eq(new_uuid)
+ end
+ end
+
+ context 'when detection header is not present' do
+ let(:headers) { {} }
+
+ it 'does not set the request UUID' do
+ set_from_headers
+
+ expect(uuid_class.instance.request_uuid).to eq(old_uuid)
+ end
+ end
+ end
+
+ describe '.set_request_uuid' do
+ it 'sets the request UUID value' do
+ new_uuid = SecureRandom.uuid
+
+ described_class.set_request_uuid(new_uuid)
+
+ expect(uuid_class.instance.request_uuid).to eq(new_uuid)
+ end
+ end
+
+ describe '.register!' do
+ let_it_be(:second_web_hook) { create(:project_hook) }
+ let_it_be(:third_web_hook) { create(:project_hook) }
+
+ def cache_key(hook)
+ described_class.send(:cache_key_for_hook, hook)
+ end
+
+ it 'stores IDs in the same cache when a request UUID is set, until the request UUID changes', :aggregate_failures do
+ # Register web_hook and second_web_hook against the same request UUID.
+ uuid_class.instance.request_uuid = SecureRandom.uuid
+ described_class.register!(web_hook)
+ described_class.register!(second_web_hook)
+ first_cache_key = cache_key(web_hook)
+ second_cache_key = cache_key(second_web_hook)
+
+ # Register third_web_hook against a new request UUID.
+ uuid_class.instance.request_uuid = SecureRandom.uuid
+ described_class.register!(third_web_hook)
+ third_cache_key = cache_key(third_web_hook)
+
+ expect(first_cache_key).to eq(second_cache_key)
+ expect(second_cache_key).not_to eq(third_cache_key)
+
+ ::Gitlab::Redis::SharedState.with do |redis|
+ members = redis.smembers(first_cache_key).map(&:to_i)
+ expect(members).to contain_exactly(web_hook.id, second_web_hook.id)
+
+ members = redis.smembers(third_cache_key).map(&:to_i)
+ expect(members).to contain_exactly(third_web_hook.id)
+ end
+ end
+
+ it 'stores IDs in unique caches when no request UUID is present', :aggregate_failures do
+ described_class.register!(web_hook)
+ described_class.register!(second_web_hook)
+ described_class.register!(third_web_hook)
+
+ first_cache_key = cache_key(web_hook)
+ second_cache_key = cache_key(second_web_hook)
+ third_cache_key = cache_key(third_web_hook)
+
+ expect([first_cache_key, second_cache_key, third_cache_key].compact.length).to eq(3)
+
+ ::Gitlab::Redis::SharedState.with do |redis|
+ members = redis.smembers(first_cache_key).map(&:to_i)
+ expect(members).to contain_exactly(web_hook.id)
+
+ members = redis.smembers(second_cache_key).map(&:to_i)
+ expect(members).to contain_exactly(second_web_hook.id)
+
+ members = redis.smembers(third_cache_key).map(&:to_i)
+ expect(members).to contain_exactly(third_web_hook.id)
+ end
+ end
+
+ it 'touches the storage ttl each time it is called', :aggregate_failures do
+ freeze_time do
+ described_class.register!(web_hook)
+
+ ::Gitlab::Redis::SharedState.with do |redis|
+ expect(redis.ttl(cache_key(web_hook))).to eq(described_class::TOUCH_CACHE_TTL.to_i)
+ end
+ end
+
+ travel_to(1.minute.from_now) do
+ described_class.register!(second_web_hook)
+
+ ::Gitlab::Redis::SharedState.with do |redis|
+ expect(redis.ttl(cache_key(web_hook))).to eq(described_class::TOUCH_CACHE_TTL.to_i)
+ end
+ end
+ end
+ end
+
+ describe 'block?' do
+ let_it_be(:registered_web_hooks) { create_list(:project_hook, 2) }
+
+ subject(:block?) { described_class.block?(web_hook) }
+
+ before do
+ # Register some previous webhooks.
+ uuid_class.instance.request_uuid = SecureRandom.uuid
+
+ registered_web_hooks.each do |web_hook|
+ described_class.register!(web_hook)
+ end
+ end
+
+ it 'returns false if webhook should not be blocked' do
+ is_expected.to eq(false)
+ end
+
+ context 'when the webhook has previously fired' do
+ before do
+ described_class.register!(web_hook)
+ end
+
+ it 'returns true' do
+ is_expected.to eq(true)
+ end
+
+ context 'when the request UUID changes again' do
+ before do
+ uuid_class.instance.request_uuid = SecureRandom.uuid
+ end
+
+ it 'returns false' do
+ is_expected.to eq(false)
+ end
+ end
+ end
+
+ context 'when the count limit has been reached' do
+ let_it_be(:registered_web_hooks) { create_list(:project_hook, 2) }
+
+ before do
+ registered_web_hooks.each do |web_hook|
+ described_class.register!(web_hook)
+ end
+
+ stub_const("#{described_class.name}::COUNT_LIMIT", registered_web_hooks.size)
+ end
+
+ it 'returns true' do
+ is_expected.to eq(true)
+ end
+
+ context 'when the request UUID changes again' do
+ before do
+ uuid_class.instance.request_uuid = SecureRandom.uuid
+ end
+
+ it 'returns false' do
+ is_expected.to eq(false)
+ end
+ end
+ end
+ end
+
+ describe '.header' do
+ subject(:header) { described_class.header(web_hook) }
+
+ it 'returns a header with the UUID value' do
+ uuid = SecureRandom.uuid
+ allow(uuid_class.instance).to receive(:uuid_for_hook).and_return(uuid)
+
+ is_expected.to eq({ uuid_class::HEADER => uuid })
+ end
+ end
+
+ describe '.to_log' do
+ subject(:to_log) { described_class.to_log(web_hook) }
+
+ it 'returns the UUID value and all registered webhook IDs in a Hash' do
+ uuid = SecureRandom.uuid
+ allow(uuid_class.instance).to receive(:uuid_for_hook).and_return(uuid)
+ registered_web_hooks = create_list(:project_hook, 2)
+ registered_web_hooks.each { described_class.register!(_1) }
+
+ is_expected.to eq({ uuid: uuid, ids: registered_web_hooks.map(&:id) })
+ end
+ end
+end
diff --git a/spec/lib/gitlab_edition_spec.rb b/spec/lib/gitlab_edition_spec.rb
new file mode 100644
index 00000000000..2f1316819ec
--- /dev/null
+++ b/spec/lib/gitlab_edition_spec.rb
@@ -0,0 +1,160 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabEdition do
+ before do
+ # Make sure the ENV is clean
+ stub_env('FOSS_ONLY', nil)
+ stub_env('EE_ONLY', nil)
+
+ described_class.instance_variable_set(:@is_ee, nil)
+ described_class.instance_variable_set(:@is_jh, nil)
+ end
+
+ after do
+ described_class.instance_variable_set(:@is_ee, nil)
+ described_class.instance_variable_set(:@is_jh, nil)
+ end
+
+ describe '.root' do
+ it 'returns the root path of the app' do
+ expect(described_class.root).to eq(Pathname.new(File.expand_path('../..', __dir__)))
+ end
+ end
+
+ describe 'extensions' do
+ context 'when .jh? is true' do
+ before do
+ allow(described_class).to receive(:jh?).and_return(true)
+ end
+
+ it 'returns %w[ee jh]' do
+ expect(described_class.extensions).to match_array(%w[ee jh])
+ end
+ end
+
+ context 'when .ee? is true' do
+ before do
+ allow(described_class).to receive(:jh?).and_return(false)
+ allow(described_class).to receive(:ee?).and_return(true)
+ end
+
+ it 'returns %w[ee]' do
+ expect(described_class.extensions).to match_array(%w[ee])
+ end
+ end
+
+ context 'when neither .jh? and .ee? are true' do
+ before do
+ allow(described_class).to receive(:jh?).and_return(false)
+ allow(described_class).to receive(:ee?).and_return(false)
+ end
+
+ it 'returns the extensions according to the current edition' do
+ expect(described_class.extensions).to be_empty
+ end
+ end
+ end
+
+ describe '.ee? and .jh?' do
+ def stub_path(*paths, **arguments)
+ root = Pathname.new('dummy')
+ pathname = double(:path, **arguments)
+
+ allow(described_class)
+ .to receive(:root)
+ .and_return(root)
+
+ allow(root).to receive(:join)
+
+ paths.each do |path|
+ allow(root)
+ .to receive(:join)
+ .with(path)
+ .and_return(pathname)
+ end
+ end
+
+ describe '.ee?' do
+ context 'for EE' do
+ before do
+ stub_path('ee/app/models/license.rb', exist?: true)
+ end
+
+ context 'when using FOSS_ONLY=1' do
+ before do
+ stub_env('FOSS_ONLY', '1')
+ end
+
+ it 'returns not to be EE' do
+ expect(described_class).not_to be_ee
+ end
+ end
+
+ context 'when using FOSS_ONLY=0' do
+ before do
+ stub_env('FOSS_ONLY', '0')
+ end
+
+ it 'returns to be EE' do
+ expect(described_class).to be_ee
+ end
+ end
+
+ context 'when using default FOSS_ONLY' do
+ it 'returns to be EE' do
+ expect(described_class).to be_ee
+ end
+ end
+ end
+
+ context 'for CE' do
+ before do
+ stub_path('ee/app/models/license.rb', exist?: false)
+ end
+
+ it 'returns not to be EE' do
+ expect(described_class).not_to be_ee
+ end
+ end
+ end
+
+ describe '.jh?' do
+ context 'for JH' do
+ before do
+ stub_path(
+ 'ee/app/models/license.rb',
+ 'jh',
+ exist?: true)
+ end
+
+ context 'when using default FOSS_ONLY and EE_ONLY' do
+ it 'returns to be JH' do
+ expect(described_class).to be_jh
+ end
+ end
+
+ context 'when using FOSS_ONLY=1' do
+ before do
+ stub_env('FOSS_ONLY', '1')
+ end
+
+ it 'returns not to be JH' do
+ expect(described_class).not_to be_jh
+ end
+ end
+
+ context 'when using EE_ONLY=1' do
+ before do
+ stub_env('EE_ONLY', '1')
+ end
+
+ it 'returns not to be JH' do
+ expect(described_class).not_to be_jh
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab_spec.rb b/spec/lib/gitlab_spec.rb
index 869eaf26772..49ba4debe31 100644
--- a/spec/lib/gitlab_spec.rb
+++ b/spec/lib/gitlab_spec.rb
@@ -3,9 +3,19 @@
require 'spec_helper'
RSpec.describe Gitlab do
- describe '.root' do
- it 'returns the root path of the app' do
- expect(described_class.root).to eq(Pathname.new(File.expand_path('../..', __dir__)))
+ %w[root extensions ee? jh?].each do |method_name|
+ it "delegates #{method_name} to GitlabEdition" do
+ expect(GitlabEdition).to receive(method_name)
+
+ described_class.public_send(method_name)
+ end
+ end
+
+ %w[ee jh].each do |method_name|
+ it "delegates #{method_name} to GitlabEdition" do
+ expect(GitlabEdition).to receive(method_name)
+
+ described_class.public_send(method_name) {}
end
end
@@ -248,121 +258,6 @@ RSpec.describe Gitlab do
end
end
- describe 'ee? and jh?' do
- before do
- # Make sure the ENV is clean
- stub_env('FOSS_ONLY', nil)
- stub_env('EE_ONLY', nil)
-
- described_class.instance_variable_set(:@is_ee, nil)
- described_class.instance_variable_set(:@is_jh, nil)
- end
-
- after do
- described_class.instance_variable_set(:@is_ee, nil)
- described_class.instance_variable_set(:@is_jh, nil)
- end
-
- def stub_path(*paths, **arguments)
- root = Pathname.new('dummy')
- pathname = double(:path, **arguments)
-
- allow(described_class)
- .to receive(:root)
- .and_return(root)
-
- allow(root).to receive(:join)
-
- paths.each do |path|
- allow(root)
- .to receive(:join)
- .with(path)
- .and_return(pathname)
- end
- end
-
- describe '.ee?' do
- context 'for EE' do
- before do
- stub_path('ee/app/models/license.rb', exist?: true)
- end
-
- context 'when using FOSS_ONLY=1' do
- before do
- stub_env('FOSS_ONLY', '1')
- end
-
- it 'returns not to be EE' do
- expect(described_class).not_to be_ee
- end
- end
-
- context 'when using FOSS_ONLY=0' do
- before do
- stub_env('FOSS_ONLY', '0')
- end
-
- it 'returns to be EE' do
- expect(described_class).to be_ee
- end
- end
-
- context 'when using default FOSS_ONLY' do
- it 'returns to be EE' do
- expect(described_class).to be_ee
- end
- end
- end
-
- context 'for CE' do
- before do
- stub_path('ee/app/models/license.rb', exist?: false)
- end
-
- it 'returns not to be EE' do
- expect(described_class).not_to be_ee
- end
- end
- end
-
- describe '.jh?' do
- context 'for JH' do
- before do
- stub_path(
- 'ee/app/models/license.rb',
- 'jh',
- exist?: true)
- end
-
- context 'when using default FOSS_ONLY and EE_ONLY' do
- it 'returns to be JH' do
- expect(described_class).to be_jh
- end
- end
-
- context 'when using FOSS_ONLY=1' do
- before do
- stub_env('FOSS_ONLY', '1')
- end
-
- it 'returns not to be JH' do
- expect(described_class).not_to be_jh
- end
- end
-
- context 'when using EE_ONLY=1' do
- before do
- stub_env('EE_ONLY', '1')
- end
-
- it 'returns not to be JH' do
- expect(described_class).not_to be_jh
- end
- end
- end
- end
- end
-
describe '.http_proxy_env?' do
it 'returns true when lower case https' do
stub_env('https_proxy', 'https://my.proxy')
diff --git a/spec/lib/sidebars/groups/menus/settings_menu_spec.rb b/spec/lib/sidebars/groups/menus/settings_menu_spec.rb
index 314c4cdc602..252da8ea699 100644
--- a/spec/lib/sidebars/groups/menus/settings_menu_spec.rb
+++ b/spec/lib/sidebars/groups/menus/settings_menu_spec.rb
@@ -56,6 +56,12 @@ RSpec.describe Sidebars::Groups::Menus::SettingsMenu do
it_behaves_like 'access rights checks'
end
+ describe 'Access Tokens' do
+ let(:item_id) { :access_tokens }
+
+ it_behaves_like 'access rights checks'
+ end
+
describe 'Repository menu' do
let(:item_id) { :repository }
diff --git a/spec/lib/sidebars/projects/panel_spec.rb b/spec/lib/sidebars/projects/panel_spec.rb
index 2e79ced7039..7e69a2dfe52 100644
--- a/spec/lib/sidebars/projects/panel_spec.rb
+++ b/spec/lib/sidebars/projects/panel_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
RSpec.describe Sidebars::Projects::Panel do
- let(:project) { build(:project) }
+ let_it_be(:project) { create(:project) }
+
let(:context) { Sidebars::Projects::Context.new(current_user: nil, container: project) }
subject { described_class.new(context) }
diff --git a/spec/lib/version_check_spec.rb b/spec/lib/version_check_spec.rb
index d7a772a3f7e..736a8f9595e 100644
--- a/spec/lib/version_check_spec.rb
+++ b/spec/lib/version_check_spec.rb
@@ -3,12 +3,6 @@
require 'spec_helper'
RSpec.describe VersionCheck do
- describe '.image_url' do
- it 'returns the correct URL' do
- expect(described_class.image_url).to match(%r{\A#{Regexp.escape(described_class.host)}/check\.svg\?gitlab_info=\w+})
- end
- end
-
describe '.url' do
it 'returns the correct URL' do
expect(described_class.url).to match(%r{\A#{Regexp.escape(described_class.host)}/check\.json\?gitlab_info=\w+})