author     GitLab Bot <gitlab-bot@gitlab.com>  2021-09-20 13:18:24 +0000
committer  GitLab Bot <gitlab-bot@gitlab.com>  2021-09-20 13:18:24 +0000
commit     0653e08efd039a5905f3fa4f6e9cef9f5d2f799c (patch)
tree       4dcc884cf6d81db44adae4aa99f8ec1233a41f55 /spec/lib
parent     744144d28e3e7fddc117924fef88de5d9674fe4c (diff)
download   gitlab-ce-0653e08efd039a5905f3fa4f6e9cef9f5d2f799c.tar.gz
Add latest changes from gitlab-org/gitlab@14-3-stable-ee (tag: v14.3.0-rc42)
Diffstat (limited to 'spec/lib')
-rw-r--r--  spec/lib/api/entities/clusters/agent_authorization_spec.rb | 17
-rw-r--r--  spec/lib/backup/gitaly_backup_spec.rb | 15
-rw-r--r--  spec/lib/backup/manager_spec.rb | 71
-rw-r--r--  spec/lib/banzai/filter/audio_link_filter_spec.rb | 16
-rw-r--r--  spec/lib/banzai/filter/video_link_filter_spec.rb | 16
-rw-r--r--  spec/lib/banzai/reference_parser/base_parser_spec.rb | 12
-rw-r--r--  spec/lib/banzai/reference_parser/mentioned_group_parser_spec.rb | 6
-rw-r--r--  spec/lib/banzai/reference_parser/mentioned_project_parser_spec.rb | 6
-rw-r--r--  spec/lib/banzai/reference_parser/mentioned_user_parser_spec.rb | 6
-rw-r--r--  spec/lib/banzai/reference_parser/project_parser_spec.rb | 8
-rw-r--r--  spec/lib/bulk_imports/common/pipelines/entity_finisher_spec.rb (renamed from spec/lib/bulk_imports/groups/pipelines/entity_finisher_spec.rb) | 2
-rw-r--r--  spec/lib/bulk_imports/groups/graphql/get_projects_query_spec.rb | 43
-rw-r--r--  spec/lib/bulk_imports/groups/pipelines/project_entities_pipeline_spec.rb | 69
-rw-r--r--  spec/lib/bulk_imports/groups/stage_spec.rb (renamed from spec/lib/bulk_imports/stage_spec.rb) | 23
-rw-r--r--  spec/lib/bulk_imports/projects/pipelines/project_pipeline_spec.rb | 95
-rw-r--r--  spec/lib/bulk_imports/projects/stage_spec.rb | 18
-rw-r--r--  spec/lib/bulk_imports/projects/transformers/project_attributes_transformer_spec.rb | 83
-rw-r--r--  spec/lib/error_tracking/collector/dsn_spec.rb | 26
-rw-r--r--  spec/lib/gitlab/action_cable/request_store_callbacks_spec.rb | 20
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_projects_with_coverage_spec.rb | 94
-rw-r--r--  spec/lib/gitlab/background_migration/extract_project_topics_into_separate_table_spec.rb | 45
-rw-r--r--  spec/lib/gitlab/background_migration/migrate_merge_request_diff_commit_users_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/background_migration/migrate_pages_metadata_spec.rb | 14
-rw-r--r--  spec/lib/gitlab/background_migration/steal_migrate_merge_request_diff_commit_users_spec.rb | 50
-rw-r--r--  spec/lib/gitlab/changelog/config_spec.rb | 110
-rw-r--r--  spec/lib/gitlab/changelog/release_spec.rb | 24
-rw-r--r--  spec/lib/gitlab/chat/command_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/checks/changes_access_spec.rb | 153
-rw-r--r--  spec/lib/gitlab/ci/config/entry/job_spec.rb | 103
-rw-r--r--  spec/lib/gitlab/ci/config/entry/rules_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/ci/config/entry/tags_spec.rb | 63
-rw-r--r--  spec/lib/gitlab/ci/cron_parser_spec.rb | 61
-rw-r--r--  spec/lib/gitlab/ci/parsers/security/common_spec.rb | 20
-rw-r--r--  spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb | 3
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/build/associations_spec.rb | 108
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/build_spec.rb | 101
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/command_spec.rb | 36
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/config/content_spec.rb | 21
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb | 15
-rw-r--r--  spec/lib/gitlab/ci/pipeline/metrics_spec.rb | 27
-rw-r--r--  spec/lib/gitlab/ci/pipeline/seed/build_spec.rb | 21
-rw-r--r--  spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/ci/reports/security/flag_spec.rb | 33
-rw-r--r--  spec/lib/gitlab/ci/trace/backoff_spec.rb | 62
-rw-r--r--  spec/lib/gitlab/ci/trace_spec.rb | 14
-rw-r--r--  spec/lib/gitlab/ci/variables/collection/sort_spec.rb | 44
-rw-r--r--  spec/lib/gitlab/ci/variables/collection_spec.rb | 99
-rw-r--r--  spec/lib/gitlab/ci/yaml_processor_spec.rb | 24
-rw-r--r--  spec/lib/gitlab/config/loader/yaml_spec.rb | 18
-rw-r--r--  spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb | 117
-rw-r--r--  spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb | 17
-rw-r--r--  spec/lib/gitlab/database/background_migration/batched_migration_spec.rb | 36
-rw-r--r--  spec/lib/gitlab/database/connection_spec.rb | 81
-rw-r--r--  spec/lib/gitlab/database/load_balancing/action_cable_callbacks_spec.rb | 30
-rw-r--r--  spec/lib/gitlab/database/load_balancing/configuration_spec.rb | 175
-rw-r--r--  spec/lib/gitlab/database/load_balancing/connection_proxy_spec.rb | 40
-rw-r--r--  spec/lib/gitlab/database/load_balancing/host_list_spec.rb | 7
-rw-r--r--  spec/lib/gitlab/database/load_balancing/host_spec.rb | 7
-rw-r--r--  spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb | 46
-rw-r--r--  spec/lib/gitlab/database/load_balancing/primary_host_spec.rb | 126
-rw-r--r--  spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb | 108
-rw-r--r--  spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb | 71
-rw-r--r--  spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb | 37
-rw-r--r--  spec/lib/gitlab/database/load_balancing_spec.rb | 233
-rw-r--r--  spec/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers_spec.rb | 58
-rw-r--r--  spec/lib/gitlab/database/migration_helpers/v2_spec.rb | 104
-rw-r--r--  spec/lib/gitlab/database/migration_helpers_spec.rb | 105
-rw-r--r--  spec/lib/gitlab/database/migration_spec.rb | 68
-rw-r--r--  spec/lib/gitlab/database/migrations/lock_retry_mixin_spec.rb | 129
-rw-r--r--  spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb | 29
-rw-r--r--  spec/lib/gitlab/database/partitioning/multi_database_partition_manager_spec.rb | 36
-rw-r--r--  spec/lib/gitlab/database/partitioning/partition_manager_spec.rb | 80
-rw-r--r--  spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb | 11
-rw-r--r--  spec/lib/gitlab/database/partitioning_migration_helpers/index_helpers_spec.rb | 21
-rw-r--r--  spec/lib/gitlab/database/partitioning_spec.rb | 36
-rw-r--r--  spec/lib/gitlab/database/postgresql_adapter/dump_schema_versions_mixin_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/database/schema_migrations/context_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/database/shared_model_spec.rb | 55
-rw-r--r--  spec/lib/gitlab/database/transaction/context_spec.rb | 42
-rw-r--r--  spec/lib/gitlab/database/transaction/observer_spec.rb | 3
-rw-r--r--  spec/lib/gitlab/database/with_lock_retries_spec.rb | 54
-rw-r--r--  spec/lib/gitlab/database_importers/work_items/base_type_importer_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/database_spec.rb | 50
-rw-r--r--  spec/lib/gitlab/devise_failure_spec.rb | 35
-rw-r--r--  spec/lib/gitlab/diff/highlight_cache_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/email/handler/service_desk_handler_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/encoding_helper_spec.rb | 30
-rw-r--r--  spec/lib/gitlab/experimentation/controller_concern_spec.rb | 20
-rw-r--r--  spec/lib/gitlab/experimentation/experiment_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/experimentation_spec.rb | 103
-rw-r--r--  spec/lib/gitlab/git/commit_spec.rb | 36
-rw-r--r--  spec/lib/gitlab/git/repository_spec.rb | 288
-rw-r--r--  spec/lib/gitlab/git/tag_spec.rb | 14
-rw-r--r--  spec/lib/gitlab/git/tree_spec.rb | 107
-rw-r--r--  spec/lib/gitlab/gitaly_client/blob_service_spec.rb | 23
-rw-r--r--  spec/lib/gitlab/gitaly_client/commit_service_spec.rb | 35
-rw-r--r--  spec/lib/gitlab/gitaly_client/ref_service_spec.rb | 63
-rw-r--r--  spec/lib/gitlab/gitaly_client/repository_service_spec.rb | 13
-rw-r--r--  spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb | 7
-rw-r--r--  spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/github_import/importer/single_endpoint_diff_notes_importer_spec.rb | 75
-rw-r--r--  spec/lib/gitlab/github_import/importer/single_endpoint_issue_notes_importer_spec.rb | 74
-rw-r--r--  spec/lib/gitlab/github_import/importer/single_endpoint_merge_request_notes_importer_spec.rb | 75
-rw-r--r--  spec/lib/gitlab/github_import/issuable_finder_spec.rb | 66
-rw-r--r--  spec/lib/gitlab/github_import/representation/diff_note_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/github_import/sequential_importer_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/github_import/user_finder_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/github_import_spec.rb | 49
-rw-r--r--  spec/lib/gitlab/import_export/all_models.yml | 13
-rw-r--r--  spec/lib/gitlab/import_export/attributes_permitter_spec.rb | 69
-rw-r--r--  spec/lib/gitlab/import_export/safe_model_attributes.yml | 2
-rw-r--r--  spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb | 26
-rw-r--r--  spec/lib/gitlab/instrumentation/redis_spec.rb | 7
-rw-r--r--  spec/lib/gitlab/issuables_count_for_state_spec.rb | 102
-rw-r--r--  spec/lib/gitlab/issues/rebalancing/state_spec.rb | 223
-rw-r--r--  spec/lib/gitlab/kas/client_spec.rb | 27
-rw-r--r--  spec/lib/gitlab/middleware/sidekiq_web_static_spec.rb | 40
-rw-r--r--  spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb | 48
-rw-r--r--  spec/lib/gitlab/pagination/gitaly_keyset_pager_spec.rb | 3
-rw-r--r--  spec/lib/gitlab/pagination/keyset/column_order_definition_spec.rb | 21
-rw-r--r--  spec/lib/gitlab/pagination/keyset/cursor_based_request_context_spec.rb | 68
-rw-r--r--  spec/lib/gitlab/pagination/keyset/cursor_pager_spec.rb | 63
-rw-r--r--  spec/lib/gitlab/pagination/keyset/in_operator_optimization/array_scope_columns_spec.rb | 19
-rw-r--r--  spec/lib/gitlab/pagination/keyset/in_operator_optimization/column_data_spec.rb | 23
-rw-r--r--  spec/lib/gitlab/pagination/keyset/in_operator_optimization/order_by_columns_spec.rb | 37
-rw-r--r--  spec/lib/gitlab/pagination/keyset/in_operator_optimization/query_builder_spec.rb | 225
-rw-r--r--  spec/lib/gitlab/pagination/keyset/order_spec.rb | 41
-rw-r--r--  spec/lib/gitlab/pagination/offset_pagination_spec.rb | 37
-rw-r--r--  spec/lib/gitlab/patch/legacy_database_config_spec.rb | 123
-rw-r--r--  spec/lib/gitlab/path_regex_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/rack_attack/request_spec.rb | 16
-rw-r--r--  spec/lib/gitlab/rack_attack_spec.rb | 26
-rw-r--r--  spec/lib/gitlab/reference_extractor_spec.rb | 55
-rw-r--r--  spec/lib/gitlab/regex_spec.rb | 21
-rw-r--r--  spec/lib/gitlab/repository_cache/preloader_spec.rb | 54
-rw-r--r--  spec/lib/gitlab/search_results_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/seeder_spec.rb | 33
-rw-r--r--  spec/lib/gitlab/sidekiq_cluster/cli_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb | 252
-rw-r--r--  spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executed_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/sidekiq_middleware/size_limiter/validator_spec.rb | 103
-rw-r--r--  spec/lib/gitlab/sidekiq_middleware_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/sidekiq_queue_spec.rb | 30
-rw-r--r--  spec/lib/gitlab/tracking/snowplow_schema_validation_spec.rb | 33
-rw-r--r--  spec/lib/gitlab/tracking/standard_context_spec.rb | 22
-rw-r--r--  spec/lib/gitlab/tracking_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/url_builder_spec.rb | 21
-rw-r--r--  spec/lib/gitlab/usage/metric_definition_spec.rb | 11
-rw-r--r--  spec/lib/gitlab/usage/metric_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/usage/metrics/instrumentations/service_ping_features_metric_spec.rb | 20
-rw-r--r--  spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb | 17
-rw-r--r--  spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb | 30
-rw-r--r--  spec/lib/gitlab/usage_data_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/x509/tag_spec.rb | 18
-rw-r--r--  spec/lib/gitlab/zentao/client_spec.rb | 105
-rw-r--r--  spec/lib/marginalia_spec.rb | 36
-rw-r--r--  spec/lib/object_storage/config_spec.rb | 4
-rw-r--r--  spec/lib/sidebars/menu_spec.rb | 23
-rw-r--r--  spec/lib/sidebars/projects/menus/learn_gitlab_menu_spec.rb | 11
-rw-r--r--  spec/lib/sidebars/projects/menus/monitor_menu_spec.rb | 19
-rw-r--r--  spec/lib/sidebars/projects/menus/settings_menu_spec.rb | 26
-rw-r--r--  spec/lib/system_check/incoming_email_check_spec.rb | 54
166 files changed, 6143 insertions, 1316 deletions
diff --git a/spec/lib/api/entities/clusters/agent_authorization_spec.rb b/spec/lib/api/entities/clusters/agent_authorization_spec.rb
new file mode 100644
index 00000000000..101a8af4ac4
--- /dev/null
+++ b/spec/lib/api/entities/clusters/agent_authorization_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::Clusters::AgentAuthorization do
+ let_it_be(:authorization) { create(:agent_group_authorization) }
+
+ subject { described_class.new(authorization).as_json }
+
+ it 'includes basic fields' do
+ expect(subject).to include(
+ id: authorization.agent_id,
+ config_project: a_hash_including(id: authorization.agent.project_id),
+ configuration: authorization.config
+ )
+ end
+end
diff --git a/spec/lib/backup/gitaly_backup_spec.rb b/spec/lib/backup/gitaly_backup_spec.rb
index a48a1752eff..7797bd12f0e 100644
--- a/spec/lib/backup/gitaly_backup_spec.rb
+++ b/spec/lib/backup/gitaly_backup_spec.rb
@@ -131,8 +131,19 @@ RSpec.describe Backup::GitalyBackup do
context 'parallel option set' do
let(:parallel) { 3 }
- it 'does not pass parallel option through' do
- expect(Open3).to receive(:popen2).with(ENV, anything, 'restore', '-path', anything).and_call_original
+ it 'passes parallel option through' do
+ expect(Open3).to receive(:popen2).with(ENV, anything, 'restore', '-path', anything, '-parallel', '3').and_call_original
+
+ subject.start(:restore)
+ subject.wait
+ end
+ end
+
+ context 'parallel_storage option set' do
+ let(:parallel_storage) { 3 }
+
+ it 'passes parallel option through' do
+ expect(Open3).to receive(:popen2).with(ENV, anything, 'restore', '-path', anything, '-parallel-storage', '3').and_call_original
subject.start(:restore)
subject.wait
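
Note: the two updated contexts above assert that Backup::GitalyBackup forwards its parallelism settings to the gitaly-backup binary as '-parallel' and '-parallel-storage' CLI flags passed to Open3.popen2. A rough sketch of how such an argument list could be assembled (a hypothetical helper, not the shipped implementation):

    # Hypothetical sketch: build gitaly-backup CLI arguments from options.
    def backup_args(command, backup_path, parallel: nil, parallel_storage: nil)
      args = [command, '-path', backup_path]
      args += ['-parallel', parallel.to_s] if parallel
      args += ['-parallel-storage', parallel_storage.to_s] if parallel_storage
      args
    end

    # backup_args('restore', '/backups', parallel: 3)
    # => ["restore", "-path", "/backups", "-parallel", "3"]
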
diff --git a/spec/lib/backup/manager_spec.rb b/spec/lib/backup/manager_spec.rb
index 2cc1bf41d18..32eea82cfdf 100644
--- a/spec/lib/backup/manager_spec.rb
+++ b/spec/lib/backup/manager_spec.rb
@@ -432,6 +432,77 @@ RSpec.describe Backup::Manager do
end
end
+ context 'with AWS with server side encryption' do
+ let(:connection) { ::Fog::Storage.new(Gitlab.config.backup.upload.connection.symbolize_keys) }
+ let(:encryption_key) { nil }
+ let(:encryption) { nil }
+ let(:storage_options) { nil }
+
+ before do
+ stub_backup_setting(
+ upload: {
+ connection: {
+ provider: 'AWS',
+ aws_access_key_id: 'AWS_ACCESS_KEY_ID',
+ aws_secret_access_key: 'AWS_SECRET_ACCESS_KEY'
+ },
+ remote_directory: 'directory',
+ multipart_chunk_size: Gitlab.config.backup.upload.multipart_chunk_size,
+ encryption: encryption,
+ encryption_key: encryption_key,
+ storage_options: storage_options,
+ storage_class: nil
+ }
+ )
+
+ connection.directories.create(key: Gitlab.config.backup.upload.remote_directory)
+ end
+
+ context 'with SSE-S3 without using storage_options' do
+ let(:encryption) { 'AES256' }
+
+ it 'sets encryption attributes' do
+ result = subject.upload
+
+ expect(result.key).to be_present
+ expect(result.encryption).to eq('AES256')
+ expect(result.encryption_key).to be_nil
+ expect(result.kms_key_id).to be_nil
+ end
+ end
+
+ context 'with SSE-C (customer-provided keys) options' do
+ let(:encryption) { 'AES256' }
+ let(:encryption_key) { SecureRandom.hex }
+
+ it 'sets encryption attributes' do
+ result = subject.upload
+
+ expect(result.key).to be_present
+ expect(result.encryption).to eq(encryption)
+ expect(result.encryption_key).to eq(encryption_key)
+ expect(result.kms_key_id).to be_nil
+ end
+ end
+
+ context 'with SSE-KMS options' do
+ let(:storage_options) do
+ {
+ server_side_encryption: 'aws:kms',
+ server_side_encryption_kms_key_id: 'arn:aws:kms:12345'
+ }
+ end
+
+ it 'sets encryption attributes' do
+ result = subject.upload
+
+ expect(result.key).to be_present
+ expect(result.encryption).to eq('aws:kms')
+ expect(result.kms_key_id).to eq('arn:aws:kms:12345')
+ end
+ end
+ end
+
context 'with Google provider' do
before do
stub_backup_setting(
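
Note: the new AWS contexts above verify that the uploaded backup object carries the expected server-side encryption attributes for SSE-S3, SSE-C, and SSE-KMS. As a rough sketch only (attribute and option names are assumptions based on the spec expectations, not the actual Backup::Manager code), the upload attributes could be derived like this:

    # Hypothetical sketch of mapping backup upload settings to Fog object
    # attributes, covering the three encryption modes exercised above.
    def encryption_attributes(encryption:, encryption_key: nil, storage_options: nil)
      if storage_options && storage_options[:server_side_encryption]
        # SSE-KMS: encryption mode and KMS key id come from storage_options
        {
          encryption: storage_options[:server_side_encryption],
          kms_key_id: storage_options[:server_side_encryption_kms_key_id]
        }
      else
        # SSE-S3 (plain AES256) or SSE-C (AES256 plus a customer-provided key)
        { encryption: encryption, encryption_key: encryption_key }.compact
      end
    end
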
diff --git a/spec/lib/banzai/filter/audio_link_filter_spec.rb b/spec/lib/banzai/filter/audio_link_filter_spec.rb
index 4198a50e980..71e069eb29f 100644
--- a/spec/lib/banzai/filter/audio_link_filter_spec.rb
+++ b/spec/lib/banzai/filter/audio_link_filter_spec.rb
@@ -25,18 +25,14 @@ RSpec.describe Banzai::Filter::AudioLinkFilter do
it 'replaces the image tag with an audio tag' do
container = filter(image).children.first
- expect(container.name).to eq 'div'
- expect(container['class']).to eq 'audio-container'
+ expect(container.name).to eq 'span'
+ expect(container['class']).to eq 'media-container audio-container'
- audio, paragraph = container.children
+ audio, link = container.children
expect(audio.name).to eq 'audio'
expect(audio['src']).to eq src
- expect(paragraph.name).to eq 'p'
-
- link = paragraph.children.first
-
expect(link.name).to eq 'a'
expect(link['href']).to eq src
expect(link['target']).to eq '_blank'
@@ -105,15 +101,13 @@ RSpec.describe Banzai::Filter::AudioLinkFilter do
image = %(<img src="#{proxy_src}" data-canonical-src="#{canonical_src}"/>)
container = filter(image).children.first
- expect(container['class']).to eq 'audio-container'
+ expect(container['class']).to eq 'media-container audio-container'
- audio, paragraph = container.children
+ audio, link = container.children
expect(audio['src']).to eq proxy_src
expect(audio['data-canonical-src']).to eq canonical_src
- link = paragraph.children.first
-
expect(link['href']).to eq proxy_src
end
end
diff --git a/spec/lib/banzai/filter/video_link_filter_spec.rb b/spec/lib/banzai/filter/video_link_filter_spec.rb
index ec954aa9163..a0b0ba309f5 100644
--- a/spec/lib/banzai/filter/video_link_filter_spec.rb
+++ b/spec/lib/banzai/filter/video_link_filter_spec.rb
@@ -25,20 +25,16 @@ RSpec.describe Banzai::Filter::VideoLinkFilter do
it 'replaces the image tag with a video tag' do
container = filter(image).children.first
- expect(container.name).to eq 'div'
- expect(container['class']).to eq 'video-container'
+ expect(container.name).to eq 'span'
+ expect(container['class']).to eq 'media-container video-container'
- video, paragraph = container.children
+ video, link = container.children
expect(video.name).to eq 'video'
expect(video['src']).to eq src
expect(video['width']).to eq "400"
expect(video['preload']).to eq 'metadata'
- expect(paragraph.name).to eq 'p'
-
- link = paragraph.children.first
-
expect(link.name).to eq 'a'
expect(link['href']).to eq src
expect(link['target']).to eq '_blank'
@@ -107,15 +103,13 @@ RSpec.describe Banzai::Filter::VideoLinkFilter do
image = %(<img src="#{proxy_src}" data-canonical-src="#{canonical_src}"/>)
container = filter(image).children.first
- expect(container['class']).to eq 'video-container'
+ expect(container['class']).to eq 'media-container video-container'
- video, paragraph = container.children
+ video, link = container.children
expect(video['src']).to eq proxy_src
expect(video['data-canonical-src']).to eq canonical_src
- link = paragraph.children.first
-
expect(link['href']).to eq proxy_src
end
end
diff --git a/spec/lib/banzai/reference_parser/base_parser_spec.rb b/spec/lib/banzai/reference_parser/base_parser_spec.rb
index 095500cdc53..4701caa0667 100644
--- a/spec/lib/banzai/reference_parser/base_parser_spec.rb
+++ b/spec/lib/banzai/reference_parser/base_parser_spec.rb
@@ -247,15 +247,15 @@ RSpec.describe Banzai::ReferenceParser::BaseParser do
end
end
- it 'returns referenceable and visible objects, alongside nodes that are referenceable but not visible' do
- expect(subject.gather_references(nodes)).to match(
- visible: contain_exactly(6, 8, 10),
- not_visible: match_array(nodes.select { |n| n.id.even? && n.id <= 5 })
- )
+ it 'returns referenceable and visible objects, alongside all and visible nodes' do
+ referenceable = nodes.select { |n| n.id.even? }
+ visible = nodes.select { |n| [6, 8, 10].include?(n.id) }
+
+ expect_gathered_references(subject.gather_references(nodes), [6, 8, 10], referenceable, visible)
end
it 'is always empty if the input is empty' do
- expect(subject.gather_references([])) .to match(visible: be_empty, not_visible: be_empty)
+ expect_gathered_references(subject.gather_references([]), [], [], [])
end
end
diff --git a/spec/lib/banzai/reference_parser/mentioned_group_parser_spec.rb b/spec/lib/banzai/reference_parser/mentioned_group_parser_spec.rb
index 4610da7cbe6..576e629d271 100644
--- a/spec/lib/banzai/reference_parser/mentioned_group_parser_spec.rb
+++ b/spec/lib/banzai/reference_parser/mentioned_group_parser_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Banzai::ReferenceParser::MentionedGroupParser do
it 'returns empty array' do
link['data-group'] = project.group.id.to_s
- expect_gathered_references(subject.gather_references([link]), [], 1)
+ expect_gathered_references(subject.gather_references([link]), [], [link], [])
end
end
@@ -30,7 +30,7 @@ RSpec.describe Banzai::ReferenceParser::MentionedGroupParser do
end
it 'returns groups' do
- expect_gathered_references(subject.gather_references([link]), [group], 0)
+ expect_gathered_references(subject.gather_references([link]), [group], [link], [link])
end
end
@@ -38,7 +38,7 @@ RSpec.describe Banzai::ReferenceParser::MentionedGroupParser do
it 'returns an empty Array' do
link['data-group'] = 'test-non-existing'
- expect_gathered_references(subject.gather_references([link]), [], 1)
+ expect_gathered_references(subject.gather_references([link]), [], [link], [])
end
end
end
diff --git a/spec/lib/banzai/reference_parser/mentioned_project_parser_spec.rb b/spec/lib/banzai/reference_parser/mentioned_project_parser_spec.rb
index 7eb58ee40d3..983407addce 100644
--- a/spec/lib/banzai/reference_parser/mentioned_project_parser_spec.rb
+++ b/spec/lib/banzai/reference_parser/mentioned_project_parser_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Banzai::ReferenceParser::MentionedProjectParser do
it 'returns empty Array' do
link['data-project'] = project.id.to_s
- expect_gathered_references(subject.gather_references([link]), [], 1)
+ expect_gathered_references(subject.gather_references([link]), [], [link], [])
end
end
@@ -30,7 +30,7 @@ RSpec.describe Banzai::ReferenceParser::MentionedProjectParser do
end
it 'returns an Array of referenced projects' do
- expect_gathered_references(subject.gather_references([link]), [project], 0)
+ expect_gathered_references(subject.gather_references([link]), [project], [link], [link])
end
end
@@ -38,7 +38,7 @@ RSpec.describe Banzai::ReferenceParser::MentionedProjectParser do
it 'returns an empty Array' do
link['data-project'] = 'inexisting-project-id'
- expect_gathered_references(subject.gather_references([link]), [], 1)
+ expect_gathered_references(subject.gather_references([link]), [], [link], [])
end
end
end
diff --git a/spec/lib/banzai/reference_parser/mentioned_user_parser_spec.rb b/spec/lib/banzai/reference_parser/mentioned_user_parser_spec.rb
index 4be07866db1..f117d796dad 100644
--- a/spec/lib/banzai/reference_parser/mentioned_user_parser_spec.rb
+++ b/spec/lib/banzai/reference_parser/mentioned_user_parser_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe Banzai::ReferenceParser::MentionedUserParser do
end
it 'returns empty list of users' do
- expect_gathered_references(subject.gather_references([link]), [], 0)
+ expect_gathered_references(subject.gather_references([link]), [], [link], [link])
end
end
end
@@ -35,7 +35,7 @@ RSpec.describe Banzai::ReferenceParser::MentionedUserParser do
end
it 'returns empty list of users' do
- expect_gathered_references(subject.gather_references([link]), [], 0)
+ expect_gathered_references(subject.gather_references([link]), [], [link], [link])
end
end
end
@@ -44,7 +44,7 @@ RSpec.describe Banzai::ReferenceParser::MentionedUserParser do
it 'returns an Array of users' do
link['data-user'] = user.id.to_s
- expect_gathered_references(subject.gather_references([link]), [user], 0)
+ expect_gathered_references(subject.gather_references([link]), [user], [link], [link])
end
end
end
diff --git a/spec/lib/banzai/reference_parser/project_parser_spec.rb b/spec/lib/banzai/reference_parser/project_parser_spec.rb
index 6358a04f12a..2c0b6c417b0 100644
--- a/spec/lib/banzai/reference_parser/project_parser_spec.rb
+++ b/spec/lib/banzai/reference_parser/project_parser_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe Banzai::ReferenceParser::ProjectParser do
it 'returns an Array of projects' do
link['data-project'] = project.id.to_s
- expect_gathered_references(subject.gather_references([link]), [project], 0)
+ expect_gathered_references(subject.gather_references([link]), [project], [link], [link])
end
end
@@ -25,7 +25,7 @@ RSpec.describe Banzai::ReferenceParser::ProjectParser do
it 'returns an empty Array' do
link['data-project'] = ''
- expect_gathered_references(subject.gather_references([link]), [], 1)
+ expect_gathered_references(subject.gather_references([link]), [], [link], [])
end
end
@@ -35,7 +35,7 @@ RSpec.describe Banzai::ReferenceParser::ProjectParser do
link['data-project'] = private_project.id.to_s
- expect_gathered_references(subject.gather_references([link]), [], 1)
+ expect_gathered_references(subject.gather_references([link]), [], [link], [])
end
it 'returns an Array when authorized' do
@@ -43,7 +43,7 @@ RSpec.describe Banzai::ReferenceParser::ProjectParser do
link['data-project'] = private_project.id.to_s
- expect_gathered_references(subject.gather_references([link]), [private_project], 0)
+ expect_gathered_references(subject.gather_references([link]), [private_project], [link], [link])
end
end
end
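
Note: all of the reference parser specs above now call expect_gathered_references with four arguments: the gathered result, the expected visible objects, the nodes considered referenceable, and the nodes that remain visible. The helper lives in a support file that is not part of this diff; the sketch below is an inference from the call sites, and the hash keys returned by gather_references are an assumption.

    # Hypothetical shape of the shared expect_gathered_references helper.
    def expect_gathered_references(result, visible_objects, nodes, visible_nodes)
      expect(result[:visible]).to match_array(visible_objects)
      expect(result[:nodes]).to match_array(nodes)
      expect(result[:visible_nodes]).to match_array(visible_nodes)
    end
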
diff --git a/spec/lib/bulk_imports/groups/pipelines/entity_finisher_spec.rb b/spec/lib/bulk_imports/common/pipelines/entity_finisher_spec.rb
index b97aeb435b9..c1a9ea7b7e2 100644
--- a/spec/lib/bulk_imports/groups/pipelines/entity_finisher_spec.rb
+++ b/spec/lib/bulk_imports/common/pipelines/entity_finisher_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BulkImports::Groups::Pipelines::EntityFinisher do
+RSpec.describe BulkImports::Common::Pipelines::EntityFinisher do
it 'updates the entity status to finished' do
entity = create(:bulk_import_entity, :started)
pipeline_tracker = create(:bulk_import_tracker, entity: entity)
diff --git a/spec/lib/bulk_imports/groups/graphql/get_projects_query_spec.rb b/spec/lib/bulk_imports/groups/graphql/get_projects_query_spec.rb
new file mode 100644
index 00000000000..1a7c5a4993c
--- /dev/null
+++ b/spec/lib/bulk_imports/groups/graphql/get_projects_query_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Groups::Graphql::GetProjectsQuery do
+ describe '#variables' do
+ it 'returns valid variables based on entity information' do
+ tracker = create(:bulk_import_tracker)
+ context = BulkImports::Pipeline::Context.new(tracker)
+
+ query = GraphQL::Query.new(
+ GitlabSchema,
+ described_class.to_s,
+ variables: described_class.variables(context)
+ )
+ result = GitlabSchema.static_validator.validate(query)
+
+ expect(result[:errors]).to be_empty
+ end
+
+ context 'with invalid variables' do
+ it 'raises an error' do
+ expect { GraphQL::Query.new(GitlabSchema, described_class.to_s, variables: 'invalid') }.to raise_error(ArgumentError)
+ end
+ end
+ end
+
+ describe '#data_path' do
+ it 'returns data path' do
+ expected = %w[data group projects nodes]
+
+ expect(described_class.data_path).to eq(expected)
+ end
+ end
+
+ describe '#page_info_path' do
+ it 'returns pagination information path' do
+ expected = %w[data group projects page_info]
+
+ expect(described_class.page_info_path).to eq(expected)
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/groups/pipelines/project_entities_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/project_entities_pipeline_spec.rb
new file mode 100644
index 00000000000..5b6c93e695f
--- /dev/null
+++ b/spec/lib/bulk_imports/groups/pipelines/project_entities_pipeline_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Groups::Pipelines::ProjectEntitiesPipeline do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:destination_group) { create(:group) }
+
+ let_it_be(:entity) do
+ create(
+ :bulk_import_entity,
+ group: destination_group,
+ destination_namespace: destination_group.full_path
+ )
+ end
+
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ let(:extracted_data) do
+ BulkImports::Pipeline::ExtractedData.new(data: {
+ 'name' => 'project',
+ 'full_path' => 'group/project'
+ })
+ end
+
+ subject { described_class.new(context) }
+
+ describe '#run' do
+ before do
+ allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(extracted_data)
+ end
+
+ destination_group.add_owner(user)
+ end
+
+ it 'creates project entity' do
+ expect { subject.run }.to change(BulkImports::Entity, :count).by(1)
+
+ project_entity = BulkImports::Entity.last
+
+ expect(project_entity.source_type).to eq('project_entity')
+ expect(project_entity.source_full_path).to eq('group/project')
+ expect(project_entity.destination_name).to eq('project')
+ expect(project_entity.destination_namespace).to eq(destination_group.full_path)
+ end
+ end
+
+ describe 'pipeline parts' do
+ it { expect(described_class).to include_module(BulkImports::Pipeline) }
+ it { expect(described_class).to include_module(BulkImports::Pipeline::Runner) }
+
+ it 'has extractors' do
+ expect(described_class.get_extractor).to eq(
+ klass: BulkImports::Common::Extractors::GraphqlExtractor,
+ options: {
+ query: BulkImports::Groups::Graphql::GetProjectsQuery
+ }
+ )
+ end
+
+ it 'has transformers' do
+ expect(described_class.transformers).to contain_exactly(
+ { klass: BulkImports::Common::Transformers::ProhibitedAttributesTransformer, options: nil }
+ )
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/stage_spec.rb b/spec/lib/bulk_imports/groups/stage_spec.rb
index 4398b00e7e9..81c0ffc14d4 100644
--- a/spec/lib/bulk_imports/stage_spec.rb
+++ b/spec/lib/bulk_imports/groups/stage_spec.rb
@@ -1,8 +1,8 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper'
-RSpec.describe BulkImports::Stage do
+RSpec.describe BulkImports::Groups::Stage do
let(:pipelines) do
[
[0, BulkImports::Groups::Pipelines::GroupPipeline],
@@ -19,18 +19,21 @@ RSpec.describe BulkImports::Stage do
describe '.pipelines' do
it 'list all the pipelines with their stage number, ordered by stage' do
expect(described_class.pipelines & pipelines).to eq(pipelines)
- expect(described_class.pipelines.last.last).to eq(BulkImports::Groups::Pipelines::EntityFinisher)
+ expect(described_class.pipelines.last.last).to eq(BulkImports::Common::Pipelines::EntityFinisher)
end
- end
- describe '.pipeline_exists?' do
- it 'returns true when the given pipeline name exists in the pipelines list' do
- expect(described_class.pipeline_exists?(BulkImports::Groups::Pipelines::GroupPipeline)).to eq(true)
- expect(described_class.pipeline_exists?('BulkImports::Groups::Pipelines::GroupPipeline')).to eq(true)
+ it 'includes project entities pipeline' do
+ stub_feature_flags(bulk_import_projects: true)
+
+ expect(described_class.pipelines).to include([1, BulkImports::Groups::Pipelines::ProjectEntitiesPipeline])
end
- it 'returns false when the given pipeline name exists in the pipelines list' do
- expect(described_class.pipeline_exists?('BulkImports::Groups::Pipelines::InexistentPipeline')).to eq(false)
+ context 'when bulk_import_projects feature flag is disabled' do
+ it 'does not include project entities pipeline' do
+ stub_feature_flags(bulk_import_projects: false)
+
+ expect(described_class.pipelines.flatten).not_to include(BulkImports::Groups::Pipelines::ProjectEntitiesPipeline)
+ end
end
end
end
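
Note: the reworked stage spec expects BulkImports::Groups::Stage.pipelines to include the new ProjectEntitiesPipeline at stage 1 only while the bulk_import_projects feature flag is enabled. A rough sketch of a conditional stage list with that behaviour (the hash shape and method name are assumptions, not the shipped class):

    # Hypothetical sketch: append the project entities pipeline behind a flag.
    def config
      stages = {
        group: { pipeline: BulkImports::Groups::Pipelines::GroupPipeline, stage: 0 },
        finisher: { pipeline: BulkImports::Common::Pipelines::EntityFinisher, stage: 2 }
      }

      if Feature.enabled?(:bulk_import_projects)
        stages[:project_entities] = {
          pipeline: BulkImports::Groups::Pipelines::ProjectEntitiesPipeline,
          stage: 1
        }
      end

      stages
    end
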
diff --git a/spec/lib/bulk_imports/projects/pipelines/project_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/project_pipeline_spec.rb
new file mode 100644
index 00000000000..c53c0849931
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/project_pipeline_spec.rb
@@ -0,0 +1,95 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::ProjectPipeline do
+ describe '#run' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+
+ let_it_be(:entity) do
+ create(
+ :bulk_import_entity,
+ source_type: :project_entity,
+ bulk_import: bulk_import,
+ source_full_path: 'source/full/path',
+ destination_name: 'My Destination Project',
+ destination_namespace: group.full_path
+ )
+ end
+
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ let(:project_data) do
+ {
+ 'visibility' => 'private',
+ 'created_at' => 10.days.ago,
+ 'archived' => false,
+ 'shared_runners_enabled' => true,
+ 'container_registry_enabled' => true,
+ 'only_allow_merge_if_pipeline_succeeds' => true,
+ 'only_allow_merge_if_all_discussions_are_resolved' => true,
+ 'request_access_enabled' => true,
+ 'printing_merge_request_link_enabled' => true,
+ 'remove_source_branch_after_merge' => true,
+ 'autoclose_referenced_issues' => true,
+ 'suggestion_commit_message' => 'message',
+ 'wiki_enabled' => true
+ }
+ end
+
+ subject(:project_pipeline) { described_class.new(context) }
+
+ before do
+ allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: project_data))
+ end
+
+ group.add_owner(user)
+ end
+
+ it 'imports new project into destination group', :aggregate_failures do
+ expect { project_pipeline.run }.to change { Project.count }.by(1)
+
+ project_path = 'my-destination-project'
+ imported_project = Project.find_by_path(project_path)
+
+ expect(imported_project).not_to be_nil
+ expect(imported_project.group).to eq(group)
+ expect(imported_project.suggestion_commit_message).to eq('message')
+ expect(imported_project.archived?).to eq(project_data['archived'])
+ expect(imported_project.shared_runners_enabled?).to eq(project_data['shared_runners_enabled'])
+ expect(imported_project.container_registry_enabled?).to eq(project_data['container_registry_enabled'])
+ expect(imported_project.only_allow_merge_if_pipeline_succeeds?).to eq(project_data['only_allow_merge_if_pipeline_succeeds'])
+ expect(imported_project.only_allow_merge_if_all_discussions_are_resolved?).to eq(project_data['only_allow_merge_if_all_discussions_are_resolved'])
+ expect(imported_project.request_access_enabled?).to eq(project_data['request_access_enabled'])
+ expect(imported_project.printing_merge_request_link_enabled?).to eq(project_data['printing_merge_request_link_enabled'])
+ expect(imported_project.remove_source_branch_after_merge?).to eq(project_data['remove_source_branch_after_merge'])
+ expect(imported_project.autoclose_referenced_issues?).to eq(project_data['autoclose_referenced_issues'])
+ expect(imported_project.wiki_enabled?).to eq(project_data['wiki_enabled'])
+ end
+ end
+
+ describe 'pipeline parts' do
+ it { expect(described_class).to include_module(BulkImports::Pipeline) }
+ it { expect(described_class).to include_module(BulkImports::Pipeline::Runner) }
+
+ it 'has extractors' do
+ expect(described_class.get_extractor)
+ .to eq(
+ klass: BulkImports::Common::Extractors::GraphqlExtractor,
+ options: { query: BulkImports::Projects::Graphql::GetProjectQuery }
+ )
+ end
+
+ it 'has transformers' do
+ expect(described_class.transformers)
+ .to contain_exactly(
+ { klass: BulkImports::Common::Transformers::ProhibitedAttributesTransformer, options: nil },
+ { klass: BulkImports::Projects::Transformers::ProjectAttributesTransformer, options: nil }
+ )
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/stage_spec.rb b/spec/lib/bulk_imports/projects/stage_spec.rb
new file mode 100644
index 00000000000..428812a34ef
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/stage_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Stage do
+ let(:pipelines) do
+ [
+ [0, BulkImports::Projects::Pipelines::ProjectPipeline],
+ [1, BulkImports::Common::Pipelines::EntityFinisher]
+ ]
+ end
+
+ describe '.pipelines' do
+ it 'list all the pipelines with their stage number, ordered by stage' do
+ expect(described_class.pipelines).to eq(pipelines)
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/transformers/project_attributes_transformer_spec.rb b/spec/lib/bulk_imports/projects/transformers/project_attributes_transformer_spec.rb
new file mode 100644
index 00000000000..822bb9a5605
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/transformers/project_attributes_transformer_spec.rb
@@ -0,0 +1,83 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Transformers::ProjectAttributesTransformer do
+ describe '#transform' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:destination_group) { create(:group) }
+ let_it_be(:project) { create(:project, name: 'My Source Project') }
+ let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+
+ let_it_be(:entity) do
+ create(
+ :bulk_import_entity,
+ source_type: :project_entity,
+ bulk_import: bulk_import,
+ source_full_path: 'source/full/path',
+ destination_name: 'Destination Project Name',
+ destination_namespace: destination_group.full_path
+ )
+ end
+
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ let(:data) do
+ {
+ 'name' => 'source_name',
+ 'visibility' => 'private'
+ }
+ end
+
+ subject(:transformed_data) { described_class.new.transform(context, data) }
+
+ it 'transforms name to destination name' do
+ expect(transformed_data[:name]).to eq(entity.destination_name)
+ end
+
+ it 'adds path as parameterized name' do
+ expect(transformed_data[:path]).to eq(entity.destination_name.parameterize)
+ end
+
+ it 'transforms visibility level' do
+ visibility = data['visibility']
+
+ expect(transformed_data).not_to have_key(:visibility)
+ expect(transformed_data[:visibility_level]).to eq(Gitlab::VisibilityLevel.string_options[visibility])
+ end
+
+ it 'adds import type' do
+ expect(transformed_data[:import_type]).to eq(described_class::PROJECT_IMPORT_TYPE)
+ end
+
+ describe 'namespace_id' do
+ context 'when destination namespace is present' do
+ it 'adds namespace_id' do
+ expect(transformed_data[:namespace_id]).to eq(destination_group.id)
+ end
+ end
+
+ context 'when destination namespace is blank' do
+ it 'does not add namespace_id key' do
+ entity = create(
+ :bulk_import_entity,
+ source_type: :project_entity,
+ bulk_import: bulk_import,
+ source_full_path: 'source/full/path',
+ destination_name: 'Destination Project Name',
+ destination_namespace: ''
+ )
+
+ context = double(entity: entity)
+
+ expect(described_class.new.transform(context, data)).not_to have_key(:namespace_id)
+ end
+ end
+ end
+
+ it 'converts all keys to symbols' do
+ expect(transformed_data.keys).to contain_exactly(:name, :path, :import_type, :visibility_level, :namespace_id)
+ end
+ end
+end
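
Note: the transformer spec above pins down the expected output shape: the destination name and its parameterized path, a numeric visibility level, the import type constant, and (when present) the destination namespace id. A rough sketch of a transform with that behaviour follows; the namespace lookup and the PROJECT_IMPORT_TYPE value are assumptions, not the shipped transformer.

    # Hypothetical sketch of the attribute transformation exercised above.
    def transform(context, data)
      entity = context.entity
      visibility = data.delete('visibility')

      transformed = data.symbolize_keys.merge(
        name: entity.destination_name,
        path: entity.destination_name.parameterize,
        import_type: 'gitlab_project_migration', # assumed value of PROJECT_IMPORT_TYPE
        visibility_level: Gitlab::VisibilityLevel.string_options[visibility]
      )

      if entity.destination_namespace.present?
        transformed[:namespace_id] = Namespace.find_by_full_path(entity.destination_namespace)&.id
      end

      transformed
    end
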
diff --git a/spec/lib/error_tracking/collector/dsn_spec.rb b/spec/lib/error_tracking/collector/dsn_spec.rb
new file mode 100644
index 00000000000..af55e6f20ec
--- /dev/null
+++ b/spec/lib/error_tracking/collector/dsn_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ErrorTracking::Collector::Dsn do
+ describe '.build_url' do
+ let(:gitlab) do
+ double(
+ protocol: 'https',
+ https: true,
+ host: 'gitlab.example.com',
+ port: '4567',
+ relative_url_root: nil
+ )
+ end
+
+ subject { described_class.build_url('abcdef1234567890', 778) }
+
+ it 'returns a valid URL' do
+ allow(Settings).to receive(:gitlab).and_return(gitlab)
+ allow(Settings).to receive(:gitlab_on_standard_port?).and_return(false)
+
+ is_expected.to eq('https://abcdef1234567890@gitlab.example.com:4567/api/v4/error_tracking/collector/778')
+ end
+ end
+end
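
Note: the DSN spec expects the collector URL to embed the public key as userinfo and the project id in the path. A rough sketch of a builder that produces the URL asserted above, using the Settings helpers stubbed in the spec (an illustration, not the real class):

    # Hypothetical sketch of building an error tracking collector DSN.
    def build_url(public_key, project_id)
      gitlab = Settings.gitlab
      port = Settings.gitlab_on_standard_port? ? '' : ":#{gitlab.port}"

      "#{gitlab.protocol}://#{public_key}@#{gitlab.host}#{port}" \
        "#{gitlab.relative_url_root}/api/v4/error_tracking/collector/#{project_id}"
    end

    # build_url('abcdef1234567890', 778)
    # => "https://abcdef1234567890@gitlab.example.com:4567/api/v4/error_tracking/collector/778"
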
diff --git a/spec/lib/gitlab/action_cable/request_store_callbacks_spec.rb b/spec/lib/gitlab/action_cable/request_store_callbacks_spec.rb
new file mode 100644
index 00000000000..3b73252709c
--- /dev/null
+++ b/spec/lib/gitlab/action_cable/request_store_callbacks_spec.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::ActionCable::RequestStoreCallbacks do
+ describe '.wrapper' do
+ it 'enables RequestStore in the inner block' do
+ expect(RequestStore.active?).to eq(false)
+
+ described_class.wrapper.call(
+ nil,
+ lambda do
+ expect(RequestStore.active?).to eq(true)
+ end
+ )
+
+ expect(RequestStore.active?).to eq(false)
+ end
+ end
+end
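
Note: the callback spec asserts that RequestStore is inactive before the wrapped block, active inside it, and inactive again afterwards. A minimal sketch of a wrapper with that contract, using the request_store gem's begin!/end!/clear! API (the actual module may differ):

    # Hypothetical sketch of an ActionCable executor callback wrapper.
    def wrapper
      lambda do |_context, inner|
        begin
          ::RequestStore.begin!
          inner.call
        ensure
          ::RequestStore.end!
          ::RequestStore.clear!
        end
      end
    end
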
diff --git a/spec/lib/gitlab/background_migration/backfill_projects_with_coverage_spec.rb b/spec/lib/gitlab/background_migration/backfill_projects_with_coverage_spec.rb
new file mode 100644
index 00000000000..49056154744
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_projects_with_coverage_spec.rb
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillProjectsWithCoverage, schema: 20210818185845 do
+ let(:projects) { table(:projects) }
+ let(:project_ci_feature_usages) { table(:project_ci_feature_usages) }
+ let(:ci_pipelines) { table(:ci_pipelines) }
+ let(:ci_daily_build_group_report_results) { table(:ci_daily_build_group_report_results) }
+ let(:group) { table(:namespaces).create!(name: 'user', path: 'user') }
+ let(:project_1) { projects.create!(namespace_id: group.id) }
+ let(:project_2) { projects.create!(namespace_id: group.id) }
+ let(:pipeline_1) { ci_pipelines.create!(project_id: project_1.id, source: 13) }
+ let(:pipeline_2) { ci_pipelines.create!(project_id: project_1.id, source: 13) }
+ let(:pipeline_3) { ci_pipelines.create!(project_id: project_2.id, source: 13) }
+ let(:pipeline_4) { ci_pipelines.create!(project_id: project_2.id, source: 13) }
+
+ subject { described_class.new }
+
+ describe '#perform' do
+ before do
+ ci_daily_build_group_report_results.create!(
+ id: 1,
+ project_id: project_1.id,
+ date: 4.days.ago,
+ last_pipeline_id: pipeline_1.id,
+ ref_path: 'main',
+ group_name: 'rspec',
+ data: { coverage: 95.0 },
+ default_branch: true,
+ group_id: group.id
+ )
+
+ ci_daily_build_group_report_results.create!(
+ id: 2,
+ project_id: project_1.id,
+ date: 3.days.ago,
+ last_pipeline_id: pipeline_2.id,
+ ref_path: 'main',
+ group_name: 'rspec',
+ data: { coverage: 95.0 },
+ default_branch: true,
+ group_id: group.id
+ )
+
+ ci_daily_build_group_report_results.create!(
+ id: 3,
+ project_id: project_2.id,
+ date: 2.days.ago,
+ last_pipeline_id: pipeline_3.id,
+ ref_path: 'main',
+ group_name: 'rspec',
+ data: { coverage: 95.0 },
+ default_branch: true,
+ group_id: group.id
+ )
+
+ ci_daily_build_group_report_results.create!(
+ id: 4,
+ project_id: project_2.id,
+ date: 1.day.ago,
+ last_pipeline_id: pipeline_4.id,
+ ref_path: 'test_branch',
+ group_name: 'rspec',
+ data: { coverage: 95.0 },
+ default_branch: false,
+ group_id: group.id
+ )
+
+ stub_const("#{described_class}::INSERT_DELAY_SECONDS", 0)
+ end
+
+ it 'creates entries per project and default_branch combination in the given range', :aggregate_failures do
+ subject.perform(1, 4, 2)
+
+ entries = project_ci_feature_usages.order('project_id ASC, default_branch DESC')
+
+ expect(entries.count).to eq(3)
+ expect(entries[0]).to have_attributes(project_id: project_1.id, feature: 1, default_branch: true)
+ expect(entries[1]).to have_attributes(project_id: project_2.id, feature: 1, default_branch: true)
+ expect(entries[2]).to have_attributes(project_id: project_2.id, feature: 1, default_branch: false)
+ end
+
+ context 'when an entry for the project and default branch combination already exists' do
+ before do
+ subject.perform(1, 4, 2)
+ end
+
+ it 'does not create a new entry' do
+ expect { subject.perform(1, 4, 2) }.not_to change { project_ci_feature_usages.count }
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/extract_project_topics_into_separate_table_spec.rb b/spec/lib/gitlab/background_migration/extract_project_topics_into_separate_table_spec.rb
new file mode 100644
index 00000000000..a111007a984
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/extract_project_topics_into_separate_table_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::ExtractProjectTopicsIntoSeparateTable, schema: 20210730104800 do
+ it 'correctly extracts project topics into separate table' do
+ namespaces = table(:namespaces)
+ projects = table(:projects)
+ taggings = table(:taggings)
+ tags = table(:tags)
+ project_topics = table(:project_topics)
+ topics = table(:topics)
+
+ namespace = namespaces.create!(name: 'foo', path: 'foo')
+ project = projects.create!(namespace_id: namespace.id)
+ tag_1 = tags.create!(name: 'Topic1')
+ tag_2 = tags.create!(name: 'Topic2')
+ tag_3 = tags.create!(name: 'Topic3')
+ topic_3 = topics.create!(name: 'Topic3')
+ tagging_1 = taggings.create!(taggable_type: 'Project', taggable_id: project.id, context: 'topics', tag_id: tag_1.id)
+ tagging_2 = taggings.create!(taggable_type: 'Project', taggable_id: project.id, context: 'topics', tag_id: tag_2.id)
+ other_tagging = taggings.create!(taggable_type: 'Other', taggable_id: project.id, context: 'topics', tag_id: tag_1.id)
+ tagging_3 = taggings.create!(taggable_type: 'Project', taggable_id: project.id, context: 'topics', tag_id: tag_3.id)
+ tagging_4 = taggings.create!(taggable_type: 'Project', taggable_id: -1, context: 'topics', tag_id: tag_1.id)
+ tagging_5 = taggings.create!(taggable_type: 'Project', taggable_id: project.id, context: 'topics', tag_id: -1)
+
+ subject.perform(tagging_1.id, tagging_5.id)
+
+ # Tagging records
+ expect { tagging_1.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ expect { tagging_2.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ expect { other_tagging.reload }.not_to raise_error(ActiveRecord::RecordNotFound)
+ expect { tagging_3.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ expect { tagging_4.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ expect { tagging_5.reload }.to raise_error(ActiveRecord::RecordNotFound)
+
+ # Topic records
+ topic_1 = topics.find_by(name: 'Topic1')
+ topic_2 = topics.find_by(name: 'Topic2')
+ expect(topics.all).to contain_exactly(topic_1, topic_2, topic_3)
+
+ # ProjectTopic records
+ expect(project_topics.all.map(&:topic_id)).to contain_exactly(topic_1.id, topic_2.id, topic_3.id)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/migrate_merge_request_diff_commit_users_spec.rb b/spec/lib/gitlab/background_migration/migrate_merge_request_diff_commit_users_spec.rb
index 496ce151032..91e8dcdf880 100644
--- a/spec/lib/gitlab/background_migration/migrate_merge_request_diff_commit_users_spec.rb
+++ b/spec/lib/gitlab/background_migration/migrate_merge_request_diff_commit_users_spec.rb
@@ -91,6 +91,18 @@ RSpec.describe Gitlab::BackgroundMigration::MigrateMergeRequestDiffCommitUsers d
end
describe '#perform' do
+ it 'skips jobs that have already been completed' do
+ Gitlab::Database::BackgroundMigrationJob.create!(
+ class_name: 'MigrateMergeRequestDiffCommitUsers',
+ arguments: [1, 10],
+ status: :succeeded
+ )
+
+ expect(migration).not_to receive(:get_data_to_update)
+
+ migration.perform(1, 10)
+ end
+
it 'migrates the data in the range' do
commits.create!(
merge_request_diff_id: diff.id,
diff --git a/spec/lib/gitlab/background_migration/migrate_pages_metadata_spec.rb b/spec/lib/gitlab/background_migration/migrate_pages_metadata_spec.rb
index 906a6a747c9..815dc2e73e5 100644
--- a/spec/lib/gitlab/background_migration/migrate_pages_metadata_spec.rb
+++ b/spec/lib/gitlab/background_migration/migrate_pages_metadata_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Gitlab::BackgroundMigration::MigratePagesMetadata, schema: 201909
subject(:migrate_pages_metadata) { described_class.new }
- describe '#perform_on_relation' do
+ describe '#perform' do
let(:namespaces) { table(:namespaces) }
let(:builds) { table(:ci_builds) }
let(:pages_metadata) { table(:project_pages_metadata) }
@@ -23,9 +23,9 @@ RSpec.describe Gitlab::BackgroundMigration::MigratePagesMetadata, schema: 201909
not_migrated_no_pages = projects.create!(namespace_id: namespace.id, name: 'Not Migrated No Pages')
project_not_in_relation_scope = projects.create!(namespace_id: namespace.id, name: 'Other')
- projects_relation = projects.where(id: [not_migrated_with_pages, not_migrated_no_pages, migrated])
+ ids = [not_migrated_no_pages.id, not_migrated_with_pages.id, migrated.id]
- migrate_pages_metadata.perform_on_relation(projects_relation)
+ migrate_pages_metadata.perform(ids.min, ids.max)
expect(pages_metadata.find_by_project_id(not_migrated_with_pages.id).deployed).to eq(true)
expect(pages_metadata.find_by_project_id(not_migrated_no_pages.id).deployed).to eq(false)
@@ -33,12 +33,4 @@ RSpec.describe Gitlab::BackgroundMigration::MigratePagesMetadata, schema: 201909
expect(pages_metadata.find_by_project_id(project_not_in_relation_scope.id)).to be_nil
end
end
-
- describe '#perform' do
- it 'creates relation and delegates to #perform_on_relation' do
- expect(migrate_pages_metadata).to receive(:perform_on_relation).with(projects.where(id: 3..5))
-
- migrate_pages_metadata.perform(3, 5)
- end
- end
end
diff --git a/spec/lib/gitlab/background_migration/steal_migrate_merge_request_diff_commit_users_spec.rb b/spec/lib/gitlab/background_migration/steal_migrate_merge_request_diff_commit_users_spec.rb
new file mode 100644
index 00000000000..f2fb2ab6b6e
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/steal_migrate_merge_request_diff_commit_users_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::StealMigrateMergeRequestDiffCommitUsers do
+ let(:migration) { described_class.new }
+
+ describe '#perform' do
+ it 'processes the background migration' do
+ spy = instance_spy(
+ Gitlab::BackgroundMigration::MigrateMergeRequestDiffCommitUsers
+ )
+
+ allow(Gitlab::BackgroundMigration::MigrateMergeRequestDiffCommitUsers)
+ .to receive(:new)
+ .and_return(spy)
+
+ expect(spy).to receive(:perform).with(1, 4)
+ expect(migration).to receive(:schedule_next_job)
+
+ migration.perform(1, 4)
+ end
+ end
+
+ describe '#schedule_next_job' do
+ it 'schedules the next job in ascending order' do
+ Gitlab::Database::BackgroundMigrationJob.create!(
+ class_name: 'MigrateMergeRequestDiffCommitUsers',
+ arguments: [10, 20]
+ )
+
+ Gitlab::Database::BackgroundMigrationJob.create!(
+ class_name: 'MigrateMergeRequestDiffCommitUsers',
+ arguments: [40, 50]
+ )
+
+ expect(BackgroundMigrationWorker)
+ .to receive(:perform_in)
+ .with(5.minutes, 'StealMigrateMergeRequestDiffCommitUsers', [10, 20])
+
+ migration.schedule_next_job
+ end
+
+ it 'does not schedule any new jobs when there are none' do
+ expect(BackgroundMigrationWorker).not_to receive(:perform_in)
+
+ migration.schedule_next_job
+ end
+ end
+end
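
Note: these specs describe a migration that delegates the current range to MigrateMergeRequestDiffCommitUsers and then re-enqueues itself for the next pending job, in ascending order, with a five-minute delay. A rough sketch of schedule_next_job consistent with those expectations (the scope names on BackgroundMigrationJob are assumptions):

    # Hypothetical sketch of scheduling the next pending job.
    def schedule_next_job
      next_job = Gitlab::Database::BackgroundMigrationJob
        .where(class_name: 'MigrateMergeRequestDiffCommitUsers')
        .pending # assumed scope for jobs that have not yet succeeded
        .order(:id)
        .first

      return unless next_job

      BackgroundMigrationWorker.perform_in(
        5.minutes,
        'StealMigrateMergeRequestDiffCommitUsers',
        next_job.arguments
      )
    end
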
diff --git a/spec/lib/gitlab/changelog/config_spec.rb b/spec/lib/gitlab/changelog/config_spec.rb
index a464c1e57e5..c410ba4d116 100644
--- a/spec/lib/gitlab/changelog/config_spec.rb
+++ b/spec/lib/gitlab/changelog/config_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::Changelog::Config do
+ include ProjectForksHelper
+
let(:project) { build_stubbed(:project) }
describe '.from_git' do
@@ -13,7 +15,7 @@ RSpec.describe Gitlab::Changelog::Config do
expect(described_class)
.to receive(:from_hash)
- .with(project, 'date_format' => '%Y')
+ .with(project, { 'date_format' => '%Y' }, nil)
described_class.from_git(project)
end
@@ -33,12 +35,25 @@ RSpec.describe Gitlab::Changelog::Config do
describe '.from_hash' do
it 'sets the configuration according to a Hash' do
+ user1 = create(:user)
+ user2 = create(:user)
+ user3 = create(:user)
+ group = create(:group, path: 'group')
+ group2 = create(:group, path: 'group-path')
+ group.add_developer(user1)
+ group.add_developer(user2)
+ group2.add_developer(user3)
+
config = described_class.from_hash(
project,
- 'date_format' => 'foo',
- 'template' => 'bar',
- 'categories' => { 'foo' => 'bar' },
- 'tag_regex' => 'foo'
+ {
+ 'date_format' => 'foo',
+ 'template' => 'bar',
+ 'categories' => { 'foo' => 'bar' },
+ 'tag_regex' => 'foo',
+ 'include_groups' => %w[group group-path non-existent-group]
+ },
+ user1
)
expect(config.date_format).to eq('foo')
@@ -47,6 +62,7 @@ RSpec.describe Gitlab::Changelog::Config do
expect(config.categories).to eq({ 'foo' => 'bar' })
expect(config.tag_regex).to eq('foo')
+ expect(config.always_credit_user_ids).to match_array([user1.id, user2.id, user3.id])
end
it 'raises Error when the categories are not a Hash' do
@@ -66,20 +82,33 @@ RSpec.describe Gitlab::Changelog::Config do
end
describe '#contributor?' do
- it 'returns true if a user is a contributor' do
- user = build_stubbed(:author)
+ let(:project) { create(:project, :public, :repository) }
- allow(project.team).to receive(:contributor?).with(user).and_return(true)
-
- expect(described_class.new(project).contributor?(user)).to eq(true)
- end
+ context 'when user is a member of project' do
+ let(:user) { create(:user) }
- it "returns true if a user isn't a contributor" do
- user = build_stubbed(:author)
+ before do
+ project.add_developer(user)
+ end
- allow(project.team).to receive(:contributor?).with(user).and_return(false)
+ it { expect(described_class.new(project).contributor?(user)).to eq(false) }
+ end
- expect(described_class.new(project).contributor?(user)).to eq(false)
+ context 'when user has at least one merge request merged into default_branch' do
+ let(:contributor) { create(:user) }
+ let(:user_without_access) { create(:user) }
+ let(:user_fork) { fork_project(project, contributor, repository: true) }
+
+ before do
+ create(:merge_request, :merged,
+ author: contributor,
+ target_project: project,
+ source_project: user_fork,
+ target_branch: project.default_branch.to_s)
+ end
+
+ it { expect(described_class.new(project).contributor?(contributor)).to eq(true) }
+ it { expect(described_class.new(project).contributor?(user_without_access)).to eq(false) }
end
end
@@ -107,4 +136,55 @@ RSpec.describe Gitlab::Changelog::Config do
expect(config.format_date(time)).to eq('2021-01-05')
end
end
+
+ describe '#always_credit_author?' do
+ let_it_be(:group_member) { create(:user) }
+ let_it_be(:non_group_member) { create(:user) }
+ let_it_be(:group) { create(:group, :private, path: 'group') }
+
+ before do
+ group.add_developer(group_member)
+ end
+
+ context 'when include_groups is defined' do
+ context 'when user generating changelog has access to group' do
+ it 'returns whether author should always be credited' do
+ config = described_class.from_hash(
+ project,
+ { 'include_groups' => ['group'] },
+ group_member
+ )
+
+ expect(config.always_credit_author?(group_member)).to eq(true)
+ expect(config.always_credit_author?(non_group_member)).to eq(false)
+ end
+ end
+
+ context 'when user generating changelog has no access to group' do
+ it 'always returns false' do
+ config = described_class.from_hash(
+ project,
+ { 'include_groups' => ['group'] },
+ non_group_member
+ )
+
+ expect(config.always_credit_author?(group_member)).to eq(false)
+ expect(config.always_credit_author?(non_group_member)).to eq(false)
+ end
+ end
+ end
+
+ context 'when include_groups is not defined' do
+ it 'always returns false' do
+ config = described_class.from_hash(
+ project,
+ {},
+ group_member
+ )
+
+ expect(config.always_credit_author?(group_member)).to eq(false)
+ expect(config.always_credit_author?(non_group_member)).to eq(false)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/changelog/release_spec.rb b/spec/lib/gitlab/changelog/release_spec.rb
index f95244d6750..d8434821640 100644
--- a/spec/lib/gitlab/changelog/release_spec.rb
+++ b/spec/lib/gitlab/changelog/release_spec.rb
@@ -94,6 +94,30 @@ RSpec.describe Gitlab::Changelog::Release do
end
end
+ context 'when the author should always be credited' do
+ it 'includes the author' do
+ allow(config).to receive(:contributor?).with(author).and_return(false)
+ allow(config).to receive(:always_credit_author?).with(author).and_return(true)
+
+ release.add_entry(
+ title: 'Entry title',
+ commit: commit,
+ category: 'fixed',
+ author: author
+ )
+
+ expect(release.to_markdown).to eq(<<~OUT)
+ ## 1.0.0 (2021-01-05)
+
+ ### fixed (1 change)
+
+ - [Entry title](#{commit.to_reference(full: true)}) \
+ by #{author.to_reference(full: true)}
+
+ OUT
+ end
+ end
+
context 'when a category has no entries' do
it "isn't included in the output" do
config.categories['kittens'] = 'Kittens'
diff --git a/spec/lib/gitlab/chat/command_spec.rb b/spec/lib/gitlab/chat/command_spec.rb
index 89c693daaa0..d99c07d1fa3 100644
--- a/spec/lib/gitlab/chat/command_spec.rb
+++ b/spec/lib/gitlab/chat/command_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe Gitlab::Chat::Command do
let(:pipeline) { command.create_pipeline }
before do
- stub_ci_pipeline_yaml_file(gitlab_ci_yaml)
+ stub_ci_pipeline_to_return_yaml_file
project.add_developer(chat_name.user)
end
diff --git a/spec/lib/gitlab/checks/changes_access_spec.rb b/spec/lib/gitlab/checks/changes_access_spec.rb
index 4a74dfcec34..633c4baa931 100644
--- a/spec/lib/gitlab/checks/changes_access_spec.rb
+++ b/spec/lib/gitlab/checks/changes_access_spec.rb
@@ -8,53 +8,35 @@ RSpec.describe Gitlab::Checks::ChangesAccess do
subject { changes_access }
describe '#validate!' do
- shared_examples '#validate!' do
- before do
- allow(project).to receive(:lfs_enabled?).and_return(true)
- end
-
- context 'without failed checks' do
- it "doesn't raise an error" do
- expect { subject.validate! }.not_to raise_error
- end
-
- it 'calls lfs checks' do
- expect_next_instance_of(Gitlab::Checks::LfsCheck) do |instance|
- expect(instance).to receive(:validate!)
- end
+ before do
+ allow(project).to receive(:lfs_enabled?).and_return(true)
+ end
- subject.validate!
- end
+ context 'without failed checks' do
+ it "doesn't raise an error" do
+ expect { subject.validate! }.not_to raise_error
end
- context 'when time limit was reached' do
- it 'raises a TimeoutError' do
- logger = Gitlab::Checks::TimedLogger.new(start_time: timeout.ago, timeout: timeout)
- access = described_class.new(changes,
- project: project,
- user_access: user_access,
- protocol: protocol,
- logger: logger)
-
- expect { access.validate! }.to raise_error(Gitlab::Checks::TimedLogger::TimeoutError)
+ it 'calls lfs checks' do
+ expect_next_instance_of(Gitlab::Checks::LfsCheck) do |instance|
+ expect(instance).to receive(:validate!)
end
- end
- end
- context 'with batched commits enabled' do
- before do
- stub_feature_flags(changes_batch_commits: true)
+ subject.validate!
end
-
- it_behaves_like '#validate!'
end
- context 'with batched commits disabled' do
- before do
- stub_feature_flags(changes_batch_commits: false)
- end
+ context 'when time limit was reached' do
+ it 'raises a TimeoutError' do
+ logger = Gitlab::Checks::TimedLogger.new(start_time: timeout.ago, timeout: timeout)
+ access = described_class.new(changes,
+ project: project,
+ user_access: user_access,
+ protocol: protocol,
+ logger: logger)
- it_behaves_like '#validate!'
+ expect { access.validate! }.to raise_error(Gitlab::Checks::TimedLogger::TimeoutError)
+ end
end
end
@@ -192,6 +174,101 @@ RSpec.describe Gitlab::Checks::ChangesAccess do
end
end
+ describe '#single_change_accesses' do
+ let(:commits_for) { {} }
+ let(:expected_accesses) { [] }
+
+ shared_examples '#single_change_access' do
+ before do
+ commits_for.each do |id, commits|
+ expect(subject)
+ .to receive(:commits_for)
+ .with(id)
+ .and_return(commits)
+ end
+ end
+
+ it 'returns an array of SingleChangeAccess' do
+ # Commits are wrapped in a Gitlab::Lazy and thus need to be resolved
+ # first such that we can directly compare types.
+ actual_accesses = subject.single_change_accesses
+ .each { |access| access.instance_variable_set(:@commits, access.commits.to_a) }
+
+ expect(actual_accesses).to match_array(expected_accesses)
+ end
+ end
+
+ context 'with no changes' do
+ let(:changes) { [] }
+
+ it_behaves_like '#single_change_access'
+ end
+
+ context 'with a single change and no new commits' do
+ let(:commits_for) { { 'new' => [] } }
+ let(:changes) do
+ [
+ { oldrev: 'old', newrev: 'new', ref: 'refs/heads/branch' }
+ ]
+ end
+
+ let(:expected_accesses) do
+ [
+ have_attributes(oldrev: 'old', newrev: 'new', ref: 'refs/heads/branch', commits: [])
+ ]
+ end
+
+ it_behaves_like '#single_change_access'
+ end
+
+ context 'with a single change and new commits' do
+ let(:commits_for) { { 'new' => [create_commit('new', [])] } }
+ let(:changes) do
+ [
+ { oldrev: 'old', newrev: 'new', ref: 'refs/heads/branch' }
+ ]
+ end
+
+ let(:expected_accesses) do
+ [
+ have_attributes(oldrev: 'old', newrev: 'new', ref: 'refs/heads/branch', commits: [create_commit('new', [])])
+ ]
+ end
+
+ it_behaves_like '#single_change_access'
+ end
+
+ context 'with multiple changes' do
+ let(:commits_for) do
+ {
+ 'a' => [create_commit('a', [])],
+ 'c' => [create_commit('c', [])],
+ 'd' => []
+ }
+ end
+
+ let(:changes) do
+ [
+ { newrev: 'a', ref: 'refs/heads/a' },
+ { oldrev: 'b', ref: 'refs/heads/b' },
+ { oldrev: 'a', newrev: 'c', ref: 'refs/heads/c' },
+ { newrev: 'd', ref: 'refs/heads/d' }
+ ]
+ end
+
+ let(:expected_accesses) do
+ [
+ have_attributes(newrev: 'a', ref: 'refs/heads/a', commits: [create_commit('a', [])]),
+ have_attributes(oldrev: 'b', ref: 'refs/heads/b', commits: []),
+ have_attributes(oldrev: 'a', newrev: 'c', ref: 'refs/heads/c', commits: [create_commit('c', [])]),
+ have_attributes(newrev: 'd', ref: 'refs/heads/d', commits: [])
+ ]
+ end
+
+ it_behaves_like '#single_change_access'
+ end
+ end
+
def create_commit(id, parent_ids)
Gitlab::Git::Commit.new(project.repository, {
id: id,
diff --git a/spec/lib/gitlab/ci/config/entry/job_spec.rb b/spec/lib/gitlab/ci/config/entry/job_spec.rb
index 5b47d3a3922..0bb26babfc0 100644
--- a/spec/lib/gitlab/ci/config/entry/job_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/job_spec.rb
@@ -169,6 +169,22 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
it { expect(entry).to be_valid }
end
end
+
+ context 'when rules are used' do
+ let(:config) { { script: 'ls', cache: { key: 'test' }, rules: rules } }
+
+ let(:rules) do
+ [
+ { if: '$CI_PIPELINE_SOURCE == "schedule"', when: 'never' },
+ [
+ { if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH' },
+ { if: '$CI_PIPELINE_SOURCE == "merge_request_event"' }
+ ]
+ ]
+ end
+
+ it { expect(entry).to be_valid }
+ end
end
context 'when entry value is not correct' do
@@ -485,6 +501,70 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
end
end
end
+
+ context 'when invalid rules are used' do
+ let(:config) { { script: 'ls', cache: { key: 'test' }, rules: rules } }
+
+ context 'with rules nested more than max allowed levels' do
+ let(:sample_rule) { { if: '$THIS == "other"', when: 'always' } }
+
+ let(:rules) do
+ [
+ { if: '$THIS == "that"', when: 'always' },
+ [
+ { if: '$SKIP', when: 'never' },
+ [
+ sample_rule,
+ [
+ sample_rule,
+ [
+ sample_rule,
+ [
+ sample_rule,
+ [
+ sample_rule,
+ [
+ sample_rule,
+ [
+ sample_rule,
+ [
+ sample_rule,
+ [
+ sample_rule,
+ [
+ sample_rule,
+ [sample_rule]
+ ]
+ ]
+ ]
+ ]
+ ]
+ ]
+ ]
+ ]
+ ]
+ ]
+ ]
+ ]
+ end
+
+ it { expect(entry).not_to be_valid }
+ end
+
+ context 'with rules with invalid keys' do
+ let(:rules) do
+ [
+ { invalid_key: 'invalid' },
+ [
+ { if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH' },
+ { if: '$CI_PIPELINE_SOURCE == "merge_request_event"' }
+ ]
+ ]
+ end
+
+ it { expect(entry).not_to be_valid }
+ end
+ end
end
end
@@ -618,6 +698,29 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
end
end
end
+
+ context 'when job is using tags' do
+ context 'when limit is reached' do
+ let(:tags) { Array.new(100) { |i| "tag-#{i}" } }
+ let(:config) { { tags: tags, script: 'test' } }
+
+ it 'returns error', :aggregate_failures do
+ expect(entry).not_to be_valid
+ expect(entry.errors)
+ .to include "tags config must be less than the limit of #{Gitlab::Ci::Config::Entry::Tags::TAGS_LIMIT} tags"
+ end
+ end
+
+ context 'when limit is not reached' do
+ let(:config) { { tags: %w[tag1 tag2], script: 'test' } }
+
+ it 'returns a valid entry', :aggregate_failures do
+ expect(entry).to be_valid
+ expect(entry.errors).to be_empty
+ expect(entry.tags).to eq(%w[tag1 tag2])
+ end
+ end
+ end
end
describe '#manual_action?' do
diff --git a/spec/lib/gitlab/ci/config/entry/rules_spec.rb b/spec/lib/gitlab/ci/config/entry/rules_spec.rb
index 91252378541..cfec33003e4 100644
--- a/spec/lib/gitlab/ci/config/entry/rules_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/rules_spec.rb
@@ -53,7 +53,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules do
let(:config) do
[
{ if: '$THIS == "that"', when: 'always' },
- [{ if: '$SKIP', when: 'never' }]
+ [{ if: '$SKIP', when: 'never' }, { if: '$THIS == "other"', when: 'always' }]
]
end
@@ -64,11 +64,11 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules do
let(:config) do
[
{ if: '$THIS == "that"', when: 'always' },
- [{ if: '$SKIP', when: 'never' }, [{ if: '$THIS == "other"', when: 'aways' }]]
+ [{ if: '$SKIP', when: 'never' }, [{ if: '$THIS == "other"', when: 'always' }]]
]
end
- it { is_expected.not_to be_valid }
+ it { is_expected.to be_valid }
end
end
@@ -119,7 +119,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules do
context 'with rules nested more than one level' do
let(:first_rule) { { if: '$THIS == "that"', when: 'always' } }
let(:second_rule) { { if: '$SKIP', when: 'never' } }
- let(:third_rule) { { if: '$THIS == "other"', when: 'aways' } }
+ let(:third_rule) { { if: '$THIS == "other"', when: 'always' } }
let(:config) do
[
diff --git a/spec/lib/gitlab/ci/config/entry/tags_spec.rb b/spec/lib/gitlab/ci/config/entry/tags_spec.rb
new file mode 100644
index 00000000000..79317de373b
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/tags_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Entry::Tags do
+ let(:entry) { described_class.new(config) }
+
+ describe 'validation' do
+ context 'when tags config value is correct' do
+ let(:config) { %w[tag1 tag2] }
+
+ describe '#value' do
+ it 'returns tags configuration' do
+ expect(entry.value).to eq config
+ end
+ end
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+ end
+
+ context 'when entry value is not correct' do
+ describe '#errors' do
+ context 'when tags config is not an array of strings' do
+ let(:config) { [1, 2] }
+
+ it 'reports error' do
+ expect(entry.errors)
+ .to include 'tags config should be an array of strings'
+ end
+ end
+
+ context 'when tags limit is reached' do
+ let(:config) { Array.new(50) {|i| "tag-#{i}" } }
+
+ context 'when ci_build_tags_limit is enabled' do
+ before do
+ stub_feature_flags(ci_build_tags_limit: true)
+ end
+
+ it 'reports error' do
+ expect(entry.errors)
+ .to include "tags config must be less than the limit of #{described_class::TAGS_LIMIT} tags"
+ end
+ end
+
+ context 'when ci_build_tags_limit is disabled' do
+ before do
+ stub_feature_flags(ci_build_tags_limit: false)
+ end
+
+ it 'does not report an error' do
+ expect(entry.errors).to be_empty
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/cron_parser_spec.rb b/spec/lib/gitlab/ci/cron_parser_spec.rb
index 15293429354..4017accb462 100644
--- a/spec/lib/gitlab/ci/cron_parser_spec.rb
+++ b/spec/lib/gitlab/ci/cron_parser_spec.rb
@@ -297,4 +297,65 @@ RSpec.describe Gitlab::Ci::CronParser do
it { is_expected.to eq(true) }
end
end
+
+ describe '.parse_natural', :aggregate_failures do
+ let(:cron_line) { described_class.parse_natural_with_timestamp(time, { unit: 'day', duration: 1 }) }
+ let(:time) { Time.parse('Mon, 30 Aug 2021 06:29:44.067132000 UTC +00:00') }
+ let(:hours) { Fugit::Cron.parse(cron_line).hours }
+ let(:minutes) { Fugit::Cron.parse(cron_line).minutes }
+ let(:weekdays) { Fugit::Cron.parse(cron_line).weekdays.first }
+ let(:months) { Fugit::Cron.parse(cron_line).months }
+
+ context 'when repeat cycle is day' do
+ it 'generates daily cron expression', :aggregate_failures do
+ expect(hours).to include time.hour
+ expect(minutes).to include time.min
+ end
+ end
+
+ context 'when repeat cycle is week' do
+ let(:cron_line) { described_class.parse_natural_with_timestamp(time, { unit: 'week', duration: 1 }) }
+
+ it 'generates weekly cron expression', :aggregate_failures do
+ expect(hours).to include time.hour
+ expect(minutes).to include time.min
+ expect(weekdays).to include time.wday
+ end
+ end
+
+ context 'when repeat cycle is month' do
+ let(:cron_line) { described_class.parse_natural_with_timestamp(time, { unit: 'month', duration: 3 }) }
+
+ it 'generates monthly cron expression', :aggregate_failures do
+ expect(minutes).to include time.min
+ expect(months).to include time.month
+ end
+
+ context 'when an unsupported duration is specified' do
+ subject { described_class.parse_natural_with_timestamp(time, { unit: 'month', duration: 7 }) }
+
+ it 'raises an exception' do
+ expect { subject }.to raise_error(NotImplementedError, 'The cadence {:unit=>"month", :duration=>7} is not supported')
+ end
+ end
+ end
+
+ context 'when repeat cycle is year' do
+ let(:cron_line) { described_class.parse_natural_with_timestamp(time, { unit: 'year', duration: 1 }) }
+
+ it 'generates yearly cron expression', :aggregate_failures do
+ expect(hours).to include time.hour
+ expect(minutes).to include time.min
+ expect(months).to include time.month
+ end
+ end
+
+ context 'when the repeat cycle is not implemented' do
+ subject { described_class.parse_natural_with_timestamp(time, { unit: 'quarterly', duration: 1 }) }
+
+ it 'raises an exception' do
+ expect { subject }.to raise_error(NotImplementedError, 'The cadence unit quarterly is not implemented')
+ end
+ end
+ end
end
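
The weekly expectation above boils down to checking the parsed cron fields with Fugit. A small sketch, assuming the generated expression for Monday 06:29 looks roughly like '29 6 * * 1' (the exact format is an implementation detail of parse_natural_with_timestamp):

require 'fugit'

cron = Fugit::Cron.parse('29 6 * * 1')
cron.minutes        # => [29] -- matches time.min
cron.hours          # => [6]  -- matches time.hour
cron.weekdays.first # includes 1 (Monday), i.e. time.wday
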
diff --git a/spec/lib/gitlab/ci/parsers/security/common_spec.rb b/spec/lib/gitlab/ci/parsers/security/common_spec.rb
index c6387bf615b..c49673f5a4a 100644
--- a/spec/lib/gitlab/ci/parsers/security/common_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/security/common_spec.rb
@@ -1,7 +1,5 @@
# frozen_string_literal: true
-# TODO remove duplication from spec/lib/gitlab/ci/parsers/security/common_spec.rb and spec/lib/gitlab/ci/parsers/security/common_spec.rb
-# See https://gitlab.com/gitlab-org/gitlab/-/issues/336589
require 'spec_helper'
RSpec.describe Gitlab::Ci::Parsers::Security::Common do
@@ -15,11 +13,18 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common do
# The path 'yarn.lock' was initially used by DependencyScanning, it is okay for SAST locations to use it, but this could be made better
let(:location) { ::Gitlab::Ci::Reports::Security::Locations::Sast.new(file_path: 'yarn.lock', start_line: 1, end_line: 1) }
let(:tracking_data) { nil }
+ let(:vulnerability_flags_data) do
+ [
+ ::Gitlab::Ci::Reports::Security::Flag.new(type: 'flagged-as-likely-false-positive', origin: 'post analyzer X', description: 'static string to sink'),
+ ::Gitlab::Ci::Reports::Security::Flag.new(type: 'flagged-as-likely-false-positive', origin: 'post analyzer Y', description: 'integer to sink')
+ ]
+ end
before do
allow_next_instance_of(described_class) do |parser|
allow(parser).to receive(:create_location).and_return(location)
allow(parser).to receive(:tracking_data).and_return(tracking_data)
+ allow(parser).to receive(:create_flags).and_return(vulnerability_flags_data)
end
artifact.each_blob { |blob| described_class.parse!(blob, report, vulnerability_finding_signatures_enabled) }
@@ -233,6 +238,17 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Common do
end
end
+ describe 'parsing flags' do
+ it 'returns flags object for each finding' do
+ flags = report.findings.first.flags
+
+ expect(flags).to contain_exactly(
+ have_attributes(type: 'flagged-as-likely-false-positive', origin: 'post analyzer X', description: 'static string to sink'),
+ have_attributes(type: 'flagged-as-likely-false-positive', origin: 'post analyzer Y', description: 'integer to sink')
+ )
+ end
+ end
+
describe 'parsing links' do
it 'returns links object for each finding', :aggregate_failures do
links = report.findings.flat_map(&:links)
diff --git a/spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb b/spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb
index f434ffd12bf..951e0576a58 100644
--- a/spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb
@@ -6,7 +6,8 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
using RSpec::Parameterized::TableSyntax
where(:report_type, :expected_errors, :valid_data) do
- :sast | ['root is missing required keys: vulnerabilities'] | { 'version' => '10.0.0', 'vulnerabilities' => [] }
+ 'sast' | ['root is missing required keys: vulnerabilities'] | { 'version' => '10.0.0', 'vulnerabilities' => [] }
+ :sast | ['root is missing required keys: vulnerabilities'] | { 'version' => '10.0.0', 'vulnerabilities' => [] }
:secret_detection | ['root is missing required keys: vulnerabilities'] | { 'version' => '10.0.0', 'vulnerabilities' => [] }
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/build/associations_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/build/associations_spec.rb
index 5fa414f5bd1..32c92724f62 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/build/associations_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/build/associations_spec.rb
@@ -3,10 +3,16 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Chain::Build::Associations do
- let(:project) { create(:project, :repository) }
- let(:user) { create(:user, developer_projects: [project]) }
+ let_it_be_with_reload(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user, developer_projects: [project]) }
+
let(:pipeline) { Ci::Pipeline.new }
- let(:step) { described_class.new(pipeline, command) }
+ let(:bridge) { nil }
+
+ let(:variables_attributes) do
+ [{ key: 'first', secret_value: 'world' },
+ { key: 'second', secret_value: 'second_world' }]
+ end
let(:command) do
Gitlab::Ci::Pipeline::Chain::Command.new(
@@ -20,7 +26,26 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Build::Associations do
merge_request: nil,
project: project,
current_user: user,
- bridge: bridge)
+ bridge: bridge,
+ variables_attributes: variables_attributes)
+ end
+
+ let(:step) { described_class.new(pipeline, command) }
+
+ shared_examples 'breaks the chain' do
+ it 'returns true' do
+ step.perform!
+
+ expect(step.break?).to be true
+ end
+ end
+
+ shared_examples 'does not break the chain' do
+ it 'returns false' do
+ step.perform!
+
+ expect(step.break?).to be false
+ end
end
context 'when a bridge is passed in to the pipeline creation' do
@@ -37,26 +62,83 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Build::Associations do
)
end
- it 'never breaks the chain' do
- step.perform!
-
- expect(step.break?).to eq(false)
- end
+ it_behaves_like 'does not break the chain'
end
context 'when a bridge is not passed in to the pipeline creation' do
- let(:bridge) { nil }
-
it 'leaves the source pipeline empty' do
step.perform!
expect(pipeline.source_pipeline).to be_nil
end
- it 'never breaks the chain' do
+ it_behaves_like 'does not break the chain'
+ end
+
+ it 'sets pipeline variables' do
+ step.perform!
+
+ expect(pipeline.variables.map { |var| var.slice(:key, :secret_value) })
+ .to eq variables_attributes.map(&:with_indifferent_access)
+ end
+
+ context 'when project setting restrict_user_defined_variables is enabled' do
+ before do
+ project.update!(restrict_user_defined_variables: true)
+ end
+
+ context 'when user is developer' do
+ it_behaves_like 'breaks the chain'
+
+ it 'returns an error on variables_attributes', :aggregate_failures do
+ step.perform!
+
+ expect(pipeline.errors.full_messages).to eq(['Insufficient permissions to set pipeline variables'])
+ expect(pipeline.variables).to be_empty
+ end
+
+ context 'when variables_attributes is not specified' do
+ let(:variables_attributes) { nil }
+
+ it_behaves_like 'does not break the chain'
+
+ it 'assigns empty variables' do
+ step.perform!
+
+ expect(pipeline.variables).to be_empty
+ end
+ end
+ end
+
+ context 'when user is maintainer' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ it_behaves_like 'does not break the chain'
+
+ it 'assigns variables_attributes' do
+ step.perform!
+
+ expect(pipeline.variables.map { |var| var.slice(:key, :secret_value) })
+ .to eq variables_attributes.map(&:with_indifferent_access)
+ end
+ end
+ end
+
+ context 'with duplicate pipeline variables' do
+ let(:variables_attributes) do
+ [{ key: 'first', secret_value: 'world' },
+ { key: 'first', secret_value: 'second_world' }]
+ end
+
+ it_behaves_like 'breaks the chain'
+
+ it 'returns an error for variables_attributes' do
step.perform!
- expect(step.break?).to eq(false)
+ expect(pipeline.errors.full_messages).to eq(['Duplicate variable name: first'])
+ expect(pipeline.variables).to be_empty
end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb
index 7771289abe6..dca2204f544 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb
@@ -8,11 +8,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Build do
let(:pipeline) { Ci::Pipeline.new }
- let(:variables_attributes) do
- [{ key: 'first', secret_value: 'world' },
- { key: 'second', secret_value: 'second_world' }]
- end
-
let(:command) do
Gitlab::Ci::Pipeline::Chain::Command.new(
source: :push,
@@ -24,100 +19,26 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Build do
schedule: nil,
merge_request: nil,
project: project,
- current_user: user,
- variables_attributes: variables_attributes)
+ current_user: user)
end
let(:step) { described_class.new(pipeline, command) }
- shared_examples 'builds pipeline' do
- it 'builds a pipeline with the expected attributes' do
- step.perform!
-
- expect(pipeline.sha).not_to be_empty
- expect(pipeline.sha).to eq project.commit.id
- expect(pipeline.ref).to eq 'master'
- expect(pipeline.tag).to be false
- expect(pipeline.user).to eq user
- expect(pipeline.project).to eq project
- end
- end
-
- shared_examples 'breaks the chain' do
- it 'returns true' do
- step.perform!
-
- expect(step.break?).to be true
- end
- end
-
- shared_examples 'does not break the chain' do
- it 'returns false' do
- step.perform!
-
- expect(step.break?).to be false
- end
- end
-
- before do
- stub_ci_pipeline_yaml_file(gitlab_ci_yaml)
- end
-
- it_behaves_like 'does not break the chain'
- it_behaves_like 'builds pipeline'
-
- it 'sets pipeline variables' do
+ it 'does not break the chain' do
step.perform!
- expect(pipeline.variables.map { |var| var.slice(:key, :secret_value) })
- .to eq variables_attributes.map(&:with_indifferent_access)
+ expect(step.break?).to be false
end
- context 'when project setting restrict_user_defined_variables is enabled' do
- before do
- project.update!(restrict_user_defined_variables: true)
- end
-
- context 'when user is developer' do
- it_behaves_like 'breaks the chain'
- it_behaves_like 'builds pipeline'
-
- it 'returns an error on variables_attributes', :aggregate_failures do
- step.perform!
-
- expect(pipeline.errors.full_messages).to eq(['Insufficient permissions to set pipeline variables'])
- expect(pipeline.variables).to be_empty
- end
-
- context 'when variables_attributes is not specified' do
- let(:variables_attributes) { nil }
-
- it_behaves_like 'does not break the chain'
- it_behaves_like 'builds pipeline'
-
- it 'assigns empty variables' do
- step.perform!
-
- expect(pipeline.variables).to be_empty
- end
- end
- end
-
- context 'when user is maintainer' do
- before do
- project.add_maintainer(user)
- end
-
- it_behaves_like 'does not break the chain'
- it_behaves_like 'builds pipeline'
-
- it 'assigns variables_attributes' do
- step.perform!
+ it 'builds a pipeline with the expected attributes' do
+ step.perform!
- expect(pipeline.variables.map { |var| var.slice(:key, :secret_value) })
- .to eq variables_attributes.map(&:with_indifferent_access)
- end
- end
+ expect(pipeline.sha).not_to be_empty
+ expect(pipeline.sha).to eq project.commit.id
+ expect(pipeline.ref).to eq 'master'
+ expect(pipeline.tag).to be false
+ expect(pipeline.user).to eq user
+ expect(pipeline.project).to eq project
end
it 'returns a valid pipeline' do
diff --git a/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb
index 2727f2603cd..27a5abf988c 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb
@@ -44,6 +44,14 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::CancelPendingPipelines do
expect(build_statuses(pipeline)).to contain_exactly('pending')
end
+ it 'cancels the builds with 2 queries to avoid query timeout' do
+ second_query_regex = /WHERE "ci_pipelines"\."id" = \d+ AND \(NOT EXISTS/
+ recorder = ActiveRecord::QueryRecorder.new { perform }
+ second_query = recorder.occurrences.keys.filter { |occ| occ =~ second_query_regex }
+
+ expect(second_query).to be_one
+ end
+
context 'when the previous pipeline has a child pipeline' do
let(:child_pipeline) { create(:ci_pipeline, child_of: prev_pipeline) }
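
The query-count assertion above relies on GitLab's ActiveRecord::QueryRecorder spec helper; a minimal usage sketch, where the block contents are placeholders rather than the actual code under test:

recorder = ActiveRecord::QueryRecorder.new do
  Ci::Pipeline.where(project_id: project.id).load # any work whose SQL should be captured
end

recorder.count            # number of SQL statements executed inside the block
recorder.occurrences.keys # the captured SQL strings, which can be matched against a regex as above
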
diff --git a/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb
index c22a0e23794..0d78ce3440a 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb
@@ -341,4 +341,40 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Command do
end
end
end
+
+ describe '#observe_step_duration' do
+ context 'when ci_pipeline_creation_step_duration_tracking is enabled' do
+ it 'adds the duration to the step duration histogram' do
+ histogram = double(:histogram)
+ duration = 1.hour
+
+ expect(::Gitlab::Ci::Pipeline::Metrics).to receive(:pipeline_creation_step_duration_histogram)
+ .and_return(histogram)
+ expect(histogram).to receive(:observe)
+ .with({ step: 'Gitlab::Ci::Pipeline::Chain::Build' }, duration.seconds)
+
+ described_class.new.observe_step_duration(
+ Gitlab::Ci::Pipeline::Chain::Build,
+ duration
+ )
+ end
+ end
+
+ context 'when ci_pipeline_creation_step_duration_tracking is disabled' do
+ before do
+ stub_feature_flags(ci_pipeline_creation_step_duration_tracking: false)
+ end
+
+ it 'does nothing' do
+ duration = 1.hour
+
+ expect(::Gitlab::Ci::Pipeline::Metrics).not_to receive(:pipeline_creation_step_duration_histogram)
+
+ described_class.new.observe_step_duration(
+ Gitlab::Ci::Pipeline::Chain::Build,
+ duration
+ )
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/config/content_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/config/content_spec.rb
index 42ec9ab6f5d..e0d656f456e 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/config/content_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/config/content_spec.rb
@@ -92,6 +92,27 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Config::Content do
expect(pipeline.pipeline_config.content).to eq(config_content_result)
expect(command.config_content).to eq(config_content_result)
end
+
+ context 'when path specifies a refname' do
+ let(:ci_config_path) { 'path/to/.gitlab-ci.yml@another-group/another-repo:refname' }
+ let(:config_content_result) do
+ <<~EOY
+ ---
+ include:
+ - project: another-group/another-repo
+ file: path/to/.gitlab-ci.yml
+ ref: refname
+ EOY
+ end
+
+ it 'builds root config including the path and refname to another repository' do
+ subject.perform!
+
+ expect(pipeline.config_source).to eq 'external_project_source'
+ expect(pipeline.pipeline_config.content).to eq(config_content_result)
+ expect(command.config_content).to eq(config_content_result)
+ end
+ end
end
context 'when config is defined in the default .gitlab-ci.yml' do
diff --git a/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb
index 83d47ae6819..e8eb3333b88 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb
@@ -8,8 +8,8 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Sequence do
let(:pipeline) { build_stubbed(:ci_pipeline) }
let(:command) { Gitlab::Ci::Pipeline::Chain::Command.new(project: project) }
- let(:first_step) { spy('first step') }
- let(:second_step) { spy('second step') }
+ let(:first_step) { spy('first step', name: 'FirstStep') }
+ let(:second_step) { spy('second step', name: 'SecondStep') }
let(:sequence) { [first_step, second_step] }
let(:histogram) { spy('prometheus metric') }
@@ -61,6 +61,17 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Sequence do
expect(histogram).to have_received(:observe)
end
+ it 'adds step sequence duration to duration histogram' do
+ expect(command.metrics)
+ .to receive(:pipeline_creation_step_duration_histogram)
+ .twice
+ .and_return(histogram)
+ expect(histogram).to receive(:observe).with({ step: 'FirstStep' }, any_args).ordered
+ expect(histogram).to receive(:observe).with({ step: 'SecondStep' }, any_args).ordered
+
+ subject.build!
+ end
+
it 'records pipeline size by pipeline source in a histogram' do
allow(command.metrics)
.to receive(:pipeline_size_histogram)
diff --git a/spec/lib/gitlab/ci/pipeline/metrics_spec.rb b/spec/lib/gitlab/ci/pipeline/metrics_spec.rb
new file mode 100644
index 00000000000..83b969ff3c4
--- /dev/null
+++ b/spec/lib/gitlab/ci/pipeline/metrics_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Gitlab::Ci::Pipeline::Metrics do
+ describe '.pipeline_creation_step_duration_histogram' do
+ around do |example|
+ described_class.clear_memoization(:pipeline_creation_step_histogram)
+
+ example.run
+
+ described_class.clear_memoization(:pipeline_creation_step_histogram)
+ end
+
+ it 'adds the step to the step duration histogram' do
+ expect(::Gitlab::Metrics).to receive(:histogram)
+ .with(
+ :gitlab_ci_pipeline_creation_step_duration_seconds,
+ 'Duration of each pipeline creation step',
+ { step: nil },
+ [0.01, 0.05, 0.1, 0.5, 1.0, 2.0, 5.0, 10.0, 15.0, 20.0, 50.0, 240.0]
+ )
+
+ described_class.pipeline_creation_step_duration_histogram
+ end
+ end
+end
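
The memoization key and metric arguments asserted above suggest an implementation along these lines; this is a hypothetical shape using the usual Gitlab::Utils::StrongMemoize pattern, not the verified source:

# Hypothetical sketch inferred from the expectations above.
class Gitlab::Ci::Pipeline::Metrics
  class << self
    include Gitlab::Utils::StrongMemoize

    def pipeline_creation_step_duration_histogram
      strong_memoize(:pipeline_creation_step_histogram) do
        ::Gitlab::Metrics.histogram(
          :gitlab_ci_pipeline_creation_step_duration_seconds,
          'Duration of each pipeline creation step',
          { step: nil },
          [0.01, 0.05, 0.1, 0.5, 1.0, 2.0, 5.0, 10.0, 15.0, 20.0, 50.0, 240.0]
        )
      end
    end
  end
end
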
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
index 58938251ca1..0c28515b574 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
@@ -490,12 +490,21 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
end
context 'when job belongs to a resource group' do
- let(:attributes) { { name: 'rspec', ref: 'master', resource_group_key: 'iOS' } }
+ let(:resource_group) { 'iOS' }
+ let(:attributes) { { name: 'rspec', ref: 'master', resource_group_key: resource_group, environment: 'production' }}
it 'returns a job with resource group' do
expect(subject.resource_group).not_to be_nil
expect(subject.resource_group.key).to eq('iOS')
end
+
+ context 'when resource group has $CI_ENVIRONMENT_NAME in it' do
+ let(:resource_group) { 'test/$CI_ENVIRONMENT_NAME' }
+
+ it 'expands environment name' do
+ expect(subject.resource_group.key).to eq('test/production')
+ end
+ end
end
end
@@ -1140,16 +1149,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
it 'does not have errors' do
expect(subject.errors).to be_empty
end
-
- context 'when ci_same_stage_job_needs FF is disabled' do
- before do
- stub_feature_flags(ci_same_stage_job_needs: false)
- end
-
- it 'has errors' do
- expect(subject.errors).to contain_exactly("'rspec' job needs 'build' job, but 'build' is not in any previous stage")
- end
- end
end
context 'when using 101 needs' do
diff --git a/spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb
index 3424e7d03a3..5d8a9358e10 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb
@@ -34,10 +34,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Pipeline do
described_class.new(seed_context, stages_attributes)
end
- before do
- stub_feature_flags(ci_same_stage_job_needs: false)
- end
-
describe '#stages' do
it 'returns the stage resources' do
stages = seed.stages
diff --git a/spec/lib/gitlab/ci/reports/security/flag_spec.rb b/spec/lib/gitlab/ci/reports/security/flag_spec.rb
new file mode 100644
index 00000000000..27f83694ac2
--- /dev/null
+++ b/spec/lib/gitlab/ci/reports/security/flag_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Reports::Security::Flag do
+ subject(:security_flag) { described_class.new(type: 'flagged-as-likely-false-positive', origin: 'post analyzer X', description: 'static string to sink') }
+
+ describe '#initialize' do
+ context 'when all params are given' do
+ it 'initializes an instance' do
+ expect { subject }.not_to raise_error
+
+ expect(subject).to have_attributes(
+ type: 'flagged-as-likely-false-positive',
+ origin: 'post analyzer X',
+ description: 'static string to sink'
+ )
+ end
+ end
+
+ describe '#to_hash' do
+ it 'returns expected hash' do
+ expect(security_flag.to_hash).to eq(
+ {
+ flag_type: :false_positive,
+ origin: 'post analyzer X',
+ description: 'static string to sink'
+ }
+ )
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/trace/backoff_spec.rb b/spec/lib/gitlab/ci/trace/backoff_spec.rb
new file mode 100644
index 00000000000..0fb7e81c6c5
--- /dev/null
+++ b/spec/lib/gitlab/ci/trace/backoff_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Trace::Backoff do
+ using RSpec::Parameterized::TableSyntax
+
+ subject(:backoff) { described_class.new(archival_attempts) }
+
+ it 'keeps the MAX_ATTEMPTS limit in sync' do
+ expect(Ci::BuildTraceMetadata::MAX_ATTEMPTS).to eq(5)
+ end
+
+ it 'keeps the Redis TTL limit in sync' do
+ expect(Ci::BuildTraceChunks::RedisBase::CHUNK_REDIS_TTL).to eq(7.days)
+ end
+
+ describe '#value' do
+ where(:archival_attempts, :result) do
+ 1 | 9.6
+ 2 | 19.2
+ 3 | 28.8
+ 4 | 38.4
+ 5 | 48.0
+ end
+
+ with_them do
+ subject { backoff.value }
+
+ it { is_expected.to eq(result.hours) }
+ end
+ end
+
+ describe '#value_with_jitter' do
+ where(:archival_attempts, :min_value, :max_value) do
+ 1 | 9.6 | 13.6
+ 2 | 19.2 | 23.2
+ 3 | 28.8 | 32.8
+ 4 | 38.4 | 42.4
+ 5 | 48.0 | 52.0
+ end
+
+ with_them do
+ subject { backoff.value_with_jitter }
+
+ it { is_expected.to be_in(min_value.hours..max_value.hours) }
+ end
+ end
+
+ it 'all retries are happening under the 7 days limit' do
+ backoff_total = 1.upto(Ci::BuildTraceMetadata::MAX_ATTEMPTS).sum do |attempt|
+ backoff = described_class.new(attempt)
+ expect(backoff).to receive(:rand)
+ .with(described_class::MAX_JITTER_VALUE)
+ .and_return(described_class::MAX_JITTER_VALUE)
+
+ backoff.value_with_jitter
+ end
+
+ expect(backoff_total).to be < Ci::BuildTraceChunks::RedisBase::CHUNK_REDIS_TTL
+ end
+end
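
From the tables above the backoff appears to be linear at 9.6 hours per attempt, with at most 4 hours of jitter (13.6 - 9.6); a quick arithmetic check that the worst case fits inside the 7-day Redis TTL the spec asserts:

# Assumed formula from the tabled values: delay(attempt) = 9.6h * attempt, plus up to 4h jitter.
max_jitter_hours = 4
total_hours = (1..5).sum { |attempt| 9.6 * attempt + max_jitter_hours }
total_hours # => 164.0, comfortably below 7 * 24 = 168 hours (CHUNK_REDIS_TTL)
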
diff --git a/spec/lib/gitlab/ci/trace_spec.rb b/spec/lib/gitlab/ci/trace_spec.rb
index 69f56871740..1a31b2dad56 100644
--- a/spec/lib/gitlab/ci/trace_spec.rb
+++ b/spec/lib/gitlab/ci/trace_spec.rb
@@ -130,4 +130,18 @@ RSpec.describe Gitlab::Ci::Trace, :clean_gitlab_redis_shared_state, factory_defa
end
end
end
+
+ describe '#can_attempt_archival_now?' do
+ it 'creates the record and returns true' do
+ expect(trace.can_attempt_archival_now?).to be_truthy
+ end
+ end
+
+ describe '#increment_archival_attempts!' do
+ it 'creates the record and increments its value' do
+ expect { trace.increment_archival_attempts! }
+ .to change { build.reload.trace_metadata&.archival_attempts }.from(nil).to(1)
+ .and change { build.reload.trace_metadata&.last_archival_attempt_at }
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/variables/collection/sort_spec.rb b/spec/lib/gitlab/ci/variables/collection/sort_spec.rb
index 01eef673c35..7e4e9602a92 100644
--- a/spec/lib/gitlab/ci/variables/collection/sort_spec.rb
+++ b/spec/lib/gitlab/ci/variables/collection/sort_spec.rb
@@ -5,20 +5,10 @@ require 'rspec-parameterized'
RSpec.describe Gitlab::Ci::Variables::Collection::Sort do
describe '#initialize with non-Collection value' do
- context 'when FF :variable_inside_variable is disabled' do
- subject { Gitlab::Ci::Variables::Collection::Sort.new([]) }
+ subject { Gitlab::Ci::Variables::Collection::Sort.new([]) }
- it 'raises ArgumentError' do
- expect { subject }.to raise_error(ArgumentError, /Collection object was expected/)
- end
- end
-
- context 'when FF :variable_inside_variable is enabled' do
- subject { Gitlab::Ci::Variables::Collection::Sort.new([]) }
-
- it 'raises ArgumentError' do
- expect { subject }.to raise_error(ArgumentError, /Collection object was expected/)
- end
+ it 'raises ArgumentError' do
+ expect { subject }.to raise_error(ArgumentError, /Collection object was expected/)
end
end
@@ -182,5 +172,33 @@ RSpec.describe Gitlab::Ci::Variables::Collection::Sort do
expect { subject }.to raise_error(TSort::Cyclic)
end
end
+
+ context 'with overridden variables' do
+ let(:variables) do
+ [
+ { key: 'PROJECT_VAR', value: '$SUBGROUP_VAR' },
+ { key: 'SUBGROUP_VAR', value: '$TOP_LEVEL_GROUP_NAME' },
+ { key: 'SUBGROUP_VAR', value: '$SUB_GROUP_NAME' },
+ { key: 'TOP_LEVEL_GROUP_NAME', value: 'top-level-group' },
+ { key: 'SUB_GROUP_NAME', value: 'vars-in-vars-subgroup' }
+ ]
+ end
+
+ let(:collection) { Gitlab::Ci::Variables::Collection.new(variables) }
+
+ subject do
+ Gitlab::Ci::Variables::Collection::Sort.new(collection).tsort.map { |v| { v[:key] => v.value } }
+ end
+
+ it 'preserves relative order of overridden variables' do
+ is_expected.to eq([
+ { 'TOP_LEVEL_GROUP_NAME' => 'top-level-group' },
+ { 'SUBGROUP_VAR' => '$TOP_LEVEL_GROUP_NAME' },
+ { 'SUB_GROUP_NAME' => 'vars-in-vars-subgroup' },
+ { 'SUBGROUP_VAR' => '$SUB_GROUP_NAME' },
+ { 'PROJECT_VAR' => '$SUBGROUP_VAR' }
+ ])
+ end
+ end
end
end
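
The ordering guarantee tested above is a topological sort over variable references. A self-contained sketch with Ruby's TSort (not GitLab code, and deliberately ignoring the duplicate-key override case the spec covers):

require 'tsort'

class VariableGraph
  include TSort

  def initialize(vars)
    @vars = vars # { 'NAME' => 'value possibly containing $OTHER_NAME' }
  end

  def tsort_each_node(&block)
    @vars.each_key(&block)
  end

  def tsort_each_child(name)
    # A variable depends on every variable it references via $NAME.
    @vars.fetch(name, '').scan(/\$(\w+)/).flatten.each do |dep|
      yield dep if @vars.key?(dep)
    end
  end
end

graph = VariableGraph.new({
  'PROJECT_VAR' => '$SUBGROUP_VAR',
  'SUBGROUP_VAR' => '$TOP_LEVEL_GROUP_NAME',
  'TOP_LEVEL_GROUP_NAME' => 'top-level-group'
})

graph.tsort # => ["TOP_LEVEL_GROUP_NAME", "SUBGROUP_VAR", "PROJECT_VAR"]
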
diff --git a/spec/lib/gitlab/ci/variables/collection_spec.rb b/spec/lib/gitlab/ci/variables/collection_spec.rb
index abda27f0d6e..7ba98380986 100644
--- a/spec/lib/gitlab/ci/variables/collection_spec.rb
+++ b/spec/lib/gitlab/ci/variables/collection_spec.rb
@@ -123,17 +123,102 @@ RSpec.describe Gitlab::Ci::Variables::Collection do
end
describe '#[]' do
- variable = { key: 'VAR', value: 'value', public: true, masked: false }
+ subject { Gitlab::Ci::Variables::Collection.new(variables)[var_name] }
- collection = described_class.new([variable])
+ shared_examples 'an array access operator' do
+ context 'for a non-existent variable name' do
+ let(:var_name) { 'UNKNOWN_VAR' }
- it 'returns nil for a non-existent variable name' do
- expect(collection['UNKNOWN_VAR']).to be_nil
+ it 'returns nil' do
+ is_expected.to be_nil
+ end
+ end
+
+ context 'for an existent variable name' do
+ let(:var_name) { 'VAR' }
+
+ it 'returns the last Item' do
+ is_expected.to be_an_instance_of(Gitlab::Ci::Variables::Collection::Item)
+ expect(subject.to_runner_variable).to eq(variables.last)
+ end
+ end
+ end
+
+ context 'with variable key with single entry' do
+ let(:variables) do
+ [
+ { key: 'VAR', value: 'value', public: true, masked: false }
+ ]
+ end
+
+ it_behaves_like 'an array access operator'
+ end
+
+ context 'with variable key with multiple entries' do
+ let(:variables) do
+ [
+ { key: 'VAR', value: 'value', public: true, masked: false },
+ { key: 'VAR', value: 'override value', public: true, masked: false }
+ ]
+ end
+
+ it_behaves_like 'an array access operator'
end
+ end
+
+ describe '#all' do
+ subject { described_class.new(variables).all(var_name) }
- it 'returns Item for an existent variable name' do
- expect(collection['VAR']).to be_an_instance_of(Gitlab::Ci::Variables::Collection::Item)
- expect(collection['VAR'].to_runner_variable).to eq(variable)
+ shared_examples 'a method returning all known variables or nil' do
+ context 'for a non-existent variable name' do
+ let(:var_name) { 'UNKNOWN_VAR' }
+
+ it 'returns nil' do
+ is_expected.to be_nil
+ end
+ end
+
+ context 'for an existing variable name' do
+ let(:var_name) { 'VAR' }
+
+ it 'returns all expected Items' do
+ is_expected.to eq(expected_variables.map { |v| Gitlab::Ci::Variables::Collection::Item.fabricate(v) })
+ end
+ end
+ end
+
+ context 'with variable key with single entry' do
+ let(:variables) do
+ [
+ { key: 'VAR', value: 'value', public: true, masked: false }
+ ]
+ end
+
+ it_behaves_like 'a method returning all known variables or nil' do
+ let(:expected_variables) do
+ [
+ { key: 'VAR', value: 'value', public: true, masked: false }
+ ]
+ end
+ end
+ end
+
+ context 'with variable key with multiple entries' do
+ let(:variables) do
+ [
+ { key: 'VAR', value: 'value', public: true, masked: false },
+ { key: 'VAR', value: 'override value', public: true, masked: false }
+ ]
+ end
+
+ it_behaves_like 'a method returning all known variables or nil' do
+ let(:expected_variables) do
+ [
+ { key: 'VAR', value: 'value', public: true, masked: false },
+ { key: 'VAR', value: 'override value', public: true, masked: false }
+ ]
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index 49a470f9e01..1591c2e6b60 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -590,14 +590,6 @@ module Gitlab
end
it_behaves_like 'has warnings and expected error', /build job: need test is not defined in current or prior stages/
-
- context 'with ci_same_stage_job_needs FF disabled' do
- before do
- stub_feature_flags(ci_same_stage_job_needs: false)
- end
-
- it_behaves_like 'has warnings and expected error', /build job: need test is not defined in prior stages/
- end
end
end
end
@@ -1809,14 +1801,6 @@ module Gitlab
let(:dependencies) { ['deploy'] }
it_behaves_like 'returns errors', 'test1 job: dependency deploy is not defined in current or prior stages'
-
- context 'with ci_same_stage_job_needs FF disabled' do
- before do
- stub_feature_flags(ci_same_stage_job_needs: false)
- end
-
- it_behaves_like 'returns errors', 'test1 job: dependency deploy is not defined in prior stages'
- end
end
context 'when a job depends on another job that references a not-yet defined stage' do
@@ -2053,14 +2037,6 @@ module Gitlab
let(:needs) { ['deploy'] }
it_behaves_like 'returns errors', 'test1 job: need deploy is not defined in current or prior stages'
-
- context 'with ci_same_stage_job_needs FF disabled' do
- before do
- stub_feature_flags(ci_same_stage_job_needs: false)
- end
-
- it_behaves_like 'returns errors', 'test1 job: need deploy is not defined in prior stages'
- end
end
context 'needs and dependencies that are mismatching' do
diff --git a/spec/lib/gitlab/config/loader/yaml_spec.rb b/spec/lib/gitlab/config/loader/yaml_spec.rb
index 731ee12d7f4..be568a8e5f9 100644
--- a/spec/lib/gitlab/config/loader/yaml_spec.rb
+++ b/spec/lib/gitlab/config/loader/yaml_spec.rb
@@ -15,6 +15,24 @@ RSpec.describe Gitlab::Config::Loader::Yaml do
YAML
end
+ context 'when max yaml size and depth are set in ApplicationSetting' do
+ let(:yaml_size) { 2.megabytes }
+ let(:yaml_depth) { 200 }
+
+ before do
+ stub_application_setting(max_yaml_size_bytes: yaml_size, max_yaml_depth: yaml_depth)
+ end
+
+ it 'uses ApplicationSetting values rather than the defaults' do
+ expect(Gitlab::Utils::DeepSize)
+ .to receive(:new)
+ .with(any_args, { max_size: yaml_size, max_depth: yaml_depth })
+ .and_call_original
+
+ loader.load!
+ end
+ end
+
context 'when yaml syntax is correct' do
let(:yml) { 'image: ruby:2.7' }
diff --git a/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb b/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb
index b9e0132badb..8053f5261c0 100644
--- a/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb
+++ b/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::CycleAnalytics::StageSummary do
- let(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
+
let(:options) { { from: 1.day.ago } }
let(:args) { { options: options, current_user: user } }
let(:user) { create(:user, :admin) }
@@ -62,6 +63,8 @@ RSpec.describe Gitlab::CycleAnalytics::StageSummary do
end
describe "#commits" do
+ let!(:project) { create(:project, :repository) }
+
subject { stage_summary.second }
context 'when from date is given' do
@@ -132,115 +135,5 @@ RSpec.describe Gitlab::CycleAnalytics::StageSummary do
end
end
- describe "#deploys" do
- subject { stage_summary.third }
-
- context 'when from date is given' do
- before do
- Timecop.freeze(5.days.ago) { create(:deployment, :success, project: project) }
- Timecop.freeze(5.days.from_now) { create(:deployment, :success, project: project) }
- end
-
- it "finds the number of deploys made created after the 'from date'" do
- expect(subject[:value]).to eq('1')
- end
-
- it 'returns the localized title' do
- Gitlab::I18n.with_locale(:ru) do
- expect(subject[:title]).to eq(n_('Deploy', 'Deploys', 1))
- end
- end
- end
-
- it "doesn't find commits from other projects" do
- Timecop.freeze(5.days.from_now) do
- create(:deployment, :success, project: create(:project, :repository))
- end
-
- expect(subject[:value]).to eq('-')
- end
-
- context 'when `to` parameter is given' do
- before do
- Timecop.freeze(5.days.ago) { create(:deployment, :success, project: project) }
- Timecop.freeze(5.days.from_now) { create(:deployment, :success, project: project) }
- end
-
- it "doesn't find any record" do
- options[:to] = Time.now
-
- expect(subject[:value]).to eq('-')
- end
-
- it "finds records created between `from` and `to` range" do
- options[:from] = 10.days.ago
- options[:to] = 10.days.from_now
-
- expect(subject[:value]).to eq('2')
- end
- end
- end
-
- describe '#deployment_frequency' do
- subject { stage_summary.fourth[:value] }
-
- it 'includes the unit: `per day`' do
- expect(stage_summary.fourth[:unit]).to eq _('per day')
- end
-
- before do
- Timecop.freeze(5.days.ago) { create(:deployment, :success, project: project) }
- end
-
- it 'returns 0.0 when there were deploys but the frequency was too low' do
- options[:from] = 30.days.ago
-
- # 1 deployment over 30 days
- # frequency of 0.03, rounded off to 0.0
- expect(subject).to eq('0')
- end
-
- it 'returns `-` when there were no deploys' do
- options[:from] = 4.days.ago
-
- # 0 deployment in the last 4 days
- expect(subject).to eq('-')
- end
-
- context 'when `to` is nil' do
- it 'includes range until now' do
- options[:from] = 6.days.ago
- options[:to] = nil
-
- # 1 deployment over 7 days
- expect(subject).to eq('0.1')
- end
- end
-
- context 'when `to` is given' do
- before do
- Timecop.freeze(5.days.from_now) { create(:deployment, :success, project: project, finished_at: Time.zone.now) }
- end
-
- it 'finds records created between `from` and `to` range' do
- options[:from] = 10.days.ago
- options[:to] = 10.days.from_now
-
- # 2 deployments over 20 days
- expect(subject).to eq('0.1')
- end
-
- context 'when `from` and `to` are within a day' do
- it 'returns the number of deployments made on that day' do
- freeze_time do
- create(:deployment, :success, project: project, finished_at: Time.zone.now)
- options[:from] = Time.zone.now.at_beginning_of_day
- options[:to] = Time.zone.now.at_end_of_day
-
- expect(subject).to eq('1')
- end
- end
- end
- end
- end
+ it_behaves_like 'deployment metrics examples'
end
diff --git a/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb b/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb
index ed15951dfb0..eb16a8ccfa5 100644
--- a/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb
@@ -150,6 +150,23 @@ RSpec.describe Gitlab::Database::AsyncIndexes::MigrationHelpers do
migration.prepare_async_index(table_name, 'id')
end.not_to change { index_model.where(name: index_name).count }
end
+
+ it 'updates definition if changed' do
+ index = create(:postgres_async_index, table_name: table_name, name: index_name, definition: '...')
+
+ expect do
+ migration.prepare_async_index(table_name, 'id', name: index_name)
+ end.to change { index.reload.definition }
+ end
+
+ it 'does not update definition if not changed' do
+ definition = "CREATE INDEX CONCURRENTLY \"index_#{table_name}_on_id\" ON \"#{table_name}\" (\"id\")"
+ index = create(:postgres_async_index, table_name: table_name, name: index_name, definition: definition)
+
+ expect do
+ migration.prepare_async_index(table_name, 'id', name: index_name)
+ end.not_to change { index.reload.updated_at }
+ end
end
context 'when the async index table does not exist' do
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
index 3207e97a639..a1c2634f59c 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
@@ -234,6 +234,42 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
end
end
+ describe '#retry_failed_jobs!' do
+ let(:batched_migration) { create(:batched_background_migration, status: 'failed') }
+
+ subject(:retry_failed_jobs) { batched_migration.retry_failed_jobs! }
+
+ context 'when there are failed migration jobs' do
+ let!(:batched_background_migration_job) { create(:batched_background_migration_job, batched_migration: batched_migration, batch_size: 10, min_value: 6, max_value: 15, status: :failed, attempts: 3) }
+
+ before do
+ allow_next_instance_of(Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchingStrategy) do |batch_class|
+ allow(batch_class).to receive(:next_batch).with(anything, anything, batch_min_value: 6, batch_size: 5).and_return([6, 10])
+ end
+ end
+
+ it 'moves the status of the migration to active' do
+ retry_failed_jobs
+
+ expect(batched_migration.status).to eql 'active'
+ end
+
+ it 'changes the number of attempts to 0' do
+ retry_failed_jobs
+
+ expect(batched_background_migration_job.reload.attempts).to be_zero
+ end
+ end
+
+ context 'when there are no failed migration jobs' do
+ it 'moves the status of the migration to active' do
+ retry_failed_jobs
+
+ expect(batched_migration.status).to eql 'active'
+ end
+ end
+ end
+
describe '#job_class_name=' do
it_behaves_like 'an attr_writer that demodulizes assigned class names', :job_class_name
end
diff --git a/spec/lib/gitlab/database/connection_spec.rb b/spec/lib/gitlab/database/connection_spec.rb
index 5e0e6039afc..7f94d7af4a9 100644
--- a/spec/lib/gitlab/database/connection_spec.rb
+++ b/spec/lib/gitlab/database/connection_spec.rb
@@ -5,29 +5,14 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::Connection do
let(:connection) { described_class.new }
- describe '#default_pool_size' do
- before do
- allow(Gitlab::Runtime).to receive(:max_threads).and_return(7)
- end
-
- it 'returns the max thread size plus a fixed headroom of 10' do
- expect(connection.default_pool_size).to eq(17)
- end
-
- it 'returns the max thread size plus a DB_POOL_HEADROOM if this env var is present' do
- stub_env('DB_POOL_HEADROOM', '7')
-
- expect(connection.default_pool_size).to eq(14)
- end
- end
-
describe '#config' do
it 'returns a HashWithIndifferentAccess' do
expect(connection.config).to be_an_instance_of(HashWithIndifferentAccess)
end
it 'returns a default pool size' do
- expect(connection.config).to include(pool: connection.default_pool_size)
+ expect(connection.config)
+ .to include(pool: Gitlab::Database.default_pool_size)
end
it 'does not cache its results' do
@@ -43,7 +28,7 @@ RSpec.describe Gitlab::Database::Connection do
it 'returns the default pool size' do
expect(connection).to receive(:config).and_return({ pool: nil })
- expect(connection.pool_size).to eq(connection.default_pool_size)
+ expect(connection.pool_size).to eq(Gitlab::Database.default_pool_size)
end
end
@@ -129,7 +114,7 @@ RSpec.describe Gitlab::Database::Connection do
describe '#db_config_with_default_pool_size' do
it 'returns db_config with our default pool size' do
- allow(connection).to receive(:default_pool_size).and_return(9)
+ allow(Gitlab::Database).to receive(:default_pool_size).and_return(9)
expect(connection.db_config_with_default_pool_size.pool).to eq(9)
end
@@ -143,7 +128,7 @@ RSpec.describe Gitlab::Database::Connection do
describe '#disable_prepared_statements' do
around do |example|
- original_config = ::Gitlab::Database.main.config
+ original_config = connection.scope.connection.pool.db_config
example.run
@@ -162,6 +147,12 @@ RSpec.describe Gitlab::Database::Connection do
expect(connection.scope.connection.prepared_statements).to eq(false)
end
+ it 'retains the connection name' do
+ connection.disable_prepared_statements
+
+ expect(connection.scope.connection_db_config.name).to eq('main')
+ end
+
context 'with dynamic connection pool size' do
before do
connection.scope.establish_connection(connection.config.merge(pool: 7))
@@ -393,34 +384,28 @@ RSpec.describe Gitlab::Database::Connection do
end
describe '#cached_column_exists?' do
- it 'only retrieves data once' do
- expect(connection.scope.connection)
- .to receive(:columns)
- .once.and_call_original
-
- 2.times do
- expect(connection.cached_column_exists?(:projects, :id)).to be_truthy
- expect(connection.cached_column_exists?(:projects, :bogus_column)).to be_falsey
+ it 'only retrieves the data from the schema cache' do
+ queries = ActiveRecord::QueryRecorder.new do
+ 2.times do
+ expect(connection.cached_column_exists?(:projects, :id)).to be_truthy
+ expect(connection.cached_column_exists?(:projects, :bogus_column)).to be_falsey
+ end
end
+
+ expect(queries.count).to eq(0)
end
end
describe '#cached_table_exists?' do
- it 'only retrieves data once per table' do
- expect(connection.scope.connection)
- .to receive(:data_source_exists?)
- .with(:projects)
- .once.and_call_original
-
- expect(connection.scope.connection)
- .to receive(:data_source_exists?)
- .with(:bogus_table_name)
- .once.and_call_original
-
- 2.times do
- expect(connection.cached_table_exists?(:projects)).to be_truthy
- expect(connection.cached_table_exists?(:bogus_table_name)).to be_falsey
+ it 'only retrieves the data from the schema cache' do
+ queries = ActiveRecord::QueryRecorder.new do
+ 2.times do
+ expect(connection.cached_table_exists?(:projects)).to be_truthy
+ expect(connection.cached_table_exists?(:bogus_table_name)).to be_falsey
+ end
end
+
+ expect(queries.count).to eq(0)
end
it 'returns false when database does not exist' do
@@ -433,16 +418,14 @@ RSpec.describe Gitlab::Database::Connection do
end
describe '#exists?' do
- it 'returns true if `ActiveRecord::Base.connection` succeeds' do
- expect(connection.scope).to receive(:connection)
-
+ it 'returns true if the database exists' do
expect(connection.exists?).to be(true)
end
- it 'returns false if `ActiveRecord::Base.connection` fails' do
- expect(connection.scope).to receive(:connection) do
- raise ActiveRecord::NoDatabaseError, 'broken'
- end
+ it "returns false if the database doesn't exist" do
+ expect(connection.scope.connection.schema_cache)
+ .to receive(:database_version)
+ .and_raise(ActiveRecord::NoDatabaseError)
expect(connection.exists?).to be(false)
end
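
The rewritten example implies that #exists? now probes the schema cache rather than opening a connection explicitly; a hypothetical sketch of that shape (the method body is an assumption, not the verified source):

def exists?
  # Any lookup through the schema cache raises NoDatabaseError when the
  # database is missing, which is exactly what the spec above stubs.
  scope.connection.schema_cache.database_version
  true
rescue ActiveRecord::NoDatabaseError
  false
end
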
diff --git a/spec/lib/gitlab/database/load_balancing/action_cable_callbacks_spec.rb b/spec/lib/gitlab/database/load_balancing/action_cable_callbacks_spec.rb
new file mode 100644
index 00000000000..ebbbafb855f
--- /dev/null
+++ b/spec/lib/gitlab/database/load_balancing/action_cable_callbacks_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::LoadBalancing::ActionCableCallbacks, :request_store do
+ describe '.wrapper' do
+ it 'uses primary and then releases the connection and clears the session' do
+ expect(Gitlab::Database::LoadBalancing).to receive_message_chain(:proxy, :load_balancer, :release_host)
+ expect(Gitlab::Database::LoadBalancing::Session).to receive(:clear_session)
+
+ described_class.wrapper.call(
+ nil,
+ lambda do
+ expect(Gitlab::Database::LoadBalancing::Session.current.use_primary?).to eq(true)
+ end
+ )
+ end
+
+ context 'with an exception' do
+ it 'releases the connection and clears the session' do
+ expect(Gitlab::Database::LoadBalancing).to receive_message_chain(:proxy, :load_balancer, :release_host)
+ expect(Gitlab::Database::LoadBalancing::Session).to receive(:clear_session)
+
+ expect do
+ described_class.wrapper.call(nil, lambda { raise 'test_exception' })
+ end.to raise_error('test_exception')
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/load_balancing/configuration_spec.rb b/spec/lib/gitlab/database/load_balancing/configuration_spec.rb
new file mode 100644
index 00000000000..6621e6276a5
--- /dev/null
+++ b/spec/lib/gitlab/database/load_balancing/configuration_spec.rb
@@ -0,0 +1,175 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::LoadBalancing::Configuration do
+ let(:model) do
+ config = ActiveRecord::DatabaseConfigurations::HashConfig
+ .new('main', 'test', configuration_hash)
+
+ double(:model, connection_db_config: config)
+ end
+
+ describe '.for_model' do
+ context 'when load balancing is not configured' do
+ let(:configuration_hash) { {} }
+
+ it 'uses the default settings' do
+ config = described_class.for_model(model)
+
+ expect(config.hosts).to eq([])
+ expect(config.max_replication_difference).to eq(8.megabytes)
+ expect(config.max_replication_lag_time).to eq(60.0)
+ expect(config.replica_check_interval).to eq(60.0)
+ expect(config.service_discovery).to eq(
+ nameserver: 'localhost',
+ port: 8600,
+ record: nil,
+ record_type: 'A',
+ interval: 60,
+ disconnect_timeout: 120,
+ use_tcp: false
+ )
+ expect(config.pool_size).to eq(Gitlab::Database.default_pool_size)
+ end
+ end
+
+ context 'when load balancing is configured' do
+ let(:configuration_hash) do
+ {
+ pool: 4,
+ load_balancing: {
+ max_replication_difference: 1,
+ max_replication_lag_time: 2,
+ replica_check_interval: 3,
+ hosts: %w[foo bar],
+ discover: {
+ 'record' => 'foo.example.com'
+ }
+ }
+ }
+ end
+
+ it 'uses the custom configuration settings' do
+ config = described_class.for_model(model)
+
+ expect(config.hosts).to eq(%w[foo bar])
+ expect(config.max_replication_difference).to eq(1)
+ expect(config.max_replication_lag_time).to eq(2.0)
+ expect(config.replica_check_interval).to eq(3.0)
+ expect(config.service_discovery).to eq(
+ nameserver: 'localhost',
+ port: 8600,
+ record: 'foo.example.com',
+ record_type: 'A',
+ interval: 60,
+ disconnect_timeout: 120,
+ use_tcp: false
+ )
+ expect(config.pool_size).to eq(4)
+ end
+ end
+
+ context 'when the load balancing configuration uses strings as the keys' do
+ let(:configuration_hash) do
+ {
+ pool: 4,
+ load_balancing: {
+ 'max_replication_difference' => 1,
+ 'max_replication_lag_time' => 2,
+ 'replica_check_interval' => 3,
+ 'hosts' => %w[foo bar],
+ 'discover' => {
+ 'record' => 'foo.example.com'
+ }
+ }
+ }
+ end
+
+ it 'uses the custom configuration settings' do
+ config = described_class.for_model(model)
+
+ expect(config.hosts).to eq(%w[foo bar])
+ expect(config.max_replication_difference).to eq(1)
+ expect(config.max_replication_lag_time).to eq(2.0)
+ expect(config.replica_check_interval).to eq(3.0)
+ expect(config.service_discovery).to eq(
+ nameserver: 'localhost',
+ port: 8600,
+ record: 'foo.example.com',
+ record_type: 'A',
+ interval: 60,
+ disconnect_timeout: 120,
+ use_tcp: false
+ )
+ expect(config.pool_size).to eq(4)
+ end
+ end
+ end
+
+ describe '#load_balancing_enabled?' do
+ it 'returns true when hosts are configured' do
+ config = described_class.new(ActiveRecord::Base, %w[foo bar])
+
+ expect(config.load_balancing_enabled?).to eq(true)
+ end
+
+ it 'returns true when a service discovery record is configured' do
+ config = described_class.new(ActiveRecord::Base)
+ config.service_discovery[:record] = 'foo'
+
+ expect(config.load_balancing_enabled?).to eq(true)
+ end
+
+ it 'returns false when no hosts are configured and service discovery is disabled' do
+ config = described_class.new(ActiveRecord::Base)
+
+ expect(config.load_balancing_enabled?).to eq(false)
+ end
+ end
+
+ describe '#service_discovery_enabled?' do
+ it 'returns true when a record is configured' do
+ config = described_class.new(ActiveRecord::Base)
+ config.service_discovery[:record] = 'foo'
+
+ expect(config.service_discovery_enabled?).to eq(true)
+ end
+
+ it 'returns false when no record is configured' do
+ config = described_class.new(ActiveRecord::Base)
+
+ expect(config.service_discovery_enabled?).to eq(false)
+ end
+ end
+
+ describe '#pool_size' do
+ context 'when a custom pool size is used' do
+ let(:configuration_hash) { { pool: 4 } }
+
+ it 'always reads the value from the model configuration' do
+ config = described_class.new(model)
+
+ expect(config.pool_size).to eq(4)
+
+      # We can't modify `configuration_hash` directly, as it's only used to
+      # populate the internal hash kept by ActiveRecord rather than being used
+      # as-is.
+ allow(model.connection_db_config)
+ .to receive(:configuration_hash)
+ .and_return({ pool: 42 })
+
+ expect(config.pool_size).to eq(42)
+ end
+ end
+
+ context 'when the pool size is nil' do
+ let(:configuration_hash) { {} }
+
+ it 'returns the default pool size' do
+ config = described_class.new(model)
+
+ expect(config.pool_size).to eq(Gitlab::Database.default_pool_size)
+ end
+ end
+ end
+end
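The new spec above pins down the public surface of Gitlab::Database::LoadBalancing::Configuration. As a rough, hedged illustration only (not part of the diff, and assuming the GitLab application environment is loaded), reading settings through that API could look like the following Ruby sketch, restricted to methods the spec itself exercises:

    # Build the configuration from a model's database configuration.
    config = Gitlab::Database::LoadBalancing::Configuration.for_model(ActiveRecord::Base)

    config.hosts                      # e.g. %w[replica-1 replica-2], or [] when not configured
    config.pool_size                  # falls back to Gitlab::Database.default_pool_size
    config.load_balancing_enabled?    # true when hosts or a service discovery record are set
    config.service_discovery[:record] # nil unless a DNS record is configured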
diff --git a/spec/lib/gitlab/database/load_balancing/connection_proxy_spec.rb b/spec/lib/gitlab/database/load_balancing/connection_proxy_spec.rb
index 0ca99ec9acf..ba2f9485066 100644
--- a/spec/lib/gitlab/database/load_balancing/connection_proxy_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/connection_proxy_spec.rb
@@ -3,7 +3,12 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do
- let(:proxy) { described_class.new }
+ let(:proxy) do
+ config = Gitlab::Database::LoadBalancing::Configuration
+ .new(ActiveRecord::Base)
+
+ described_class.new(Gitlab::Database::LoadBalancing::LoadBalancer.new(config))
+ end
describe '#select' do
it 'performs a read' do
@@ -35,9 +40,15 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do
describe 'using a SELECT FOR UPDATE query' do
it 'runs the query on the primary and sticks to it' do
arel = double(:arel, locked: true)
+ session = Gitlab::Database::LoadBalancing::Session.new
+
+ allow(Gitlab::Database::LoadBalancing::Session).to receive(:current)
+ .and_return(session)
+
+ expect(session).to receive(:write!)
expect(proxy).to receive(:write_using_load_balancer)
- .with(:select_all, arel, 'foo', [], sticky: true)
+ .with(:select_all, arel, 'foo', [])
proxy.select_all(arel, 'foo')
end
@@ -58,8 +69,13 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do
Gitlab::Database::LoadBalancing::ConnectionProxy::STICKY_WRITES.each do |name|
describe "#{name}" do
it 'runs the query on the primary and sticks to it' do
- expect(proxy).to receive(:write_using_load_balancer)
- .with(name, 'foo', sticky: true)
+ session = Gitlab::Database::LoadBalancing::Session.new
+
+ allow(Gitlab::Database::LoadBalancing::Session).to receive(:current)
+ .and_return(session)
+
+ expect(session).to receive(:write!)
+ expect(proxy).to receive(:write_using_load_balancer).with(name, 'foo')
proxy.send(name, 'foo')
end
@@ -108,7 +124,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do
# We have an extra test for #transaction here to make sure that nested queries
# are also sent to a primary.
describe '#transaction' do
- let(:session) { double(:session) }
+ let(:session) { Gitlab::Database::LoadBalancing::Session.new }
before do
allow(Gitlab::Database::LoadBalancing::Session).to receive(:current)
@@ -192,7 +208,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do
proxy.foo('foo')
end
- it 'properly forwards trailing hash arguments' do
+ it 'properly forwards keyword arguments' do
allow(proxy.load_balancer).to receive(:read_write)
expect(proxy).to receive(:write_using_load_balancer).and_call_original
@@ -217,7 +233,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do
proxy.foo('foo')
end
- it 'properly forwards trailing hash arguments' do
+ it 'properly forwards keyword arguments' do
allow(proxy.load_balancer).to receive(:read)
expect(proxy).to receive(:read_using_load_balancer).and_call_original
@@ -297,20 +313,12 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do
.and_return(session)
end
- it 'uses but does not stick to the primary when sticking is disabled' do
+ it 'uses but does not stick to the primary' do
expect(proxy.load_balancer).to receive(:read_write).and_yield(connection)
expect(connection).to receive(:foo).with('foo')
expect(session).not_to receive(:write!)
proxy.write_using_load_balancer(:foo, 'foo')
end
-
- it 'sticks to the primary when sticking is enabled' do
- expect(proxy.load_balancer).to receive(:read_write).and_yield(connection)
- expect(connection).to receive(:foo).with('foo')
- expect(session).to receive(:write!)
-
- proxy.write_using_load_balancer(:foo, 'foo', sticky: true)
- end
end
end
diff --git a/spec/lib/gitlab/database/load_balancing/host_list_spec.rb b/spec/lib/gitlab/database/load_balancing/host_list_spec.rb
index ad4ca18d5e6..9bb8116c434 100644
--- a/spec/lib/gitlab/database/load_balancing/host_list_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/host_list_spec.rb
@@ -4,7 +4,12 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::LoadBalancing::HostList do
let(:db_host) { ActiveRecord::Base.connection_pool.db_config.host }
- let(:load_balancer) { double(:load_balancer) }
+ let(:load_balancer) do
+ Gitlab::Database::LoadBalancing::LoadBalancer.new(
+ Gitlab::Database::LoadBalancing::Configuration.new(ActiveRecord::Base)
+ )
+ end
+
let(:host_count) { 2 }
let(:hosts) { Array.new(host_count) { Gitlab::Database::LoadBalancing::Host.new(db_host, load_balancer, port: 5432) } }
let(:host_list) { described_class.new(hosts) }
diff --git a/spec/lib/gitlab/database/load_balancing/host_spec.rb b/spec/lib/gitlab/database/load_balancing/host_spec.rb
index f42ac8be1bb..e2011692228 100644
--- a/spec/lib/gitlab/database/load_balancing/host_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/host_spec.rb
@@ -3,7 +3,10 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::LoadBalancing::Host do
- let(:load_balancer) { Gitlab::Database::LoadBalancing::LoadBalancer.new }
+ let(:load_balancer) do
+ Gitlab::Database::LoadBalancing::LoadBalancer
+ .new(Gitlab::Database::LoadBalancing::Configuration.new(ActiveRecord::Base))
+ end
let(:host) do
Gitlab::Database::LoadBalancing::Host.new('localhost', load_balancer)
@@ -274,7 +277,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::Host do
end
it 'returns false when the data is not recent enough' do
- diff = Gitlab::Database::LoadBalancing.max_replication_difference * 2
+ diff = load_balancer.configuration.max_replication_difference * 2
expect(host)
.to receive(:query_and_release)
diff --git a/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
index c647f5a8f5d..86fae14b961 100644
--- a/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
@@ -5,7 +5,12 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
let(:conflict_error) { Class.new(RuntimeError) }
let(:db_host) { ActiveRecord::Base.connection_pool.db_config.host }
- let(:lb) { described_class.new([db_host, db_host]) }
+ let(:config) do
+ Gitlab::Database::LoadBalancing::Configuration
+ .new(ActiveRecord::Base, [db_host, db_host])
+ end
+
+ let(:lb) { described_class.new(config) }
let(:request_cache) { lb.send(:request_cache) }
before do
@@ -41,6 +46,19 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
top_error
end
+ describe '#initialize' do
+ it 'ignores the hosts when the primary_only option is enabled' do
+ config = Gitlab::Database::LoadBalancing::Configuration
+ .new(ActiveRecord::Base, [db_host])
+ lb = described_class.new(config, primary_only: true)
+ hosts = lb.host_list.hosts
+
+ expect(hosts.length).to eq(1)
+ expect(hosts.first)
+ .to be_instance_of(Gitlab::Database::LoadBalancing::PrimaryHost)
+ end
+ end
+
describe '#read' do
it 'yields a connection for a read' do
connection = double(:connection)
@@ -121,6 +139,19 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
expect { |b| lb.read(&b) }
.to yield_with_args(ActiveRecord::Base.retrieve_connection)
end
+
+ it 'uses the primary when the primary_only option is enabled' do
+ config = Gitlab::Database::LoadBalancing::Configuration
+ .new(ActiveRecord::Base)
+ lb = described_class.new(config, primary_only: true)
+
+ # When no hosts are configured, we don't want to produce any warnings, as
+      # they aren't useful and would only add noise.
+ expect(Gitlab::Database::LoadBalancing::Logger).not_to receive(:warn)
+
+ expect { |b| lb.read(&b) }
+ .to yield_with_args(ActiveRecord::Base.retrieve_connection)
+ end
end
describe '#read_write' do
@@ -152,8 +183,11 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
end
it 'does not create conflicts with other load balancers when caching hosts' do
- lb1 = described_class.new([db_host, db_host], ActiveRecord::Base)
- lb2 = described_class.new([db_host, db_host], Ci::CiDatabaseRecord)
+ ci_config = Gitlab::Database::LoadBalancing::Configuration
+ .new(Ci::CiDatabaseRecord, [db_host, db_host])
+
+ lb1 = described_class.new(config)
+ lb2 = described_class.new(ci_config)
host1 = lb1.host
host2 = lb2.host
@@ -283,6 +317,12 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
expect(lb.connection_error?(error)).to eq(false)
end
+
+ it 'returns false for ActiveRecord errors without a cause' do
+ error = ActiveRecord::RecordNotUnique.new
+
+ expect(lb.connection_error?(error)).to eq(false)
+ end
end
describe '#serialization_failure?' do
diff --git a/spec/lib/gitlab/database/load_balancing/primary_host_spec.rb b/spec/lib/gitlab/database/load_balancing/primary_host_spec.rb
new file mode 100644
index 00000000000..a0e63a7ee4e
--- /dev/null
+++ b/spec/lib/gitlab/database/load_balancing/primary_host_spec.rb
@@ -0,0 +1,126 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::LoadBalancing::PrimaryHost do
+ let(:load_balancer) do
+ Gitlab::Database::LoadBalancing::LoadBalancer.new(
+ Gitlab::Database::LoadBalancing::Configuration.new(ActiveRecord::Base)
+ )
+ end
+
+ let(:host) { Gitlab::Database::LoadBalancing::PrimaryHost.new(load_balancer) }
+
+ describe '#connection' do
+ it 'returns a connection from the pool' do
+ expect(load_balancer.pool).to receive(:connection)
+
+ host.connection
+ end
+ end
+
+ describe '#release_connection' do
+ it 'does nothing' do
+ expect(host.release_connection).to be_nil
+ end
+ end
+
+ describe '#enable_query_cache!' do
+ it 'does nothing' do
+ expect(host.enable_query_cache!).to be_nil
+ end
+ end
+
+ describe '#disable_query_cache!' do
+ it 'does nothing' do
+ expect(host.disable_query_cache!).to be_nil
+ end
+ end
+
+ describe '#query_cache_enabled' do
+ it 'delegates to the primary connection pool' do
+ expect(host.query_cache_enabled)
+ .to eq(load_balancer.pool.query_cache_enabled)
+ end
+ end
+
+ describe '#disconnect!' do
+ it 'does nothing' do
+ expect(host.disconnect!).to be_nil
+ end
+ end
+
+ describe '#offline!' do
+ it 'does nothing' do
+ expect(host.offline!).to be_nil
+ end
+ end
+
+ describe '#online?' do
+ it 'returns true' do
+ expect(host.online?).to eq(true)
+ end
+ end
+
+ describe '#primary_write_location' do
+ it 'returns the write location of the primary' do
+ expect(host.primary_write_location).to be_an_instance_of(String)
+ expect(host.primary_write_location).not_to be_empty
+ end
+ end
+
+ describe '#caught_up?' do
+ it 'returns true' do
+ expect(host.caught_up?('foo')).to eq(true)
+ end
+ end
+
+ describe '#database_replica_location' do
+ let(:connection) { double(:connection) }
+
+ it 'returns the write ahead location of the replica', :aggregate_failures do
+ expect(host)
+ .to receive(:query_and_release)
+ .and_return({ 'location' => '0/D525E3A8' })
+
+ expect(host.database_replica_location).to be_an_instance_of(String)
+ end
+
+ it 'returns nil when the database query returned no rows' do
+ expect(host).to receive(:query_and_release).and_return({})
+
+ expect(host.database_replica_location).to be_nil
+ end
+
+ it 'returns nil when the database connection fails' do
+ allow(host).to receive(:connection).and_raise(PG::Error)
+
+ expect(host.database_replica_location).to be_nil
+ end
+ end
+
+ describe '#query_and_release' do
+ it 'executes a SQL query' do
+ results = host.query_and_release('SELECT 10 AS number')
+
+ expect(results).to be_an_instance_of(Hash)
+ expect(results['number'].to_i).to eq(10)
+ end
+
+ it 'releases the connection after running the query' do
+ expect(host)
+ .to receive(:release_connection)
+ .once
+
+ host.query_and_release('SELECT 10 AS number')
+ end
+
+ it 'returns an empty Hash in the event of an error' do
+ expect(host.connection)
+ .to receive(:select_all)
+ .and_raise(RuntimeError, 'kittens')
+
+ expect(host.query_and_release('SELECT 10 AS number')).to eq({})
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb b/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb
index a27341a3324..e9bc465b1c7 100644
--- a/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb
@@ -3,13 +3,18 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery do
- let(:load_balancer) { Gitlab::Database::LoadBalancing::LoadBalancer.new([]) }
+ let(:load_balancer) do
+ Gitlab::Database::LoadBalancing::LoadBalancer.new(
+ Gitlab::Database::LoadBalancing::Configuration.new(ActiveRecord::Base)
+ )
+ end
+
let(:service) do
described_class.new(
+ load_balancer,
nameserver: 'localhost',
port: 8600,
- record: 'foo',
- load_balancer: load_balancer
+ record: 'foo'
)
end
@@ -26,11 +31,11 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery do
describe ':record_type' do
subject do
described_class.new(
+ load_balancer,
nameserver: 'localhost',
port: 8600,
record: 'foo',
- record_type: record_type,
- load_balancer: load_balancer
+ record_type: record_type
)
end
@@ -69,18 +74,69 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery do
end
describe '#perform_service_discovery' do
- it 'reports exceptions to Sentry' do
- error = StandardError.new
+ context 'without any failures' do
+ it 'runs once' do
+ expect(service)
+ .to receive(:refresh_if_necessary).once
+
+ expect(service).not_to receive(:sleep)
+
+ expect(Gitlab::ErrorTracking).not_to receive(:track_exception)
+
+ service.perform_service_discovery
+ end
+ end
+ context 'with failures' do
+ before do
+ allow(Gitlab::ErrorTracking).to receive(:track_exception)
+ allow(service).to receive(:sleep)
+ end
+
+ let(:valid_retry_sleep_duration) { satisfy { |val| described_class::RETRY_DELAY_RANGE.include?(val) } }
+
+ it 'retries service discovery when under the retry limit' do
+ error = StandardError.new
+
+ expect(service)
+ .to receive(:refresh_if_necessary)
+ .and_raise(error).exactly(described_class::MAX_DISCOVERY_RETRIES - 1).times.ordered
+
+ expect(service)
+ .to receive(:sleep).with(valid_retry_sleep_duration)
+ .exactly(described_class::MAX_DISCOVERY_RETRIES - 1).times
+
+ expect(service).to receive(:refresh_if_necessary).and_return(45).ordered
+
+ expect(service.perform_service_discovery).to eq(45)
+ end
+
+ it 'does not retry service discovery after exceeding the limit' do
+ error = StandardError.new
+
+ expect(service)
+ .to receive(:refresh_if_necessary)
+ .and_raise(error).exactly(described_class::MAX_DISCOVERY_RETRIES).times
+
+ expect(service)
+ .to receive(:sleep).with(valid_retry_sleep_duration)
+ .exactly(described_class::MAX_DISCOVERY_RETRIES).times
+
+ service.perform_service_discovery
+ end
- expect(service)
- .to receive(:refresh_if_necessary)
- .and_raise(error)
+ it 'reports exceptions to Sentry' do
+ error = StandardError.new
+
+ expect(service)
+ .to receive(:refresh_if_necessary)
+ .and_raise(error).exactly(described_class::MAX_DISCOVERY_RETRIES).times
- expect(Gitlab::ErrorTracking)
- .to receive(:track_exception)
- .with(error)
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .with(error).exactly(described_class::MAX_DISCOVERY_RETRIES).times
- service.perform_service_discovery
+ service.perform_service_discovery
+ end
end
end
@@ -133,7 +189,10 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery do
let(:address_bar) { described_class::Address.new('bar') }
let(:load_balancer) do
- Gitlab::Database::LoadBalancing::LoadBalancer.new([address_foo])
+ Gitlab::Database::LoadBalancing::LoadBalancer.new(
+ Gitlab::Database::LoadBalancing::Configuration
+ .new(ActiveRecord::Base, [address_foo])
+ )
end
before do
@@ -166,11 +225,11 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery do
describe '#addresses_from_dns' do
let(:service) do
described_class.new(
+ load_balancer,
nameserver: 'localhost',
port: 8600,
record: 'foo',
- record_type: record_type,
- load_balancer: load_balancer
+ record_type: record_type
)
end
@@ -224,6 +283,16 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery do
expect(service.addresses_from_dns).to eq([90, addresses])
end
end
+
+ context 'when the resolver returns an empty response' do
+ let(:packet) { double(:packet, answer: []) }
+
+ let(:record_type) { 'A' }
+
+ it 'raises EmptyDnsResponse' do
+ expect { service.addresses_from_dns }.to raise_error(Gitlab::Database::LoadBalancing::ServiceDiscovery::EmptyDnsResponse)
+ end
+ end
end
describe '#new_wait_time_for' do
@@ -246,7 +315,10 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery do
describe '#addresses_from_load_balancer' do
let(:load_balancer) do
- Gitlab::Database::LoadBalancing::LoadBalancer.new(%w[b a])
+ Gitlab::Database::LoadBalancing::LoadBalancer.new(
+ Gitlab::Database::LoadBalancing::Configuration
+ .new(ActiveRecord::Base, %w[b a])
+ )
end
it 'returns the ordered host names of the load balancer' do
diff --git a/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb
index 54050a87af0..f683ade978a 100644
--- a/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb
@@ -58,8 +58,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
it 'does not pass database locations', :aggregate_failures do
run_middleware
- expect(job['database_replica_location']).to be_nil
- expect(job['database_write_location']).to be_nil
+ expect(job['wal_locations']).to be_nil
end
include_examples 'job data consistency'
@@ -86,11 +85,13 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
end
it 'passes database_replica_location' do
+ expected_location = { Gitlab::Database::MAIN_DATABASE_NAME.to_sym => location }
+
expect(load_balancer).to receive_message_chain(:host, "database_replica_location").and_return(location)
run_middleware
- expect(job['database_replica_location']).to eq(location)
+ expect(job['wal_locations']).to eq(expected_location)
end
include_examples 'job data consistency'
@@ -102,40 +103,56 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
end
it 'passes primary write location', :aggregate_failures do
+ expected_location = { Gitlab::Database::MAIN_DATABASE_NAME.to_sym => location }
+
expect(load_balancer).to receive(:primary_write_location).and_return(location)
run_middleware
- expect(job['database_write_location']).to eq(location)
+ expect(job['wal_locations']).to eq(expected_location)
end
include_examples 'job data consistency'
end
end
- shared_examples_for 'database location was already provided' do |provided_database_location, other_location|
- shared_examples_for 'does not set database location again' do |use_primary|
- before do
- allow(Gitlab::Database::LoadBalancing::Session.current).to receive(:use_primary?).and_return(use_primary)
- end
+ context 'when worker cannot be constantized' do
+ let(:worker_class) { 'ActionMailer::MailDeliveryJob' }
+ let(:expected_consistency) { :always }
- it 'does not set database locations again' do
- run_middleware
+ include_examples 'does not pass database locations'
+ end
- expect(job[provided_database_location]).to eq(old_location)
- expect(job[other_location]).to be_nil
- end
- end
+ context 'when worker class does not include ApplicationWorker' do
+ let(:worker_class) { ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper }
+ let(:expected_consistency) { :always }
+
+ include_examples 'does not pass database locations'
+ end
+ context 'database wal location was already provided' do
let(:old_location) { '0/D525E3A8' }
let(:new_location) { 'AB/12345' }
- let(:job) { { "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", provided_database_location => old_location } }
+ let(:wal_locations) { { Gitlab::Database::MAIN_DATABASE_NAME.to_sym => old_location } }
+ let(:job) { { "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", 'wal_locations' => wal_locations } }
before do
allow(load_balancer).to receive(:primary_write_location).and_return(new_location)
allow(load_balancer).to receive(:database_replica_location).and_return(new_location)
end
+ shared_examples_for 'does not set database location again' do |use_primary|
+ before do
+ allow(Gitlab::Database::LoadBalancing::Session.current).to receive(:use_primary?).and_return(use_primary)
+ end
+
+ it 'does not set database locations again' do
+ run_middleware
+
+ expect(job['wal_locations']).to eq(wal_locations)
+ end
+ end
+
context "when write was performed" do
include_examples 'does not set database location again', true
end
@@ -145,28 +162,6 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
end
end
- context 'when worker cannot be constantized' do
- let(:worker_class) { 'ActionMailer::MailDeliveryJob' }
- let(:expected_consistency) { :always }
-
- include_examples 'does not pass database locations'
- end
-
- context 'when worker class does not include ApplicationWorker' do
- let(:worker_class) { ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper }
- let(:expected_consistency) { :always }
-
- include_examples 'does not pass database locations'
- end
-
- context 'database write location was already provided' do
- include_examples 'database location was already provided', 'database_write_location', 'database_replica_location'
- end
-
- context 'database replica location was already provided' do
- include_examples 'database location was already provided', 'database_replica_location', 'database_write_location'
- end
-
context 'when worker data consistency is :always' do
include_context 'data consistency worker class', :always, :load_balancing_for_test_data_consistency_worker
diff --git a/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb
index 14f240cd159..9f23eb0094f 100644
--- a/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb
@@ -62,9 +62,12 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware do
include_examples 'load balancing strategy', expected_strategy
end
- shared_examples_for 'replica is up to date' do |location, expected_strategy|
+ shared_examples_for 'replica is up to date' do |expected_strategy|
+    let(:location) { '0/D525E3A8' }
+ let(:wal_locations) { { Gitlab::Database::MAIN_DATABASE_NAME.to_sym => location } }
+
it 'does not stick to the primary', :aggregate_failures do
- expect(middleware).to receive(:replica_caught_up?).with(location).and_return(true)
+ expect(load_balancer).to receive(:select_up_to_date_host).with(location).and_return(true)
run_middleware do
expect(Gitlab::Database::LoadBalancing::Session.current.use_primary?).not_to be_truthy
@@ -85,30 +88,40 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware do
include_examples 'stick to the primary', 'primary'
end
- context 'when database replica location is set' do
- let(:job) { { 'job_id' => 'a180b47c-3fd6-41b8-81e9-34da61c3400e', 'database_replica_location' => '0/D525E3A8' } }
+ context 'when database wal location is set' do
+ let(:job) { { 'job_id' => 'a180b47c-3fd6-41b8-81e9-34da61c3400e', 'wal_locations' => wal_locations } }
+
+ before do
+ allow(load_balancer).to receive(:select_up_to_date_host).with(location).and_return(true)
+ end
+
+ it_behaves_like 'replica is up to date', 'replica'
+ end
+
+ context 'when deduplication wal location is set' do
+ let(:job) { { 'job_id' => 'a180b47c-3fd6-41b8-81e9-34da61c3400e', 'dedup_wal_locations' => wal_locations } }
before do
- allow(middleware).to receive(:replica_caught_up?).and_return(true)
+ allow(load_balancer).to receive(:select_up_to_date_host).with(wal_locations[:main]).and_return(true)
end
- it_behaves_like 'replica is up to date', '0/D525E3A8', 'replica'
+ it_behaves_like 'replica is up to date', 'replica'
end
- context 'when database primary location is set' do
+ context 'when legacy wal location is set' do
let(:job) { { 'job_id' => 'a180b47c-3fd6-41b8-81e9-34da61c3400e', 'database_write_location' => '0/D525E3A8' } }
before do
- allow(middleware).to receive(:replica_caught_up?).and_return(true)
+ allow(load_balancer).to receive(:select_up_to_date_host).with('0/D525E3A8').and_return(true)
end
- it_behaves_like 'replica is up to date', '0/D525E3A8', 'replica'
+ it_behaves_like 'replica is up to date', 'replica'
end
context 'when database location is not set' do
let(:job) { { 'job_id' => 'a180b47c-3fd6-41b8-81e9-34da61c3400e' } }
- it_behaves_like 'stick to the primary', 'primary_no_wal'
+ include_examples 'stick to the primary', 'primary_no_wal'
end
end
@@ -167,7 +180,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware do
replication_lag!(false)
end
- it_behaves_like 'replica is up to date', '0/D525E3A8', 'replica_retried'
+ include_examples 'replica is up to date', 'replica_retried'
end
end
end
@@ -178,7 +191,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware do
context 'when replica is not up to date' do
before do
- allow(middleware).to receive(:replica_caught_up?).and_return(false)
+ allow(load_balancer).to receive(:select_up_to_date_host).and_return(false)
end
include_examples 'stick to the primary', 'primary'
diff --git a/spec/lib/gitlab/database/load_balancing_spec.rb b/spec/lib/gitlab/database/load_balancing_spec.rb
index 6ec8e0516f6..f40ad444081 100644
--- a/spec/lib/gitlab/database/load_balancing_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing_spec.rb
@@ -40,106 +40,25 @@ RSpec.describe Gitlab::Database::LoadBalancing do
end
describe '.configuration' do
- it 'returns a Hash' do
- lb_config = { 'hosts' => %w(foo) }
+ it 'returns the configuration for the load balancer' do
+ raw = ActiveRecord::Base.connection_db_config.configuration_hash
+ cfg = described_class.configuration
- original_db_config = Gitlab::Database.main.config
- modified_db_config = original_db_config.merge(load_balancing: lb_config)
- expect(Gitlab::Database.main).to receive(:config).and_return(modified_db_config)
-
- expect(described_class.configuration).to eq(lb_config)
- end
- end
-
- describe '.max_replication_difference' do
- context 'without an explicitly configured value' do
- it 'returns the default value' do
- allow(described_class)
- .to receive(:configuration)
- .and_return({})
-
- expect(described_class.max_replication_difference).to eq(8.megabytes)
- end
- end
-
- context 'with an explicitly configured value' do
- it 'returns the configured value' do
- allow(described_class)
- .to receive(:configuration)
- .and_return({ 'max_replication_difference' => 4 })
-
- expect(described_class.max_replication_difference).to eq(4)
- end
- end
- end
-
- describe '.max_replication_lag_time' do
- context 'without an explicitly configured value' do
- it 'returns the default value' do
- allow(described_class)
- .to receive(:configuration)
- .and_return({})
-
- expect(described_class.max_replication_lag_time).to eq(60)
- end
- end
-
- context 'with an explicitly configured value' do
- it 'returns the configured value' do
- allow(described_class)
- .to receive(:configuration)
- .and_return({ 'max_replication_lag_time' => 4 })
-
- expect(described_class.max_replication_lag_time).to eq(4)
- end
- end
- end
-
- describe '.replica_check_interval' do
- context 'without an explicitly configured value' do
- it 'returns the default value' do
- allow(described_class)
- .to receive(:configuration)
- .and_return({})
-
- expect(described_class.replica_check_interval).to eq(60)
- end
- end
-
- context 'with an explicitly configured value' do
- it 'returns the configured value' do
- allow(described_class)
- .to receive(:configuration)
- .and_return({ 'replica_check_interval' => 4 })
-
- expect(described_class.replica_check_interval).to eq(4)
- end
- end
- end
-
- describe '.hosts' do
- it 'returns a list of hosts' do
- allow(described_class)
- .to receive(:configuration)
- .and_return({ 'hosts' => %w(foo bar baz) })
-
- expect(described_class.hosts).to eq(%w(foo bar baz))
- end
- end
-
- describe '.pool_size' do
- it 'returns a Fixnum' do
- expect(described_class.pool_size).to be_a_kind_of(Integer)
+      # There isn't much to test here, as the load balancing settings might not
+      # be (and likely aren't) set when running tests.
+ expect(cfg.pool_size).to eq(raw[:pool])
end
end
describe '.enable?' do
before do
- allow(described_class).to receive(:hosts).and_return(%w(foo))
+ allow(described_class.configuration)
+ .to receive(:hosts)
+ .and_return(%w(foo))
end
it 'returns false when no hosts are specified' do
- allow(described_class).to receive(:hosts).and_return([])
+ allow(described_class.configuration).to receive(:hosts).and_return([])
expect(described_class.enable?).to eq(false)
end
@@ -163,10 +82,10 @@ RSpec.describe Gitlab::Database::LoadBalancing do
end
it 'returns true when service discovery is enabled' do
- allow(described_class).to receive(:hosts).and_return([])
+ allow(described_class.configuration).to receive(:hosts).and_return([])
allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(false)
- allow(described_class)
+ allow(described_class.configuration)
.to receive(:service_discovery_enabled?)
.and_return(true)
@@ -175,17 +94,17 @@ RSpec.describe Gitlab::Database::LoadBalancing do
end
describe '.configured?' do
- it 'returns true when Sidekiq is being used' do
- allow(described_class).to receive(:hosts).and_return(%w(foo))
- allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true)
+ it 'returns true when hosts are configured' do
+ allow(described_class.configuration)
+ .to receive(:hosts)
+ .and_return(%w[foo])
+
expect(described_class.configured?).to eq(true)
end
- it 'returns true when service discovery is enabled in Sidekiq' do
- allow(described_class).to receive(:hosts).and_return([])
- allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true)
-
- allow(described_class)
+ it 'returns true when service discovery is enabled' do
+ allow(described_class.configuration).to receive(:hosts).and_return([])
+ allow(described_class.configuration)
.to receive(:service_discovery_enabled?)
.and_return(true)
@@ -193,9 +112,8 @@ RSpec.describe Gitlab::Database::LoadBalancing do
end
it 'returns false when neither service discovery nor hosts are configured' do
- allow(described_class).to receive(:hosts).and_return([])
-
- allow(described_class)
+ allow(described_class.configuration).to receive(:hosts).and_return([])
+ allow(described_class.configuration)
.to receive(:service_discovery_enabled?)
.and_return(false)
@@ -204,9 +122,11 @@ RSpec.describe Gitlab::Database::LoadBalancing do
end
describe '.configure_proxy' do
- it 'configures the connection proxy' do
+ before do
allow(ActiveRecord::Base).to receive(:load_balancing_proxy=)
+ end
+ it 'configures the connection proxy' do
described_class.configure_proxy
expect(ActiveRecord::Base).to have_received(:load_balancing_proxy=)
@@ -214,71 +134,24 @@ RSpec.describe Gitlab::Database::LoadBalancing do
end
context 'when service discovery is enabled' do
- let(:service_discovery) { double(Gitlab::Database::LoadBalancing::ServiceDiscovery) }
-
it 'runs initial service discovery when configuring the connection proxy' do
- allow(described_class)
- .to receive(:configuration)
- .and_return('discover' => { 'record' => 'foo' })
-
- expect(Gitlab::Database::LoadBalancing::ServiceDiscovery).to receive(:new).and_return(service_discovery)
- expect(service_discovery).to receive(:perform_service_discovery)
-
- described_class.configure_proxy
- end
- end
- end
+ discover = instance_spy(Gitlab::Database::LoadBalancing::ServiceDiscovery)
- describe '.active_record_models' do
- it 'returns an Array' do
- expect(described_class.active_record_models).to be_an_instance_of(Array)
- end
- end
+ allow(described_class.configuration)
+ .to receive(:service_discovery)
+ .and_return({ record: 'foo' })
- describe '.service_discovery_enabled?' do
- it 'returns true if service discovery is enabled' do
- allow(described_class)
- .to receive(:configuration)
- .and_return('discover' => { 'record' => 'foo' })
-
- expect(described_class.service_discovery_enabled?).to eq(true)
- end
+ expect(Gitlab::Database::LoadBalancing::ServiceDiscovery)
+ .to receive(:new)
+ .with(
+ an_instance_of(Gitlab::Database::LoadBalancing::LoadBalancer),
+ an_instance_of(Hash)
+ )
+ .and_return(discover)
- it 'returns false if service discovery is disabled' do
- expect(described_class.service_discovery_enabled?).to eq(false)
- end
- end
+ expect(discover).to receive(:perform_service_discovery)
- describe '.service_discovery_configuration' do
- context 'when no configuration is provided' do
- it 'returns a default configuration Hash' do
- expect(described_class.service_discovery_configuration).to eq(
- nameserver: 'localhost',
- port: 8600,
- record: nil,
- record_type: 'A',
- interval: 60,
- disconnect_timeout: 120,
- use_tcp: false
- )
- end
- end
-
- context 'when configuration is provided' do
- it 'returns a Hash including the custom configuration' do
- allow(described_class)
- .to receive(:configuration)
- .and_return('discover' => { 'record' => 'foo', 'record_type' => 'SRV' })
-
- expect(described_class.service_discovery_configuration).to eq(
- nameserver: 'localhost',
- port: 8600,
- record: 'foo',
- record_type: 'SRV',
- interval: 60,
- disconnect_timeout: 120,
- use_tcp: false
- )
+ described_class.configure_proxy
end
end
end
@@ -292,15 +165,23 @@ RSpec.describe Gitlab::Database::LoadBalancing do
end
it 'starts service discovery if enabled' do
- allow(described_class)
+ allow(described_class.configuration)
.to receive(:service_discovery_enabled?)
.and_return(true)
instance = double(:instance)
+ config = Gitlab::Database::LoadBalancing::Configuration
+ .new(ActiveRecord::Base)
+ lb = Gitlab::Database::LoadBalancing::LoadBalancer.new(config)
+ proxy = Gitlab::Database::LoadBalancing::ConnectionProxy.new(lb)
+
+ allow(described_class)
+ .to receive(:proxy)
+ .and_return(proxy)
expect(Gitlab::Database::LoadBalancing::ServiceDiscovery)
.to receive(:new)
- .with(an_instance_of(Hash))
+ .with(lb, an_instance_of(Hash))
.and_return(instance)
expect(instance)
@@ -330,7 +211,13 @@ RSpec.describe Gitlab::Database::LoadBalancing do
context 'when the load balancing is configured' do
let(:db_host) { ActiveRecord::Base.connection_pool.db_config.host }
- let(:proxy) { described_class::ConnectionProxy.new([db_host]) }
+ let(:config) do
+ Gitlab::Database::LoadBalancing::Configuration
+ .new(ActiveRecord::Base, [db_host])
+ end
+
+ let(:load_balancer) { described_class::LoadBalancer.new(config) }
+ let(:proxy) { described_class::ConnectionProxy.new(load_balancer) }
context 'when a proxy connection is used' do
it 'returns :unknown' do
@@ -770,6 +657,16 @@ RSpec.describe Gitlab::Database::LoadBalancing do
it 'redirects queries to the right roles' do
roles = []
+ # If we don't run any queries, the pool may be a NullPool. This can
+ # result in some tests reporting a role as `:unknown`, even though the
+ # tests themselves are correct.
+ #
+      # To prevent this from happening, we run a simple query to ensure the
+      # proper pool type is put in place. The exact query doesn't matter, as
+      # long as it actually hits the database and thus creates a proper
+      # connection pool.
+ model.count
+
subscriber = ActiveSupport::Notifications.subscribe('sql.active_record') do |event|
role = ::Gitlab::Database::LoadBalancing.db_role_for_connection(event.payload[:connection])
roles << role if role.present?
diff --git a/spec/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers_spec.rb
new file mode 100644
index 00000000000..708d1be6e00
--- /dev/null
+++ b/spec/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::MigrationHelpers::LooseForeignKeyHelpers do
+ let_it_be(:migration) do
+ ActiveRecord::Migration.new.extend(described_class)
+ end
+
+ let(:model) do
+ Class.new(ApplicationRecord) do
+ self.table_name = 'loose_fk_test_table'
+ end
+ end
+
+ before(:all) do
+ migration.create_table :loose_fk_test_table do |t|
+ t.timestamps
+ end
+ end
+
+ before do
+ 3.times { model.create! }
+ end
+
+ context 'when the record deletion tracker trigger is not installed' do
+    it 'does not store record deletions' do
+ model.delete_all
+
+ expect(LooseForeignKeys::DeletedRecord.count).to eq(0)
+ end
+ end
+
+ context 'when the record deletion tracker trigger is installed' do
+ before do
+ migration.track_record_deletions(:loose_fk_test_table)
+ end
+
+ it 'stores the record deletion' do
+ records = model.all
+ record_to_be_deleted = records.last
+
+ record_to_be_deleted.delete
+
+ expect(LooseForeignKeys::DeletedRecord.count).to eq(1)
+ deleted_record = LooseForeignKeys::DeletedRecord.all.first
+
+ expect(deleted_record.deleted_table_primary_key_value).to eq(record_to_be_deleted.id)
+ expect(deleted_record.deleted_table_name).to eq('loose_fk_test_table')
+ end
+
+ it 'stores multiple record deletions' do
+ model.delete_all
+
+ expect(LooseForeignKeys::DeletedRecord.count).to eq(3)
+ end
+ end
+end
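For context, the helper exercised above installs a database trigger that records row deletions in LooseForeignKeys::DeletedRecord. A minimal, hypothetical usage sketch follows (the table name is taken from the spec; outside a spec this would normally live in a real migration class):

    # Extend a migration with the helper, as the spec does.
    migration = ActiveRecord::Migration.new.extend(Gitlab::Database::MigrationHelpers::LooseForeignKeyHelpers)

    # After this, every DELETE on the table is captured by the trigger ...
    migration.track_record_deletions(:loose_fk_test_table)

    # ... and surfaces as a LooseForeignKeys::DeletedRecord carrying the
    # deleted table name and primary key value, as asserted above.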
diff --git a/spec/lib/gitlab/database/migration_helpers/v2_spec.rb b/spec/lib/gitlab/database/migration_helpers/v2_spec.rb
index f132ecbf13b..854e97ef897 100644
--- a/spec/lib/gitlab/database/migration_helpers/v2_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers/v2_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::MigrationHelpers::V2 do
include Database::TriggerHelpers
+ include Database::TableSchemaHelpers
let(:migration) do
ActiveRecord::Migration.new.extend(described_class)
@@ -11,6 +12,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers::V2 do
before do
allow(migration).to receive(:puts)
+
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
end
shared_examples_for 'Setting up to rename a column' do
@@ -218,4 +221,105 @@ RSpec.describe Gitlab::Database::MigrationHelpers::V2 do
let(:added_column) { :original }
end
end
+
+ describe '#create_table' do
+ let(:table_name) { :test_table }
+ let(:column_attributes) do
+ [
+ { name: 'id', sql_type: 'bigint', null: false, default: nil },
+ { name: 'created_at', sql_type: 'timestamp with time zone', null: false, default: nil },
+ { name: 'updated_at', sql_type: 'timestamp with time zone', null: false, default: nil },
+ { name: 'some_id', sql_type: 'integer', null: false, default: nil },
+ { name: 'active', sql_type: 'boolean', null: false, default: 'true' },
+ { name: 'name', sql_type: 'text', null: true, default: nil }
+ ]
+ end
+
+ context 'using a limit: attribute on .text' do
+ it 'creates the table as expected' do
+ migration.create_table table_name do |t|
+ t.timestamps_with_timezone
+ t.integer :some_id, null: false
+ t.boolean :active, null: false, default: true
+ t.text :name, limit: 100
+ end
+
+ expect_table_columns_to_match(column_attributes, table_name)
+ expect_check_constraint(table_name, 'check_cda6f69506', 'char_length(name) <= 100')
+ end
+ end
+ end
+
+ describe '#with_lock_retries' do
+ let(:model) do
+ ActiveRecord::Migration.new.extend(described_class)
+ end
+
+ let(:buffer) { StringIO.new }
+ let(:in_memory_logger) { Gitlab::JsonLogger.new(buffer) }
+ let(:env) { { 'DISABLE_LOCK_RETRIES' => 'true' } }
+
+ it 'sets the migration class name in the logs' do
+ model.with_lock_retries(env: env, logger: in_memory_logger) { }
+
+ buffer.rewind
+ expect(buffer.read).to include("\"class\":\"#{model.class}\"")
+ end
+
+ where(raise_on_exhaustion: [true, false])
+
+ with_them do
+ it 'sets raise_on_exhaustion as requested' do
+ with_lock_retries = double
+ expect(Gitlab::Database::WithLockRetries).to receive(:new).and_return(with_lock_retries)
+ expect(with_lock_retries).to receive(:run).with(raise_on_exhaustion: raise_on_exhaustion)
+
+ model.with_lock_retries(env: env, logger: in_memory_logger, raise_on_exhaustion: raise_on_exhaustion) { }
+ end
+ end
+
+ it 'does not raise on exhaustion by default' do
+ with_lock_retries = double
+ expect(Gitlab::Database::WithLockRetries).to receive(:new).and_return(with_lock_retries)
+ expect(with_lock_retries).to receive(:run).with(raise_on_exhaustion: false)
+
+ model.with_lock_retries(env: env, logger: in_memory_logger) { }
+ end
+
+ it 'defaults to disallowing subtransactions' do
+ with_lock_retries = double
+ expect(Gitlab::Database::WithLockRetries).to receive(:new).with(hash_including(allow_savepoints: false)).and_return(with_lock_retries)
+ expect(with_lock_retries).to receive(:run).with(raise_on_exhaustion: false)
+
+ model.with_lock_retries(env: env, logger: in_memory_logger) { }
+ end
+
+ context 'when in transaction' do
+ before do
+ allow(model).to receive(:transaction_open?).and_return(true)
+ end
+
+ context 'when lock retries are enabled' do
+ before do
+ allow(model).to receive(:enable_lock_retries?).and_return(true)
+ end
+
+ it 'does not use Gitlab::Database::WithLockRetries and executes the provided block directly' do
+ expect(Gitlab::Database::WithLockRetries).not_to receive(:new)
+
+ expect(model.with_lock_retries(env: env, logger: in_memory_logger) { :block_result }).to eq(:block_result)
+ end
+ end
+
+ context 'when lock retries are not enabled' do
+ before do
+ allow(model).to receive(:enable_lock_retries?).and_return(false)
+ end
+
+ it 'raises an error' do
+ expect { model.with_lock_retries(env: env, logger: in_memory_logger) { } }.to raise_error /can not be run inside an already open transaction/
+ end
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 9f9aef77de7..006f8a39f9c 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -798,13 +798,13 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
# This spec runs without an enclosing transaction (:delete truncation method for db_cleaner)
context 'when the statement_timeout is already disabled', :delete do
before do
- ActiveRecord::Base.connection.execute('SET statement_timeout TO 0')
+ ActiveRecord::Migration.connection.execute('SET statement_timeout TO 0')
end
after do
- # Use ActiveRecord::Base.connection instead of model.execute
+ # Use ActiveRecord::Migration.connection instead of model.execute
# so that this call is not counted below
- ActiveRecord::Base.connection.execute('RESET statement_timeout')
+ ActiveRecord::Migration.connection.execute('RESET statement_timeout')
end
it 'yields control without disabling the timeout or resetting' do
@@ -954,10 +954,11 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
let(:trigger_name) { model.rename_trigger_name(:users, :id, :new) }
let(:user) { create(:user) }
let(:copy_trigger) { double('copy trigger') }
+ let(:connection) { ActiveRecord::Migration.connection }
before do
expect(Gitlab::Database::UnidirectionalCopyTrigger).to receive(:on_table)
- .with(:users).and_return(copy_trigger)
+ .with(:users, connection: connection).and_return(copy_trigger)
end
it 'copies the value to the new column using the type_cast_function', :aggregate_failures do
@@ -1300,11 +1301,13 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
describe '#install_rename_triggers' do
+ let(:connection) { ActiveRecord::Migration.connection }
+
it 'installs the triggers' do
copy_trigger = double('copy trigger')
expect(Gitlab::Database::UnidirectionalCopyTrigger).to receive(:on_table)
- .with(:users).and_return(copy_trigger)
+ .with(:users, connection: connection).and_return(copy_trigger)
expect(copy_trigger).to receive(:create).with(:old, :new, trigger_name: 'foo')
@@ -1313,11 +1316,13 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
describe '#remove_rename_triggers' do
+ let(:connection) { ActiveRecord::Migration.connection }
+
it 'removes the function and trigger' do
copy_trigger = double('copy trigger')
expect(Gitlab::Database::UnidirectionalCopyTrigger).to receive(:on_table)
- .with('bar').and_return(copy_trigger)
+ .with('bar', connection: connection).and_return(copy_trigger)
expect(copy_trigger).to receive(:drop).with('foo')
@@ -1886,6 +1891,61 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
+ describe '#restore_conversion_of_integer_to_bigint' do
+ let(:table) { :test_table }
+ let(:column) { :id }
+ let(:tmp_column) { model.convert_to_bigint_column(column) }
+
+ before do
+ model.create_table table, id: false do |t|
+ t.bigint :id, primary_key: true
+ t.bigint :build_id, null: false
+ t.timestamps
+ end
+ end
+
+ context 'when the target table does not exist' do
+ it 'raises an error' do
+ expect { model.restore_conversion_of_integer_to_bigint(:this_table_is_not_real, column) }
+ .to raise_error('Table this_table_is_not_real does not exist')
+ end
+ end
+
+ context 'when the column to migrate does not exist' do
+ it 'raises an error' do
+ expect { model.restore_conversion_of_integer_to_bigint(table, :this_column_is_not_real) }
+ .to raise_error(ArgumentError, "Column this_column_is_not_real does not exist on #{table}")
+ end
+ end
+
+ context 'when a single column is given' do
+ let(:column_to_convert) { 'id' }
+ let(:temporary_column) { model.convert_to_bigint_column(column_to_convert) }
+
+ it 'creates the correct columns and installs the trigger' do
+ expect(model).to receive(:add_column).with(table, temporary_column, :int, default: 0, null: false)
+
+ expect(model).to receive(:install_rename_triggers).with(table, [column_to_convert], [temporary_column])
+
+ model.restore_conversion_of_integer_to_bigint(table, column_to_convert)
+ end
+ end
+
+ context 'when multiple columns are given' do
+ let(:columns_to_convert) { %i[id build_id] }
+ let(:temporary_columns) { columns_to_convert.map { |column| model.convert_to_bigint_column(column) } }
+
+ it 'creates the correct columns and installs the trigger' do
+ expect(model).to receive(:add_column).with(table, temporary_columns[0], :int, default: 0, null: false)
+ expect(model).to receive(:add_column).with(table, temporary_columns[1], :int, default: 0, null: false)
+
+ expect(model).to receive(:install_rename_triggers).with(table, columns_to_convert, temporary_columns)
+
+ model.restore_conversion_of_integer_to_bigint(table, columns_to_convert)
+ end
+ end
+ end
+
describe '#revert_initialize_conversion_of_integer_to_bigint' do
let(:table) { :test_table }
@@ -2139,7 +2199,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
describe '#index_exists_by_name?' do
it 'returns true if an index exists' do
- ActiveRecord::Base.connection.execute(
+ ActiveRecord::Migration.connection.execute(
'CREATE INDEX test_index_for_index_exists ON projects (path);'
)
@@ -2154,7 +2214,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
context 'when an index with a function exists' do
before do
- ActiveRecord::Base.connection.execute(
+ ActiveRecord::Migration.connection.execute(
'CREATE INDEX test_index ON projects (LOWER(path));'
)
end
@@ -2167,15 +2227,15 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
context 'when an index exists for a table with the same name in another schema' do
before do
- ActiveRecord::Base.connection.execute(
+ ActiveRecord::Migration.connection.execute(
'CREATE SCHEMA new_test_schema'
)
- ActiveRecord::Base.connection.execute(
+ ActiveRecord::Migration.connection.execute(
'CREATE TABLE new_test_schema.projects (id integer, name character varying)'
)
- ActiveRecord::Base.connection.execute(
+ ActiveRecord::Migration.connection.execute(
'CREATE INDEX test_index_on_name ON new_test_schema.projects (LOWER(name));'
)
end
@@ -2255,8 +2315,6 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(buffer.read).to include("\"class\":\"#{model.class}\"")
end
- using RSpec::Parameterized::TableSyntax
-
where(raise_on_exhaustion: [true, false])
with_them do
@@ -2276,6 +2334,15 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
model.with_lock_retries(env: env, logger: in_memory_logger) { }
end
+
+ it 'defaults to allowing subtransactions' do
+ with_lock_retries = double
+
+ expect(Gitlab::Database::WithLockRetries).to receive(:new).with(hash_including(allow_savepoints: true)).and_return(with_lock_retries)
+ expect(with_lock_retries).to receive(:run).with(raise_on_exhaustion: false)
+
+ model.with_lock_retries(env: env, logger: in_memory_logger) { }
+ end
end
describe '#backfill_iids' do
@@ -2401,19 +2468,19 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
describe '#check_constraint_exists?' do
before do
- ActiveRecord::Base.connection.execute(
+ ActiveRecord::Migration.connection.execute(
'ALTER TABLE projects ADD CONSTRAINT check_1 CHECK (char_length(path) <= 5) NOT VALID'
)
- ActiveRecord::Base.connection.execute(
+ ActiveRecord::Migration.connection.execute(
'CREATE SCHEMA new_test_schema'
)
- ActiveRecord::Base.connection.execute(
+ ActiveRecord::Migration.connection.execute(
'CREATE TABLE new_test_schema.projects (id integer, name character varying)'
)
- ActiveRecord::Base.connection.execute(
+ ActiveRecord::Migration.connection.execute(
'ALTER TABLE new_test_schema.projects ADD CONSTRAINT check_2 CHECK (char_length(name) <= 5)'
)
end
@@ -2628,6 +2695,10 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
describe '#remove_check_constraint' do
+ before do
+ allow(model).to receive(:transaction_open?).and_return(false)
+ end
+
it 'removes the constraint' do
drop_sql = /ALTER TABLE test_table\s+DROP CONSTRAINT IF EXISTS check_name/
diff --git a/spec/lib/gitlab/database/migration_spec.rb b/spec/lib/gitlab/database/migration_spec.rb
new file mode 100644
index 00000000000..287e738c24e
--- /dev/null
+++ b/spec/lib/gitlab/database/migration_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Migration do
+ describe '.[]' do
+ context 'version: 1.0' do
+ subject { described_class[1.0] }
+
+ it 'inherits from ActiveRecord::Migration[6.1]' do
+ expect(subject.superclass).to eq(ActiveRecord::Migration[6.1])
+ end
+
+ it 'includes migration helpers version 2' do
+ expect(subject.included_modules).to include(Gitlab::Database::MigrationHelpers::V2)
+ end
+
+ it 'includes LockRetriesConcern' do
+ expect(subject.included_modules).to include(Gitlab::Database::Migration::LockRetriesConcern)
+ end
+ end
+
+ context 'unknown version' do
+ it 'raises an error' do
+ expect { described_class[0] }.to raise_error(ArgumentError, /Unknown migration version/)
+ end
+ end
+ end
+
+ describe '.current_version' do
+ it 'includes current ActiveRecord migration class' do
+      # This breaks upon a Rails upgrade. In that case, we'll add a new version to
+      # Gitlab::Database::Migration::MIGRATION_CLASSES, bump .current_version, and leave
+      # existing migrations and already defined versions of Gitlab::Database::Migration untouched.
+ expect(described_class[described_class.current_version].superclass).to eq(ActiveRecord::Migration::Current)
+ end
+ end
+
+ describe Gitlab::Database::Migration::LockRetriesConcern do
+ subject { class_def.new }
+
+ context 'when not explicitly called' do
+ let(:class_def) do
+ Class.new do
+ include Gitlab::Database::Migration::LockRetriesConcern
+ end
+ end
+
+      it 'does not enable lock retries by default' do
+ expect(subject.enable_lock_retries?).not_to be_truthy
+ end
+ end
+
+    context 'when explicitly enabled' do
+ let(:class_def) do
+ Class.new do
+ include Gitlab::Database::Migration::LockRetriesConcern
+
+ enable_lock_retries!
+ end
+ end
+
+      it 'enables lock retries' do
+ expect(subject.enable_lock_retries?).to be_truthy
+ end
+ end
+ end
+end
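The versioned migration class tested above bundles ActiveRecord::Migration[6.1], the V2 migration helpers, and LockRetriesConcern. A hypothetical migration opting into lock retries could therefore look like this sketch (class and table names are invented for illustration; the limit: handling and timestamps_with_timezone mirror the V2 #create_table spec earlier in this diff):

    class CreateWidgets < Gitlab::Database::Migration[1.0]
      # Class-level opt-in provided by LockRetriesConcern, per the spec above.
      enable_lock_retries!

      def change
        create_table :widgets do |t|
          t.timestamps_with_timezone
          t.text :name, limit: 100 # the V2 helper enforces this via a CHECK constraint
        end
      end
    end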
diff --git a/spec/lib/gitlab/database/migrations/lock_retry_mixin_spec.rb b/spec/lib/gitlab/database/migrations/lock_retry_mixin_spec.rb
new file mode 100644
index 00000000000..076fb9e8215
--- /dev/null
+++ b/spec/lib/gitlab/database/migrations/lock_retry_mixin_spec.rb
@@ -0,0 +1,129 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Migrations::LockRetryMixin do
+ describe Gitlab::Database::Migrations::LockRetryMixin::ActiveRecordMigrationProxyLockRetries do
+ let(:migration) { double }
+ let(:return_value) { double }
+ let(:class_def) do
+ Class.new do
+ include Gitlab::Database::Migrations::LockRetryMixin::ActiveRecordMigrationProxyLockRetries
+
+ attr_reader :migration
+
+ def initialize(migration)
+ @migration = migration
+ end
+ end
+ end
+
+ describe '#enable_lock_retries?' do
+ subject { class_def.new(migration).enable_lock_retries? }
+
+ it 'delegates to #migration' do
+ expect(migration).to receive(:enable_lock_retries?).and_return(return_value)
+
+ result = subject
+
+ expect(result).to eq(return_value)
+ end
+ end
+
+ describe '#migration_class' do
+ subject { class_def.new(migration).migration_class }
+
+ it 'retrieves actual migration class from #migration' do
+ expect(migration).to receive(:class).and_return(return_value)
+
+ result = subject
+
+ expect(result).to eq(return_value)
+ end
+ end
+ end
+
+ describe Gitlab::Database::Migrations::LockRetryMixin::ActiveRecordMigratorLockRetries do
+ let(:class_def) do
+ Class.new do
+ attr_reader :receiver
+
+ def initialize(receiver)
+ @receiver = receiver
+ end
+
+ def ddl_transaction(migration, &block)
+ receiver.ddl_transaction(migration, &block)
+ end
+
+ def use_transaction?(migration)
+ receiver.use_transaction?(migration)
+ end
+ end.prepend(Gitlab::Database::Migrations::LockRetryMixin::ActiveRecordMigratorLockRetries)
+ end
+
+ subject { class_def.new(receiver) }
+
+ before do
+ allow(migration).to receive(:migration_class).and_return('TestClass')
+ allow(receiver).to receive(:ddl_transaction)
+ end
+
+ context 'with transactions disabled' do
+ let(:migration) { double('migration', enable_lock_retries?: false) }
+ let(:receiver) { double('receiver', use_transaction?: false)}
+
+ it 'calls super method' do
+ p = proc { }
+
+ expect(receiver).to receive(:ddl_transaction).with(migration, &p)
+
+ subject.ddl_transaction(migration, &p)
+ end
+ end
+
+ context 'with transactions enabled, but lock retries disabled' do
+ let(:receiver) { double('receiver', use_transaction?: true)}
+ let(:migration) { double('migration', enable_lock_retries?: false) }
+
+ it 'calls super method' do
+ p = proc { }
+
+ expect(receiver).to receive(:ddl_transaction).with(migration, &p)
+
+ subject.ddl_transaction(migration, &p)
+ end
+ end
+
+ context 'with transactions enabled and lock retries enabled' do
+ let(:receiver) { double('receiver', use_transaction?: true)}
+ let(:migration) { double('migration', enable_lock_retries?: true) }
+
+ it 'calls super method' do
+ p = proc { }
+
+ expect(receiver).not_to receive(:ddl_transaction)
+ expect_next_instance_of(Gitlab::Database::WithLockRetries) do |retries|
+ expect(retries).to receive(:run).with(raise_on_exhaustion: false, &p)
+ end
+
+ subject.ddl_transaction(migration, &p)
+ end
+ end
+ end
+
+ describe '.patch!' do
+ subject { described_class.patch! }
+
+ it 'patches MigrationProxy' do
+ expect(ActiveRecord::MigrationProxy).to receive(:prepend).with(Gitlab::Database::Migrations::LockRetryMixin::ActiveRecordMigrationProxyLockRetries)
+
+ subject
+ end
+
+ it 'patches Migrator' do
+ expect(ActiveRecord::Migrator).to receive(:prepend).with(Gitlab::Database::Migrations::LockRetryMixin::ActiveRecordMigratorLockRetries)
+
+ subject
+ end
+ end
+end
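
The lock_retry_mixin_spec.rb added above drives out two prepend modules and a patch! hook. A hedged sketch of how those pieces could fit together; the WithLockRetries construction details (logger, timing) are omitted because the spec only asserts that #run receives raise_on_exhaustion: false:

    # Hedged sketch of the prepend-based patching verified above.
    module Gitlab
      module Database
        module Migrations
          module LockRetryMixin
            module ActiveRecordMigrationProxyLockRetries
              def enable_lock_retries?
                migration.enable_lock_retries?
              end

              def migration_class
                migration.class
              end
            end

            module ActiveRecordMigratorLockRetries
              # Runs the DDL block under WithLockRetries when the migration
              # opted in; otherwise falls through to the stock behaviour.
              def ddl_transaction(migration, &block)
                if use_transaction?(migration) && migration.enable_lock_retries?
                  Gitlab::Database::WithLockRetries.new.run(raise_on_exhaustion: false, &block)
                else
                  super
                end
              end
            end

            def self.patch!
              ActiveRecord::MigrationProxy.prepend(ActiveRecordMigrationProxyLockRetries)
              ActiveRecord::Migrator.prepend(ActiveRecordMigratorLockRetries)
            end
          end
        end
      end
    end
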
diff --git a/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb b/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb
index c4fbf53d1c2..27ada12b067 100644
--- a/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy do
+ let(:connection) { ActiveRecord::Base.connection }
+
describe '#current_partitions' do
subject { described_class.new(model, partitioning_key).current_partitions }
@@ -11,7 +13,7 @@ RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy do
let(:table_name) { :partitioned_test }
before do
- ActiveRecord::Base.connection.execute(<<~SQL)
+ connection.execute(<<~SQL)
CREATE TABLE #{table_name}
(id serial not null, created_at timestamptz not null, PRIMARY KEY (id, created_at))
PARTITION BY RANGE (created_at);
@@ -52,7 +54,7 @@ RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy do
context 'with existing partitions' do
before do
- ActiveRecord::Base.connection.execute(<<~SQL)
+ connection.execute(<<~SQL)
CREATE TABLE #{model.table_name}
(id serial not null, created_at timestamptz not null, PRIMARY KEY (id, created_at))
PARTITION BY RANGE (created_at);
@@ -113,7 +115,7 @@ RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy do
context 'without existing partitions' do
before do
- ActiveRecord::Base.connection.execute(<<~SQL)
+ connection.execute(<<~SQL)
CREATE TABLE #{model.table_name}
(id serial not null, created_at timestamptz not null, PRIMARY KEY (id, created_at))
PARTITION BY RANGE (created_at);
@@ -159,7 +161,7 @@ RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy do
context 'with a regular partition but no catchall (MINVALUE, to) partition' do
before do
- ActiveRecord::Base.connection.execute(<<~SQL)
+ connection.execute(<<~SQL)
CREATE TABLE #{model.table_name}
(id serial not null, created_at timestamptz not null, PRIMARY KEY (id, created_at))
PARTITION BY RANGE (created_at);
@@ -248,6 +250,25 @@ RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy do
Gitlab::Database::Partitioning::TimePartition.new(model.table_name, '2020-05-01', '2020-06-01', partition_name: 'partitioned_test_202005')
)
end
+
+      context 'when retain_non_empty_partitions is true' do
+ subject { described_class.new(model, partitioning_key, retain_for: 2.months, retain_non_empty_partitions: true).extra_partitions }
+
+ it 'prunes empty partitions' do
+ expect(subject).to contain_exactly(
+ Gitlab::Database::Partitioning::TimePartition.new(model.table_name, nil, '2020-05-01', partition_name: 'partitioned_test_000000'),
+ Gitlab::Database::Partitioning::TimePartition.new(model.table_name, '2020-05-01', '2020-06-01', partition_name: 'partitioned_test_202005')
+ )
+ end
+
+ it 'does not prune non-empty partitions' do
+ connection.execute("INSERT INTO #{table_name} (created_at) VALUES (('2020-05-15'))") # inserting one record into partitioned_test_202005
+
+ expect(subject).to contain_exactly(
+ Gitlab::Database::Partitioning::TimePartition.new(model.table_name, nil, '2020-05-01', partition_name: 'partitioned_test_000000')
+ )
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/database/partitioning/multi_database_partition_manager_spec.rb b/spec/lib/gitlab/database/partitioning/multi_database_partition_manager_spec.rb
new file mode 100644
index 00000000000..3c94c1bf4ea
--- /dev/null
+++ b/spec/lib/gitlab/database/partitioning/multi_database_partition_manager_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Partitioning::MultiDatabasePartitionManager, '#sync_partitions' do
+ subject(:sync_partitions) { manager.sync_partitions }
+
+ let(:manager) { described_class.new(models) }
+ let(:models) { [model1, model2] }
+
+ let(:model1) { double('model1', connection: connection1, table_name: 'table1') }
+ let(:model2) { double('model2', connection: connection1, table_name: 'table2') }
+
+ let(:connection1) { double('connection1') }
+ let(:connection2) { double('connection2') }
+
+ let(:target_manager_class) { Gitlab::Database::Partitioning::PartitionManager }
+ let(:target_manager1) { double('partition manager') }
+ let(:target_manager2) { double('partition manager') }
+
+ before do
+ allow(manager).to receive(:connection_name).and_return('name')
+ end
+
+ it 'syncs model partitions, setting up the appropriate connection for each', :aggregate_failures do
+ expect(Gitlab::Database::SharedModel).to receive(:using_connection).with(model1.connection).and_yield.ordered
+ expect(target_manager_class).to receive(:new).with(model1).and_return(target_manager1).ordered
+ expect(target_manager1).to receive(:sync_partitions)
+
+ expect(Gitlab::Database::SharedModel).to receive(:using_connection).with(model2.connection).and_yield.ordered
+ expect(target_manager_class).to receive(:new).with(model2).and_return(target_manager2).ordered
+ expect(target_manager2).to receive(:sync_partitions)
+
+ sync_partitions
+ end
+end
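
The new multi-database manager spec above boils down to: switch SharedModel onto each model's connection, then delegate to a per-model PartitionManager. A minimal sketch under that reading; per-connection logging (the stubbed #connection_name) is elided:

    # Minimal sketch matching the expectations above.
    module Gitlab
      module Database
        module Partitioning
          class MultiDatabasePartitionManager
            def initialize(models)
              @models = models
            end

            def sync_partitions
              @models.each do |model|
                Gitlab::Database::SharedModel.using_connection(model.connection) do
                  PartitionManager.new(model).sync_partitions
                end
              end
            end
          end
        end
      end
    end
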
diff --git a/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb b/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb
index 3d60457c3a9..8f1f5b5ba1b 100644
--- a/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb
@@ -12,26 +12,18 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do
end
end
- describe '.register' do
- let(:model) { double(partitioning_strategy: nil) }
-
- it 'remembers registered models' do
- expect { described_class.register(model) }.to change { described_class.models }.to include(model)
- end
- end
-
context 'creating partitions (mocked)' do
- subject(:sync_partitions) { described_class.new(models).sync_partitions }
+ subject(:sync_partitions) { described_class.new(model).sync_partitions }
- let(:models) { [model] }
- let(:model) { double(partitioning_strategy: partitioning_strategy, table_name: table) }
+ let(:model) { double(partitioning_strategy: partitioning_strategy, table_name: table, connection: connection) }
let(:partitioning_strategy) { double(missing_partitions: partitions, extra_partitions: []) }
+ let(:connection) { ActiveRecord::Base.connection }
let(:table) { "some_table" }
before do
- allow(ActiveRecord::Base.connection).to receive(:table_exists?).and_call_original
- allow(ActiveRecord::Base.connection).to receive(:table_exists?).with(table).and_return(true)
- allow(ActiveRecord::Base.connection).to receive(:execute).and_call_original
+ allow(connection).to receive(:table_exists?).and_call_original
+ allow(connection).to receive(:table_exists?).with(table).and_return(true)
+ allow(connection).to receive(:execute).and_call_original
stub_exclusive_lease(described_class::MANAGEMENT_LEASE_KEY % table, timeout: described_class::LEASE_TIMEOUT)
end
@@ -44,35 +36,23 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do
end
it 'creates the partition' do
- expect(ActiveRecord::Base.connection).to receive(:execute).with(partitions.first.to_sql)
- expect(ActiveRecord::Base.connection).to receive(:execute).with(partitions.second.to_sql)
+ expect(connection).to receive(:execute).with(partitions.first.to_sql)
+ expect(connection).to receive(:execute).with(partitions.second.to_sql)
sync_partitions
end
- context 'error handling with 2 models' do
- let(:models) do
- [
- double(partitioning_strategy: strategy1, table_name: table),
- double(partitioning_strategy: strategy2, table_name: table)
- ]
- end
-
- let(:strategy1) { double('strategy1', missing_partitions: nil, extra_partitions: []) }
- let(:strategy2) { double('strategy2', missing_partitions: partitions, extra_partitions: []) }
+ context 'when an error occurs during partition management' do
+ it 'does not raise an error' do
+ expect(partitioning_strategy).to receive(:missing_partitions).and_raise('this should never happen (tm)')
- it 'still creates partitions for the second table' do
- expect(strategy1).to receive(:missing_partitions).and_raise('this should never happen (tm)')
- expect(ActiveRecord::Base.connection).to receive(:execute).with(partitions.first.to_sql)
- expect(ActiveRecord::Base.connection).to receive(:execute).with(partitions.second.to_sql)
-
- sync_partitions
+ expect { sync_partitions }.not_to raise_error
end
end
end
context 'creating partitions' do
- subject(:sync_partitions) { described_class.new([my_model]).sync_partitions }
+ subject(:sync_partitions) { described_class.new(my_model).sync_partitions }
let(:connection) { ActiveRecord::Base.connection }
let(:my_model) do
@@ -101,15 +81,15 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do
context 'detaching partitions (mocked)' do
subject(:sync_partitions) { manager.sync_partitions }
- let(:manager) { described_class.new(models) }
- let(:models) { [model] }
- let(:model) { double(partitioning_strategy: partitioning_strategy, table_name: table)}
+ let(:manager) { described_class.new(model) }
+ let(:model) { double(partitioning_strategy: partitioning_strategy, table_name: table, connection: connection) }
let(:partitioning_strategy) { double(extra_partitions: extra_partitions, missing_partitions: []) }
+ let(:connection) { ActiveRecord::Base.connection }
let(:table) { "foo" }
before do
- allow(ActiveRecord::Base.connection).to receive(:table_exists?).and_call_original
- allow(ActiveRecord::Base.connection).to receive(:table_exists?).with(table).and_return(true)
+ allow(connection).to receive(:table_exists?).and_call_original
+ allow(connection).to receive(:table_exists?).with(table).and_return(true)
stub_exclusive_lease(described_class::MANAGEMENT_LEASE_KEY % table, timeout: described_class::LEASE_TIMEOUT)
end
@@ -131,24 +111,6 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do
sync_partitions
end
-
- context 'error handling' do
- let(:models) do
- [
- double(partitioning_strategy: error_strategy, table_name: table),
- model
- ]
- end
-
- let(:error_strategy) { double(extra_partitions: nil, missing_partitions: []) }
-
- it 'still drops partitions for the other model' do
- expect(error_strategy).to receive(:extra_partitions).and_raise('injected error!')
- extra_partitions.each { |p| expect(manager).to receive(:detach_one_partition).with(p) }
-
- sync_partitions
- end
- end
end
context 'with the partition_pruning feature flag disabled' do
@@ -171,7 +133,7 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do
end
end
- subject { described_class.new([my_model]).sync_partitions }
+ subject { described_class.new(my_model).sync_partitions }
let(:connection) { ActiveRecord::Base.connection }
let(:my_model) do
@@ -280,11 +242,11 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do
it 'creates partitions for the future then drops the oldest one after a month' do
# 1 month for the current month, 1 month for the old month that we're retaining data for, headroom
expected_num_partitions = (Gitlab::Database::Partitioning::MonthlyStrategy::HEADROOM + 2.months) / 1.month
- expect { described_class.new([my_model]).sync_partitions }.to change { num_partitions(my_model) }.from(0).to(expected_num_partitions)
+ expect { described_class.new(my_model).sync_partitions }.to change { num_partitions(my_model) }.from(0).to(expected_num_partitions)
travel 1.month
- expect { described_class.new([my_model]).sync_partitions }.to change { has_partition(my_model, 2.months.ago.beginning_of_month) }.from(true).to(false).and(change { num_partitions(my_model) }.by(0))
+ expect { described_class.new(my_model).sync_partitions }.to change { has_partition(my_model, 2.months.ago.beginning_of_month) }.from(true).to(false).and(change { num_partitions(my_model) }.by(0))
end
end
end
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb
index a524fe681e9..f0e34476cf2 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb
@@ -27,6 +27,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::ForeignKeyHelpers
before do
allow(migration).to receive(:puts)
+ allow(migration).to receive(:transaction_open?).and_return(false)
connection.execute(<<~SQL)
CREATE TABLE #{target_table_name} (
@@ -141,5 +142,15 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::ForeignKeyHelpers
.with(source_table_name, target_table_name, options)
end
end
+
+ context 'when run inside a transaction block' do
+ it 'raises an error' do
+ expect(migration).to receive(:transaction_open?).and_return(true)
+
+ expect do
+ migration.add_concurrent_partitioned_foreign_key(source_table_name, target_table_name, column: column_name)
+ end.to raise_error(/can not be run inside a transaction/)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/index_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/index_helpers_spec.rb
index c3edc3a0c87..8ab3816529b 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/index_helpers_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/index_helpers_spec.rb
@@ -20,6 +20,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::IndexHelpers do
before do
allow(migration).to receive(:puts)
+ allow(migration).to receive(:transaction_open?).and_return(false)
connection.execute(<<~SQL)
CREATE TABLE #{table_name} (
@@ -127,6 +128,16 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::IndexHelpers do
end.to raise_error(ArgumentError, /#{table_name} is not a partitioned table/)
end
end
+
+ context 'when run inside a transaction block' do
+ it 'raises an error' do
+ expect(migration).to receive(:transaction_open?).and_return(true)
+
+ expect do
+ migration.add_concurrent_partitioned_index(table_name, column_name)
+ end.to raise_error(/can not be run inside a transaction/)
+ end
+ end
end
describe '#remove_concurrent_partitioned_index_by_name' do
@@ -182,5 +193,15 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::IndexHelpers do
end.to raise_error(ArgumentError, /#{table_name} is not a partitioned table/)
end
end
+
+ context 'when run inside a transaction block' do
+ it 'raises an error' do
+ expect(migration).to receive(:transaction_open?).and_return(true)
+
+ expect do
+ migration.remove_concurrent_partitioned_index_by_name(table_name, index_name)
+ end.to raise_error(/can not be run inside a transaction/)
+ end
+ end
end
end
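
Both partitioned-migration helper specs gain the same 'when run inside a transaction block' contexts, so the helpers presumably share one guard on transaction_open?. An illustrative sketch only; the helper name and message wording here are assumptions inferred from the expected error regex:

    # Illustrative only: a guard of roughly this shape satisfies the new
    # contexts, which stub #transaction_open? and expect a matching error.
    module PartitionedMigrationGuards
      # Intended to be mixed into a migration helper module that already
      # provides #transaction_open? (an ActiveRecord connection method).
      def assert_not_in_transaction_block(scope:)
        return unless transaction_open?

        raise "#{scope} can not be run inside a transaction block. " \
          "Use disable_ddl_transaction! in your migration to run it outside a transaction."
      end
    end
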
diff --git a/spec/lib/gitlab/database/partitioning_spec.rb b/spec/lib/gitlab/database/partitioning_spec.rb
new file mode 100644
index 00000000000..f163b45e01e
--- /dev/null
+++ b/spec/lib/gitlab/database/partitioning_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Partitioning do
+ describe '.sync_partitions' do
+ let(:partition_manager_class) { described_class::MultiDatabasePartitionManager }
+ let(:partition_manager) { double('partition manager') }
+
+ context 'when no partitioned models are given' do
+ it 'calls the partition manager with the registered models' do
+ expect(partition_manager_class).to receive(:new)
+ .with(described_class.registered_models)
+ .and_return(partition_manager)
+
+ expect(partition_manager).to receive(:sync_partitions)
+
+ described_class.sync_partitions
+ end
+ end
+
+ context 'when partitioned models are given' do
+ it 'calls the partition manager with the given models' do
+ models = ['my special model']
+
+ expect(partition_manager_class).to receive(:new)
+ .with(models)
+ .and_return(partition_manager)
+
+ expect(partition_manager).to receive(:sync_partitions)
+
+ described_class.sync_partitions(models)
+ end
+ end
+ end
+end
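
The new partitioning_spec.rb above pins down a thin entry point: sync the explicitly given models if any, otherwise the registered ones. A hedged sketch consistent with both contexts; registered_models is only stubbed here and is assumed to return the application's partitioned models:

    # Hedged sketch consistent with both contexts above.
    module Gitlab
      module Database
        module Partitioning
          # Placeholder: in the application this would return the models that
          # registered themselves for partition management.
          def self.registered_models
            @registered_models ||= []
          end

          def self.sync_partitions(models_to_sync = registered_models)
            MultiDatabasePartitionManager.new(models_to_sync).sync_partitions
          end
        end
      end
    end
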
diff --git a/spec/lib/gitlab/database/postgresql_adapter/dump_schema_versions_mixin_spec.rb b/spec/lib/gitlab/database/postgresql_adapter/dump_schema_versions_mixin_spec.rb
index 40e36bc02e9..8b06f068503 100644
--- a/spec/lib/gitlab/database/postgresql_adapter/dump_schema_versions_mixin_spec.rb
+++ b/spec/lib/gitlab/database/postgresql_adapter/dump_schema_versions_mixin_spec.rb
@@ -26,4 +26,12 @@ RSpec.describe Gitlab::Database::PostgresqlAdapter::DumpSchemaVersionsMixin do
instance.dump_schema_information
end
+
+ it 'does not call touch_all in production' do
+ allow(Rails).to receive(:env).and_return(ActiveSupport::StringInquirer.new('production'))
+
+ expect(Gitlab::Database::SchemaMigrations).not_to receive(:touch_all)
+
+ instance.dump_schema_information
+ end
end
diff --git a/spec/lib/gitlab/database/schema_migrations/context_spec.rb b/spec/lib/gitlab/database/schema_migrations/context_spec.rb
index 1f1943d00a3..a79e6706149 100644
--- a/spec/lib/gitlab/database/schema_migrations/context_spec.rb
+++ b/spec/lib/gitlab/database/schema_migrations/context_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Gitlab::Database::SchemaMigrations::Context do
describe '#schema_directory' do
it 'returns db/schema_migrations' do
- expect(context.schema_directory).to eq(File.join(Rails.root, 'db/schema_migrations'))
+ expect(context.schema_directory).to eq(File.join(Rails.root, described_class.default_schema_migrations_path))
end
context 'CI database' do
@@ -19,7 +19,7 @@ RSpec.describe Gitlab::Database::SchemaMigrations::Context do
it 'returns a directory path that is database specific' do
skip_if_multiple_databases_not_setup
- expect(context.schema_directory).to eq(File.join(Rails.root, 'db/schema_migrations'))
+ expect(context.schema_directory).to eq(File.join(Rails.root, described_class.default_schema_migrations_path))
end
end
@@ -124,8 +124,4 @@ RSpec.describe Gitlab::Database::SchemaMigrations::Context do
end
end
end
-
- def skip_if_multiple_databases_not_setup
- skip 'Skipping because multiple databases not set up' unless Gitlab::Database.has_config?(:ci)
- end
end
diff --git a/spec/lib/gitlab/database/shared_model_spec.rb b/spec/lib/gitlab/database/shared_model_spec.rb
new file mode 100644
index 00000000000..5d616aeb05f
--- /dev/null
+++ b/spec/lib/gitlab/database/shared_model_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::SharedModel do
+ describe 'using an external connection' do
+ let!(:original_connection) { described_class.connection }
+ let(:new_connection) { double('connection') }
+
+ it 'overrides the connection for the duration of the block', :aggregate_failures do
+ expect_original_connection_around do
+ described_class.using_connection(new_connection) do
+ expect(described_class.connection).to be(new_connection)
+ end
+ end
+ end
+
+ it 'does not affect connections in other threads', :aggregate_failures do
+ expect_original_connection_around do
+ described_class.using_connection(new_connection) do
+ expect(described_class.connection).to be(new_connection)
+
+ Thread.new do
+ expect(described_class.connection).not_to be(new_connection)
+ end.join
+ end
+ end
+ end
+
+ context 'when the block raises an error', :aggregate_failures do
+ it 're-raises the error, removing the overridden connection' do
+ expect_original_connection_around do
+ expect do
+ described_class.using_connection(new_connection) do
+ expect(described_class.connection).to be(new_connection)
+
+ raise 'here comes an error!'
+ end
+ end.to raise_error(RuntimeError, 'here comes an error!')
+ end
+ end
+ end
+
+ def expect_original_connection_around
+ # For safety, ensure our original connection is distinct from our double
+ # This should be the case, but in case of something leaking we should verify
+ expect(original_connection).not_to be(new_connection)
+ expect(described_class.connection).to be(original_connection)
+
+ yield
+
+ expect(described_class.connection).to be(original_connection)
+ end
+ end
+end
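
The SharedModel spec added above asserts three properties of using_connection: the override is block-scoped, thread-local, and removed even when the block raises. A minimal sketch satisfying those properties; the thread-local key is an assumption about the mechanism, not GitLab's code:

    # Minimal sketch satisfying the three properties the spec asserts.
    module Gitlab
      module Database
        class SharedModel < ActiveRecord::Base
          self.abstract_class = true

          def self.using_connection(connection)
            previous = Thread.current[:shared_model_connection_override]
            Thread.current[:shared_model_connection_override] = connection

            yield
          ensure
            # Restored even if the block raises, and only for this thread.
            Thread.current[:shared_model_connection_override] = previous
          end

          def self.connection
            Thread.current[:shared_model_connection_override] || super
          end
        end
      end
    end
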
diff --git a/spec/lib/gitlab/database/transaction/context_spec.rb b/spec/lib/gitlab/database/transaction/context_spec.rb
index 65d52b4d099..37cfc841d48 100644
--- a/spec/lib/gitlab/database/transaction/context_spec.rb
+++ b/spec/lib/gitlab/database/transaction/context_spec.rb
@@ -62,30 +62,32 @@ RSpec.describe Gitlab::Database::Transaction::Context do
it { expect(data[:queries]).to eq(['SELECT 1', 'SELECT * FROM users']) }
end
- describe '#duration' do
+ describe '#track_backtrace' do
before do
- subject.set_start_time
+ subject.track_backtrace(caller)
end
- it { expect(subject.duration).to be >= 0 }
- end
+ it { expect(data[:backtraces]).to be_a(Array) }
+ it { expect(data[:backtraces]).to all(be_a(Array)) }
+ it { expect(data[:backtraces].length).to eq(1) }
+ it { expect(data[:backtraces][0][0]).to be_a(String) }
- context 'when depth is low' do
- it 'does not log data upon COMMIT' do
- expect(subject).not_to receive(:application_info)
+ it 'appends the backtrace' do
+ subject.track_backtrace(caller)
- subject.commit
+ expect(data[:backtraces].length).to eq(2)
+ expect(subject.backtraces).to be_a(Array)
+ expect(subject.backtraces).to all(be_a(Array))
+ expect(subject.backtraces[1][0]).to be_a(String)
end
+ end
- it 'does not log data upon ROLLBACK' do
- expect(subject).not_to receive(:application_info)
-
- subject.rollback
+ describe '#duration' do
+ before do
+ subject.set_start_time
end
- it '#should_log? returns false' do
- expect(subject.should_log?).to be false
- end
+ it { expect(subject.duration).to be >= 0 }
end
shared_examples 'logs transaction data' do
@@ -116,17 +118,9 @@ RSpec.describe Gitlab::Database::Transaction::Context do
end
end
- context 'when depth exceeds threshold' do
- before do
- subject.set_depth(described_class::LOG_DEPTH_THRESHOLD + 1)
- end
-
- it_behaves_like 'logs transaction data'
- end
-
context 'when savepoints count exceeds threshold' do
before do
- data[:savepoints] = described_class::LOG_SAVEPOINTS_THRESHOLD + 1
+ data[:savepoints] = 1
end
it_behaves_like 'logs transaction data'
diff --git a/spec/lib/gitlab/database/transaction/observer_spec.rb b/spec/lib/gitlab/database/transaction/observer_spec.rb
index 7aa24217dc3..e5cc0106c9b 100644
--- a/spec/lib/gitlab/database/transaction/observer_spec.rb
+++ b/spec/lib/gitlab/database/transaction/observer_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe Gitlab::Database::Transaction::Observer do
User.first
expect(transaction_context).to be_a(::Gitlab::Database::Transaction::Context)
- expect(context.keys).to match_array(%i(start_time depth savepoints queries))
+ expect(context.keys).to match_array(%i(start_time depth savepoints queries backtraces))
expect(context[:depth]).to eq(2)
expect(context[:savepoints]).to eq(1)
expect(context[:queries].length).to eq(1)
@@ -35,6 +35,7 @@ RSpec.describe Gitlab::Database::Transaction::Observer do
expect(context[:depth]).to eq(2)
expect(context[:savepoints]).to eq(1)
expect(context[:releases]).to eq(1)
+ expect(context[:backtraces].length).to eq(1)
end
describe '.extract_sql_command' do
diff --git a/spec/lib/gitlab/database/with_lock_retries_spec.rb b/spec/lib/gitlab/database/with_lock_retries_spec.rb
index 72074f06210..0b960830d89 100644
--- a/spec/lib/gitlab/database/with_lock_retries_spec.rb
+++ b/spec/lib/gitlab/database/with_lock_retries_spec.rb
@@ -5,7 +5,9 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::WithLockRetries do
let(:env) { {} }
let(:logger) { Gitlab::Database::WithLockRetries::NULL_LOGGER }
- let(:subject) { described_class.new(env: env, logger: logger, timing_configuration: timing_configuration) }
+ let(:subject) { described_class.new(env: env, logger: logger, allow_savepoints: allow_savepoints, timing_configuration: timing_configuration) }
+ let(:allow_savepoints) { true }
+ let(:connection) { ActiveRecord::Base.connection }
let(:timing_configuration) do
[
@@ -66,7 +68,7 @@ RSpec.describe Gitlab::Database::WithLockRetries do
WHERE t.relkind = 'r' AND l.mode = 'ExclusiveLock' AND t.relname = '#{Project.table_name}'
"""
- expect(ActiveRecord::Base.connection.execute(check_exclusive_lock_query).to_a).to be_present
+ expect(connection.execute(check_exclusive_lock_query).to_a).to be_present
end
end
@@ -95,8 +97,8 @@ RSpec.describe Gitlab::Database::WithLockRetries do
lock_fiber.resume
end
- ActiveRecord::Base.transaction do
- ActiveRecord::Base.connection.execute("LOCK TABLE #{Project.table_name} in exclusive mode")
+ connection.transaction do
+ connection.execute("LOCK TABLE #{Project.table_name} in exclusive mode")
lock_acquired = true
end
end
@@ -114,7 +116,7 @@ RSpec.describe Gitlab::Database::WithLockRetries do
context 'setting the idle transaction timeout' do
context 'when there is no outer transaction: disable_ddl_transaction! is set in the migration' do
it 'does not disable the idle transaction timeout' do
- allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
+ allow(connection).to receive(:transaction_open?).and_return(false)
allow(subject).to receive(:run_block_with_lock_timeout).once.and_raise(ActiveRecord::LockWaitTimeout)
allow(subject).to receive(:run_block_with_lock_timeout).once
@@ -126,7 +128,7 @@ RSpec.describe Gitlab::Database::WithLockRetries do
context 'when there is outer transaction: disable_ddl_transaction! is not set in the migration' do
it 'disables the idle transaction timeout so the code can sleep and retry' do
- allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(true)
+ allow(connection).to receive(:transaction_open?).and_return(true)
n = 0
allow(subject).to receive(:run_block_with_lock_timeout).twice do
@@ -151,7 +153,7 @@ RSpec.describe Gitlab::Database::WithLockRetries do
context 'when there is no outer transaction: disable_ddl_transaction! is set in the migration' do
it 'does not disable the lock_timeout' do
- allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
+ allow(connection).to receive(:transaction_open?).and_return(false)
allow(subject).to receive(:run_block_with_lock_timeout).once.and_raise(ActiveRecord::LockWaitTimeout)
expect(subject).not_to receive(:disable_lock_timeout)
@@ -162,7 +164,7 @@ RSpec.describe Gitlab::Database::WithLockRetries do
context 'when there is outer transaction: disable_ddl_transaction! is not set in the migration' do
it 'disables the lock_timeout' do
- allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(true)
+ allow(connection).to receive(:transaction_open?).and_return(true)
allow(subject).to receive(:run_block_with_lock_timeout).once.and_raise(ActiveRecord::LockWaitTimeout)
expect(subject).to receive(:disable_lock_timeout)
@@ -197,8 +199,8 @@ RSpec.describe Gitlab::Database::WithLockRetries do
subject.run(raise_on_exhaustion: true) do
lock_attempts += 1
- ActiveRecord::Base.transaction do
- ActiveRecord::Base.connection.execute("LOCK TABLE #{Project.table_name} in exclusive mode")
+ connection.transaction do
+ connection.execute("LOCK TABLE #{Project.table_name} in exclusive mode")
lock_acquired = true
end
end
@@ -212,11 +214,11 @@ RSpec.describe Gitlab::Database::WithLockRetries do
context 'when statement timeout is reached' do
it 'raises QueryCanceled error' do
lock_acquired = false
- ActiveRecord::Base.connection.execute("SET LOCAL statement_timeout='100ms'")
+ connection.execute("SET LOCAL statement_timeout='100ms'")
expect do
subject.run do
- ActiveRecord::Base.connection.execute("SELECT 1 FROM pg_sleep(0.11)") # 110ms
+ connection.execute("SELECT 1 FROM pg_sleep(0.11)") # 110ms
lock_acquired = true
end
end.to raise_error(ActiveRecord::QueryCanceled)
@@ -229,11 +231,11 @@ RSpec.describe Gitlab::Database::WithLockRetries do
context 'restore local database variables' do
it do
- expect { subject.run {} }.not_to change { ActiveRecord::Base.connection.execute("SHOW lock_timeout").to_a }
+ expect { subject.run {} }.not_to change { connection.execute("SHOW lock_timeout").to_a }
end
it do
- expect { subject.run {} }.not_to change { ActiveRecord::Base.connection.execute("SHOW idle_in_transaction_session_timeout").to_a }
+ expect { subject.run {} }.not_to change { connection.execute("SHOW idle_in_transaction_session_timeout").to_a }
end
end
@@ -241,10 +243,10 @@ RSpec.describe Gitlab::Database::WithLockRetries do
let(:timing_configuration) { [[0.015.seconds, 0.025.seconds], [0.015.seconds, 0.025.seconds]] } # 15ms, 25ms
it 'executes `SET LOCAL lock_timeout` using the configured timeout value in milliseconds' do
- expect(ActiveRecord::Base.connection).to receive(:execute).with("RESET idle_in_transaction_session_timeout; RESET lock_timeout").and_call_original
- expect(ActiveRecord::Base.connection).to receive(:execute).with("SAVEPOINT active_record_1", "TRANSACTION").and_call_original
- expect(ActiveRecord::Base.connection).to receive(:execute).with("SET LOCAL lock_timeout TO '15ms'").and_call_original
- expect(ActiveRecord::Base.connection).to receive(:execute).with("RELEASE SAVEPOINT active_record_1", "TRANSACTION").and_call_original
+ expect(connection).to receive(:execute).with("RESET idle_in_transaction_session_timeout; RESET lock_timeout").and_call_original
+ expect(connection).to receive(:execute).with("SAVEPOINT active_record_1", "TRANSACTION").and_call_original
+ expect(connection).to receive(:execute).with("SET LOCAL lock_timeout TO '15ms'").and_call_original
+ expect(connection).to receive(:execute).with("RELEASE SAVEPOINT active_record_1", "TRANSACTION").and_call_original
subject.run { }
end
@@ -256,4 +258,20 @@ RSpec.describe Gitlab::Database::WithLockRetries do
subject.run { }
end
end
+
+  context 'when savepoints are not allowed (allow_savepoints: false)' do
+ let(:allow_savepoints) { false }
+
+ it 'prevents running inside already open transaction' do
+ allow(connection).to receive(:transaction_open?).and_return(true)
+
+ expect { subject.run { } }.to raise_error(/should not run inside already open transaction/)
+ end
+
+ it 'does not raise the error if not inside open transaction' do
+ allow(connection).to receive(:transaction_open?).and_return(false)
+
+ expect { subject.run { } }.not_to raise_error
+ end
+ end
end
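
The final context added to with_lock_retries_spec.rb covers the new allow_savepoints: false mode. A hedged excerpt showing only the guard that behaviour implies; the instance variable names are assumptions and the existing retry loop is elided:

    # Hedged excerpt: only the guard implied by allow_savepoints: false.
    def run(raise_on_exhaustion: false, &block)
      if !@allow_savepoints && @connection.transaction_open?
        raise 'WithLockRetries should not run inside already open transaction'
      end

      # ... lock_timeout configuration and retry loop continue unchanged ...
    end
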
diff --git a/spec/lib/gitlab/database_importers/work_items/base_type_importer_spec.rb b/spec/lib/gitlab/database_importers/work_items/base_type_importer_spec.rb
new file mode 100644
index 00000000000..8c3d372cc55
--- /dev/null
+++ b/spec/lib/gitlab/database_importers/work_items/base_type_importer_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::DatabaseImporters::WorkItems::BaseTypeImporter do
+ subject { described_class.import }
+
+ it_behaves_like 'work item base types importer'
+end
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index c67b5af5e3c..a9a8d5e6314 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -15,6 +15,22 @@ RSpec.describe Gitlab::Database do
end
end
+ describe '.default_pool_size' do
+ before do
+ allow(Gitlab::Runtime).to receive(:max_threads).and_return(7)
+ end
+
+ it 'returns the max thread size plus a fixed headroom of 10' do
+ expect(described_class.default_pool_size).to eq(17)
+ end
+
+    it 'returns the max thread size plus the DB_POOL_HEADROOM value if this env var is present' do
+ stub_env('DB_POOL_HEADROOM', '7')
+
+ expect(described_class.default_pool_size).to eq(14)
+ end
+ end
+
describe '.has_config?' do
context 'two tier database config' do
before do
@@ -139,23 +155,43 @@ RSpec.describe Gitlab::Database do
it { expect(described_class.nulls_first_order('column', 'DESC')).to eq 'column DESC NULLS FIRST'}
end
- describe '.db_config_name' do
- it 'returns the db_config name for the connection' do
- connection = ActiveRecord::Base.connection
+ describe '.db_config_for_connection' do
+ context 'when the regular connection is used' do
+ it 'returns db_config' do
+ connection = ActiveRecord::Base.retrieve_connection
- expect(described_class.db_config_name(connection)).to be_a(String)
- expect(described_class.db_config_name(connection)).to eq(connection.pool.db_config.name)
+ expect(described_class.db_config_for_connection(connection)).to eq(connection.pool.db_config)
+ end
+ end
+
+ context 'when the connection is LoadBalancing::ConnectionProxy' do
+ it 'returns nil' do
+ lb_config = ::Gitlab::Database::LoadBalancing::Configuration.new(ActiveRecord::Base)
+ lb = ::Gitlab::Database::LoadBalancing::LoadBalancer.new(lb_config)
+ proxy = ::Gitlab::Database::LoadBalancing::ConnectionProxy.new(lb)
+
+ expect(described_class.db_config_for_connection(proxy)).to be_nil
+ end
end
context 'when the pool is a NullPool' do
- it 'returns unknown' do
+ it 'returns nil' do
connection = double(:active_record_connection, pool: ActiveRecord::ConnectionAdapters::NullPool.new)
- expect(described_class.db_config_name(connection)).to eq('unknown')
+ expect(described_class.db_config_for_connection(connection)).to be_nil
end
end
end
+ describe '.db_config_name' do
+ it 'returns the db_config name for the connection' do
+ connection = ActiveRecord::Base.connection
+
+ expect(described_class.db_config_name(connection)).to be_a(String)
+ expect(described_class.db_config_name(connection)).to eq(connection.pool.db_config.name)
+ end
+ end
+
describe '#true_value' do
it 'returns correct value' do
expect(described_class.true_value).to eq "'t'"
diff --git a/spec/lib/gitlab/devise_failure_spec.rb b/spec/lib/gitlab/devise_failure_spec.rb
deleted file mode 100644
index a452de59795..00000000000
--- a/spec/lib/gitlab/devise_failure_spec.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::DeviseFailure do
- let(:env) do
- {
- 'REQUEST_URI' => 'http://test.host/',
- 'HTTP_HOST' => 'test.host',
- 'REQUEST_METHOD' => 'GET',
- 'warden.options' => { scope: :user },
- 'rack.session' => {},
- 'rack.session.options' => {},
- 'rack.input' => "",
- 'warden' => OpenStruct.new(message: nil)
- }
- end
-
- let(:response) { described_class.call(env).to_a }
- let(:request) { ActionDispatch::Request.new(env) }
-
- context 'When redirecting' do
- it 'sets the expire_after key' do
- response
-
- expect(env['rack.session.options']).to have_key(:expire_after)
- end
-
- it 'returns to the default redirect location' do
- expect(response.first).to eq(302)
- expect(request.flash[:alert]).to eq('You need to sign in or sign up before continuing.')
- expect(response.second['Location']).to eq('http://test.host/users/sign_in')
- end
- end
-end
diff --git a/spec/lib/gitlab/diff/highlight_cache_spec.rb b/spec/lib/gitlab/diff/highlight_cache_spec.rb
index 9e94a63ea4b..e643b58ee32 100644
--- a/spec/lib/gitlab/diff/highlight_cache_spec.rb
+++ b/spec/lib/gitlab/diff/highlight_cache_spec.rb
@@ -185,6 +185,15 @@ RSpec.describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache do
expect { cache.send(:write_to_redis_hash, diff_hash) }
.to change { Gitlab::Redis::Cache.with { |r| r.hgetall(cache_key) } }
end
+
+ context 'when diff contains unsupported characters' do
+ let(:diff_hash) { { 'README' => [{ line_code: nil, rich_text: nil, text: [0xff, 0xfe, 0x0, 0x23].pack("c*"), type: "match", index: 0, old_pos: 17, new_pos: 17 }] } }
+
+ it 'does not update the cache' do
+ expect { cache.send(:write_to_redis_hash, diff_hash) }
+ .not_to change { Gitlab::Redis::Cache.with { |r| r.hgetall(cache_key) } }
+ end
+ end
end
describe '#clear' do
diff --git a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
index 2ef3b324db8..2916e65528f 100644
--- a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
@@ -353,13 +353,4 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
expect { receiver.execute rescue nil }.not_to change { Issue.count }
end
end
-
- def email_fixture(path)
- fixture_file(path).gsub('project_id', project.project_id.to_s)
- end
-
- def service_desk_fixture(path, slug: nil, key: 'mykey')
- slug ||= project.full_path_slug.to_s
- fixture_file(path).gsub('project_slug', slug).gsub('project_key', key)
- end
end
diff --git a/spec/lib/gitlab/encoding_helper_spec.rb b/spec/lib/gitlab/encoding_helper_spec.rb
index 268ac5dcc21..98170ef437c 100644
--- a/spec/lib/gitlab/encoding_helper_spec.rb
+++ b/spec/lib/gitlab/encoding_helper_spec.rb
@@ -241,7 +241,7 @@ RSpec.describe Gitlab::EncodingHelper do
let(:data) { binary_string }
let(:kwargs) { {} }
- shared_examples 'detects encoding' do
+ context 'detects encoding' do
it { is_expected.to be_a(Hash) }
it 'correctly detects the binary' do
@@ -264,33 +264,5 @@ RSpec.describe Gitlab::EncodingHelper do
end
end
end
-
- context 'cached_encoding_detection is enabled' do
- before do
- stub_feature_flags(cached_encoding_detection: true)
- end
-
- it_behaves_like 'detects encoding'
-
- context 'cache_key is provided' do
- let(:kwargs) do
- { cache_key: %w(foo bar) }
- end
-
- it 'uses that cache_key to serve from the cache' do
- expect(Rails.cache).to receive(:fetch).with([:detect_binary, CharlockHolmes::VERSION, %w(foo bar)], expires_in: 1.week).and_call_original
-
- expect(subject[:type]).to eq(:binary)
- end
- end
- end
-
- context 'cached_encoding_detection is disabled' do
- before do
- stub_feature_flags(cached_encoding_detection: false)
- end
-
- it_behaves_like 'detects encoding'
- end
end
end
diff --git a/spec/lib/gitlab/experimentation/controller_concern_spec.rb b/spec/lib/gitlab/experimentation/controller_concern_spec.rb
index 8535d72a61f..1f7b7b90467 100644
--- a/spec/lib/gitlab/experimentation/controller_concern_spec.rb
+++ b/spec/lib/gitlab/experimentation/controller_concern_spec.rb
@@ -7,10 +7,6 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do
before do
stub_const('Gitlab::Experimentation::EXPERIMENTS', {
- backwards_compatible_test_experiment: {
- tracking_category: 'Team',
- use_backwards_compatible_subject_index: true
- },
test_experiment: {
tracking_category: 'Team',
rollout_strategy: rollout_strategy
@@ -23,7 +19,6 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do
allow(Gitlab).to receive(:dev_env_or_com?).and_return(is_gitlab_com)
- Feature.enable_percentage_of_time(:backwards_compatible_test_experiment_experiment_percentage, enabled_percentage)
Feature.enable_percentage_of_time(:test_experiment_experiment_percentage, enabled_percentage)
end
@@ -124,24 +119,15 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do
end
context 'cookie is present' do
- using RSpec::Parameterized::TableSyntax
-
before do
cookies.permanent.signed[:experimentation_subject_id] = 'abcd-1234'
get :index
end
- where(:experiment_key, :index_value) do
- :test_experiment | 'abcd-1234'
- :backwards_compatible_test_experiment | 'abcd1234'
- end
-
- with_them do
- it 'calls Gitlab::Experimentation.in_experiment_group?? with the name of the experiment and the calculated experimentation_subject_index based on the uuid' do
- expect(Gitlab::Experimentation).to receive(:in_experiment_group?).with(experiment_key, subject: index_value)
+        it 'calls Gitlab::Experimentation.in_experiment_group? with the name of the experiment and the experimentation_subject_id from the cookie' do
+ expect(Gitlab::Experimentation).to receive(:in_experiment_group?).with(:test_experiment, subject: 'abcd-1234')
- check_experiment(experiment_key)
- end
+ check_experiment(:test_experiment)
end
context 'when subject is given' do
diff --git a/spec/lib/gitlab/experimentation/experiment_spec.rb b/spec/lib/gitlab/experimentation/experiment_spec.rb
index 94dbf1d7e4b..d52ab3a8983 100644
--- a/spec/lib/gitlab/experimentation/experiment_spec.rb
+++ b/spec/lib/gitlab/experimentation/experiment_spec.rb
@@ -9,7 +9,6 @@ RSpec.describe Gitlab::Experimentation::Experiment do
let(:params) do
{
tracking_category: 'Category1',
- use_backwards_compatible_subject_index: true,
rollout_strategy: nil
}
end
diff --git a/spec/lib/gitlab/experimentation_spec.rb b/spec/lib/gitlab/experimentation_spec.rb
index c486538a260..c482874b725 100644
--- a/spec/lib/gitlab/experimentation_spec.rb
+++ b/spec/lib/gitlab/experimentation_spec.rb
@@ -7,10 +7,6 @@ RSpec.describe Gitlab::Experimentation do
before do
stub_const('Gitlab::Experimentation::EXPERIMENTS', {
- backwards_compatible_test_experiment: {
- tracking_category: 'Team',
- use_backwards_compatible_subject_index: true
- },
test_experiment: {
tracking_category: 'Team'
},
@@ -22,7 +18,6 @@ RSpec.describe Gitlab::Experimentation do
skip_feature_flags_yaml_validation
skip_default_enabled_yaml_check
- Feature.enable_percentage_of_time(:backwards_compatible_test_experiment_experiment_percentage, enabled_percentage)
Feature.enable_percentage_of_time(:test_experiment_experiment_percentage, enabled_percentage)
allow(Gitlab).to receive(:com?).and_return(true)
end
@@ -65,97 +60,47 @@ RSpec.describe Gitlab::Experimentation do
end
describe '.in_experiment_group?' do
- context 'with new index calculation' do
- let(:enabled_percentage) { 50 }
- let(:experiment_subject) { 'z' } # Zlib.crc32('test_experimentz') % 100 = 33
-
- subject { described_class.in_experiment_group?(:test_experiment, subject: experiment_subject) }
-
- context 'when experiment is active' do
- context 'when subject is part of the experiment' do
- it { is_expected.to eq(true) }
- end
+ let(:enabled_percentage) { 50 }
+ let(:experiment_subject) { 'z' } # Zlib.crc32('test_experimentz') % 100 = 33
- context 'when subject is not part of the experiment' do
- let(:experiment_subject) { 'a' } # Zlib.crc32('test_experimenta') % 100 = 61
+ subject { described_class.in_experiment_group?(:test_experiment, subject: experiment_subject) }
- it { is_expected.to eq(false) }
- end
+ context 'when experiment is active' do
+ context 'when subject is part of the experiment' do
+ it { is_expected.to eq(true) }
+ end
- context 'when subject has a global_id' do
- let(:experiment_subject) { double(:subject, to_global_id: 'z') }
+ context 'when subject is not part of the experiment' do
+ let(:experiment_subject) { 'a' } # Zlib.crc32('test_experimenta') % 100 = 61
- it { is_expected.to eq(true) }
- end
+ it { is_expected.to eq(false) }
+ end
- context 'when subject is nil' do
- let(:experiment_subject) { nil }
+ context 'when subject has a global_id' do
+ let(:experiment_subject) { double(:subject, to_global_id: 'z') }
- it { is_expected.to eq(false) }
- end
+ it { is_expected.to eq(true) }
+ end
- context 'when subject is an empty string' do
- let(:experiment_subject) { '' }
+ context 'when subject is nil' do
+ let(:experiment_subject) { nil }
- it { is_expected.to eq(false) }
- end
+ it { is_expected.to eq(false) }
end
- context 'when experiment is not active' do
- before do
- allow(described_class).to receive(:active?).and_return(false)
- end
+ context 'when subject is an empty string' do
+ let(:experiment_subject) { '' }
it { is_expected.to eq(false) }
end
end
- context 'with backwards compatible index calculation' do
- let(:experiment_subject) { 'abcd' } # Digest::SHA1.hexdigest('abcd').hex % 100 = 7
-
- subject { described_class.in_experiment_group?(:backwards_compatible_test_experiment, subject: experiment_subject) }
-
- context 'when experiment is active' do
- before do
- allow(described_class).to receive(:active?).and_return(true)
- end
-
- context 'when subject is part of the experiment' do
- it { is_expected.to eq(true) }
- end
-
- context 'when subject is not part of the experiment' do
- let(:experiment_subject) { 'abc' } # Digest::SHA1.hexdigest('abc').hex % 100 = 17
-
- it { is_expected.to eq(false) }
- end
-
- context 'when subject has a global_id' do
- let(:experiment_subject) { double(:subject, to_global_id: 'abcd') }
-
- it { is_expected.to eq(true) }
- end
-
- context 'when subject is nil' do
- let(:experiment_subject) { nil }
-
- it { is_expected.to eq(false) }
- end
-
- context 'when subject is an empty string' do
- let(:experiment_subject) { '' }
-
- it { is_expected.to eq(false) }
- end
+ context 'when experiment is not active' do
+ before do
+ allow(described_class).to receive(:active?).and_return(false)
end
- context 'when experiment is not active' do
- before do
- allow(described_class).to receive(:active?).and_return(false)
- end
-
- it { is_expected.to eq(false) }
- end
+ it { is_expected.to eq(false) }
end
end
diff --git a/spec/lib/gitlab/git/commit_spec.rb b/spec/lib/gitlab/git/commit_spec.rb
index f58bab52cfa..f4dba5e8d58 100644
--- a/spec/lib/gitlab/git/commit_spec.rb
+++ b/spec/lib/gitlab/git/commit_spec.rb
@@ -364,19 +364,39 @@ RSpec.describe Gitlab::Git::Commit, :seed_helper do
end
describe '.between' do
- subject do
- commits = described_class.between(repository, SeedRepo::Commit::PARENT_ID, SeedRepo::Commit::ID)
- commits.map { |c| c.id }
+ let(:limit) { nil }
+ let(:commit_ids) { commits.map(&:id) }
+
+ subject(:commits) { described_class.between(repository, from, to, limit: limit) }
+
+ context 'requesting a single commit' do
+ let(:from) { SeedRepo::Commit::PARENT_ID }
+ let(:to) { SeedRepo::Commit::ID }
+
+ it { expect(commit_ids).to contain_exactly(to) }
end
- it { is_expected.to contain_exactly(SeedRepo::Commit::ID) }
+ context 'requesting a commit range' do
+ let(:from) { 'v1.0.0' }
+ let(:to) { 'v1.2.0' }
- context 'between_uses_list_commits FF disabled' do
- before do
- stub_feature_flags(between_uses_list_commits: false)
+ let(:commits_in_range) do
+ %w[
+ 570e7b2abdd848b95f2f578043fc23bd6f6fd24d
+ 5937ac0a7beb003549fc5fd26fc247adbce4a52e
+ eb49186cfa5c4338011f5f590fac11bd66c5c631
+ ]
end
- it { is_expected.to contain_exactly(SeedRepo::Commit::ID) }
+ context 'no limit' do
+ it { expect(commit_ids).to eq(commits_in_range) }
+ end
+
+ context 'limited' do
+ let(:limit) { 2 }
+
+ it { expect(commit_ids).to eq(commits_in_range.last(2)) }
+ end
end
end
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index 29e7a1dce1d..9ecd281cce0 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -109,6 +109,32 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
it_behaves_like 'wrapping gRPC errors', Gitlab::GitalyClient::RefService, :tag_names
end
+ describe '#tags' do
+ subject { repository.tags }
+
+ it 'gets tags from GitalyClient' do
+ expect_next_instance_of(Gitlab::GitalyClient::RefService) do |service|
+ expect(service).to receive(:tags)
+ end
+
+ subject
+ end
+
+ context 'with sorting option' do
+ subject { repository.tags(sort_by: 'name_asc') }
+
+ it 'gets tags from GitalyClient' do
+ expect_next_instance_of(Gitlab::GitalyClient::RefService) do |service|
+ expect(service).to receive(:tags).with(sort_by: 'name_asc')
+ end
+
+ subject
+ end
+ end
+
+ it_behaves_like 'wrapping gRPC errors', Gitlab::GitalyClient::RefService, :tags
+ end
+
describe '#archive_metadata' do
let(:storage_path) { '/tmp' }
let(:cache_key) { File.join(repository.gl_repository, SeedRepo::LastCommit::ID) }
@@ -936,6 +962,159 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
end
end
+ describe '#new_blobs' do
+ let(:repository) { mutable_repository }
+ let(:repository_rugged) { mutable_repository_rugged }
+ let(:blob) { create_blob('This is a new blob') }
+ let(:commit) { create_commit('nested/new-blob.txt' => blob) }
+
+ def create_blob(content)
+ repository_rugged.write(content, :blob)
+ end
+
+ def create_commit(blobs)
+ author = { name: 'Test User', email: 'mail@example.com', time: Time.now }
+
+ index = repository_rugged.index
+ blobs.each do |path, oid|
+ index.add(path: path, oid: oid, mode: 0100644)
+ end
+
+ Rugged::Commit.create(repository_rugged,
+ author: author,
+ committer: author,
+ message: "Message",
+ parents: [],
+ tree: index.write_tree(repository_rugged))
+ end
+
+ subject { repository.new_blobs(newrevs).to_a }
+
+ shared_examples '#new_blobs with revisions' do
+ before do
+ expect_next_instance_of(Gitlab::GitalyClient::BlobService) do |service|
+ expect(service)
+ .to receive(:list_blobs)
+ .with(expected_newrevs,
+ limit: Gitlab::Git::Repository::REV_LIST_COMMIT_LIMIT,
+ with_paths: true,
+ dynamic_timeout: nil)
+ .once
+ .and_call_original
+ end
+ end
+
+ it 'enumerates new blobs' do
+ expect(subject).to match_array(expected_blobs)
+ end
+
+ it 'memoizes results' do
+ expect(subject).to match_array(expected_blobs)
+ expect(subject).to match_array(expected_blobs)
+ end
+ end
+
+ context 'with a single revision' do
+ let(:newrevs) { commit }
+ let(:expected_newrevs) { ['--not', '--all', '--not', newrevs] }
+ let(:expected_blobs) do
+ [have_attributes(class: Gitlab::Git::Blob, id: blob, path: 'nested/new-blob.txt', size: 18)]
+ end
+
+ it_behaves_like '#new_blobs with revisions'
+ end
+
+ context 'with a single-entry array' do
+ let(:newrevs) { [commit] }
+ let(:expected_newrevs) { ['--not', '--all', '--not'] + newrevs }
+ let(:expected_blobs) do
+ [have_attributes(class: Gitlab::Git::Blob, id: blob, path: 'nested/new-blob.txt', size: 18)]
+ end
+
+ it_behaves_like '#new_blobs with revisions'
+ end
+
+ context 'with multiple revisions' do
+ let(:another_blob) { create_blob('Another blob') }
+ let(:newrevs) { [commit, create_commit('another_path.txt' => another_blob)] }
+ let(:expected_newrevs) { ['--not', '--all', '--not'] + newrevs.sort }
+ let(:expected_blobs) do
+ [
+ have_attributes(class: Gitlab::Git::Blob, id: blob, path: 'nested/new-blob.txt', size: 18),
+ have_attributes(class: Gitlab::Git::Blob, id: another_blob, path: 'another_path.txt', size: 12)
+ ]
+ end
+
+ it_behaves_like '#new_blobs with revisions'
+ end
+
+ context 'with partially blank revisions' do
+ let(:newrevs) { [nil, commit, Gitlab::Git::BLANK_SHA] }
+ let(:expected_newrevs) { ['--not', '--all', '--not', commit] }
+ let(:expected_blobs) do
+ [
+ have_attributes(class: Gitlab::Git::Blob, id: blob, path: 'nested/new-blob.txt', size: 18)
+ ]
+ end
+
+ it_behaves_like '#new_blobs with revisions'
+ end
+
+ context 'with repeated revisions' do
+ let(:newrevs) { [commit, commit, commit] }
+ let(:expected_newrevs) { ['--not', '--all', '--not', commit] }
+ let(:expected_blobs) do
+ [
+ have_attributes(class: Gitlab::Git::Blob, id: blob, path: 'nested/new-blob.txt', size: 18)
+ ]
+ end
+
+ it_behaves_like '#new_blobs with revisions'
+ end
+
+ context 'with preexisting commits' do
+ let(:newrevs) { ['refs/heads/master'] }
+ let(:expected_newrevs) { ['--not', '--all', '--not'] + newrevs }
+ let(:expected_blobs) { [] }
+
+ it_behaves_like '#new_blobs with revisions'
+ end
+
+ shared_examples '#new_blobs without revisions' do
+ before do
+ expect(Gitlab::GitalyClient::BlobService).not_to receive(:new)
+ end
+
+ it 'returns an empty array' do
+ expect(subject).to eq([])
+ end
+ end
+
+ context 'with a single nil newrev' do
+ let(:newrevs) { nil }
+
+ it_behaves_like '#new_blobs without revisions'
+ end
+
+ context 'with a single zero newrev' do
+ let(:newrevs) { Gitlab::Git::BLANK_SHA }
+
+ it_behaves_like '#new_blobs without revisions'
+ end
+
+ context 'with an empty array' do
+ let(:newrevs) { [] }
+
+ it_behaves_like '#new_blobs without revisions'
+ end
+
+ context 'with array containing only empty refs' do
+ let(:newrevs) { [nil, Gitlab::Git::BLANK_SHA] }
+
+ it_behaves_like '#new_blobs without revisions'
+ end
+ end
+
describe '#new_commits' do
let(:repository) { mutable_repository }
let(:new_commit) do
@@ -1132,28 +1311,6 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
end
end
- describe '#ref_name_for_sha' do
- let(:ref_path) { 'refs/heads' }
- let(:sha) { repository.find_branch('master').dereferenced_target.id }
- let(:ref_name) { 'refs/heads/master' }
-
- it 'returns the ref name for the given sha' do
- expect(repository.ref_name_for_sha(ref_path, sha)).to eq(ref_name)
- end
-
- it "returns an empty name if the ref doesn't exist" do
- expect(repository.ref_name_for_sha(ref_path, "000000")).to eq("")
- end
-
- it "raise an exception if the ref is empty" do
- expect { repository.ref_name_for_sha(ref_path, "") }.to raise_error(ArgumentError)
- end
-
- it "raise an exception if the ref is nil" do
- expect { repository.ref_name_for_sha(ref_path, nil) }.to raise_error(ArgumentError)
- end
- end
-
describe '#branches' do
subject { repository.branches }
@@ -1732,83 +1889,42 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
end
describe '#set_full_path' do
- shared_examples '#set_full_path' do
- before do
- repository_rugged.config["gitlab.fullpath"] = repository_path
- end
-
- context 'is given a path' do
- it 'writes it to disk' do
- repository.set_full_path(full_path: "not-the/real-path.git")
-
- config = File.read(File.join(repository_path, "config"))
-
- expect(config).to include("[gitlab]")
- expect(config).to include("fullpath = not-the/real-path.git")
- end
- end
-
- context 'it is given an empty path' do
- it 'does not write it to disk' do
- repository.set_full_path(full_path: "")
-
- config = File.read(File.join(repository_path, "config"))
-
- expect(config).to include("[gitlab]")
- expect(config).to include("fullpath = #{repository_path}")
- end
- end
+ before do
+ repository_rugged.config["gitlab.fullpath"] = repository_path
+ end
- context 'repository does not exist' do
- it 'raises NoRepository and does not call Gitaly WriteConfig' do
- repository = Gitlab::Git::Repository.new('default', 'does/not/exist.git', '', 'group/project')
+ context 'is given a path' do
+ it 'writes it to disk' do
+ repository.set_full_path(full_path: "not-the/real-path.git")
- expect(repository.gitaly_repository_client).not_to receive(:set_full_path)
+ config = File.read(File.join(repository_path, "config"))
- expect do
- repository.set_full_path(full_path: 'foo/bar.git')
- end.to raise_error(Gitlab::Git::Repository::NoRepository)
- end
+ expect(config).to include("[gitlab]")
+ expect(config).to include("fullpath = not-the/real-path.git")
end
end
- context 'with :set_full_path enabled' do
- before do
- stub_feature_flags(set_full_path: true)
- end
+ context 'it is given an empty path' do
+ it 'does not write it to disk' do
+ repository.set_full_path(full_path: "")
- it_behaves_like '#set_full_path'
- end
+ config = File.read(File.join(repository_path, "config"))
- context 'with :set_full_path disabled' do
- before do
- stub_feature_flags(set_full_path: false)
+ expect(config).to include("[gitlab]")
+ expect(config).to include("fullpath = #{repository_path}")
end
-
- it_behaves_like '#set_full_path'
end
- end
- describe '#set_config' do
- let(:repository) { mutable_repository }
- let(:entries) do
- {
- 'test.foo1' => 'bla bla',
- 'test.foo2' => 1234,
- 'test.foo3' => true
- }
- end
+ context 'repository does not exist' do
+ it 'raises NoRepository and does not call Gitaly WriteConfig' do
+ repository = Gitlab::Git::Repository.new('default', 'does/not/exist.git', '', 'group/project')
- it 'can set config settings' do
- expect(repository.set_config(entries)).to be_nil
+ expect(repository.gitaly_repository_client).not_to receive(:set_full_path)
- expect(repository_rugged.config['test.foo1']).to eq('bla bla')
- expect(repository_rugged.config['test.foo2']).to eq('1234')
- expect(repository_rugged.config['test.foo3']).to eq('true')
- end
-
- after do
- entries.keys.each { |k| repository_rugged.config.delete(k) }
+ expect do
+ repository.set_full_path(full_path: 'foo/bar.git')
+ end.to raise_error(Gitlab::Git::Repository::NoRepository)
+ end
end
end
diff --git a/spec/lib/gitlab/git/tag_spec.rb b/spec/lib/gitlab/git/tag_spec.rb
index 79ae47f8a7b..4f56595d7d2 100644
--- a/spec/lib/gitlab/git/tag_spec.rb
+++ b/spec/lib/gitlab/git/tag_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe Gitlab::Git::Tag, :seed_helper do
it { expect(tag.tagger.timezone).to eq("+0200") }
end
- shared_examples 'signed tag' do
+ describe 'signed tag' do
let(:project) { create(:project, :repository) }
let(:tag) { project.repository.find_tag('v1.1.1') }
@@ -54,18 +54,6 @@ RSpec.describe Gitlab::Git::Tag, :seed_helper do
it { expect(tag.tagger.timezone).to eq("+0100") }
end
- context 'with :get_tag_signatures enabled' do
- it_behaves_like 'signed tag'
- end
-
- context 'with :get_tag_signatures disabled' do
- before do
- stub_feature_flags(get_tag_signatures: false)
- end
-
- it_behaves_like 'signed tag'
- end
-
it { expect(repository.tags.size).to eq(SeedRepo::Repo::TAGS.size) }
end
diff --git a/spec/lib/gitlab/git/tree_spec.rb b/spec/lib/gitlab/git/tree_spec.rb
index f11d84bd8d3..005f8ecaa3a 100644
--- a/spec/lib/gitlab/git/tree_spec.rb
+++ b/spec/lib/gitlab/git/tree_spec.rb
@@ -189,12 +189,109 @@ RSpec.describe Gitlab::Git::Tree, :seed_helper do
end
it_behaves_like :repo do
- context 'with pagination parameters' do
- let(:pagination_params) { { limit: 3, page_token: nil } }
+ describe 'Pagination' do
+ context 'with restrictive limit' do
+ let(:pagination_params) { { limit: 3, page_token: nil } }
+
+ it 'returns limited paginated list of tree objects' do
+ expect(entries.count).to eq(3)
+ expect(cursor.next_cursor).to be_present
+ end
+ end
+
+ context 'when limit is equal to number of entries' do
+ let(:entries_count) { entries.count }
+
+ it 'returns all entries without a cursor' do
+ result, cursor = Gitlab::Git::Tree.where(repository, sha, path, recursive, { limit: entries_count, page_token: nil })
+
+ expect(cursor).to be_nil
+ expect(result.entries.count).to eq(entries_count)
+ end
+ end
+
+ context 'when limit is 0' do
+ let(:pagination_params) { { limit: 0, page_token: nil } }
+
+ it 'returns empty result' do
+ expect(entries).to eq([])
+ expect(cursor).to be_nil
+ end
+ end
+
+ context 'when limit is missing' do
+ let(:pagination_params) { { limit: nil, page_token: nil } }
+
+ it 'returns empty result' do
+ expect(entries).to eq([])
+ expect(cursor).to be_nil
+ end
+ end
+
+ context 'when limit is negative' do
+ let(:entries_count) { entries.count }
+
+ it 'returns all entries' do
+ result, cursor = Gitlab::Git::Tree.where(repository, sha, path, recursive, { limit: -1, page_token: nil })
+
+ expect(result.count).to eq(entries_count)
+ expect(cursor).to be_nil
+ end
+
+ context 'when token is provided' do
+ let(:pagination_params) { { limit: 1000, page_token: nil } }
+ let(:token) { entries.second.id }
+
+ it 'returns all entries after token' do
+ result, cursor = Gitlab::Git::Tree.where(repository, sha, path, recursive, { limit: -1, page_token: token })
+
+ expect(result.count).to eq(entries.count - 2)
+ expect(cursor).to be_nil
+ end
+ end
+ end
+
+ context 'when token does not exist' do
+ let(:pagination_params) { { limit: 5, page_token: 'aabbccdd' } }
+
+ it 'raises a command error' do
+ expect { entries }.to raise_error(Gitlab::Git::CommandError, 'could not find starting OID: aabbccdd')
+ end
+ end
+
+ context 'when limit is bigger than number of entries' do
+ let(:pagination_params) { { limit: 1000, page_token: nil } }
+
+ it 'returns only available entries' do
+ expect(entries.count).to be < 20
+ expect(cursor).to be_nil
+ end
+ end
+
+ it 'returns all tree entries in a specific order during cursor pagination' do
+ collected_entries = []
+ token = nil
+
+ expected_entries = entries
+
+ loop do
+ result, cursor = Gitlab::Git::Tree.where(repository, sha, path, recursive, { limit: 5, page_token: token })
+
+ collected_entries += result.entries
+ token = cursor&.next_cursor
+
+ break if token.blank?
+ end
+
+ expect(collected_entries.map(&:path)).to match_array(expected_entries.map(&:path))
+
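+ # The expected order below groups paths by entry type: directories first, then files, then submodules.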
+ expected_order = [
+ collected_entries.select(&:dir?).map(&:path),
+ collected_entries.select(&:file?).map(&:path),
+ collected_entries.select(&:submodule?).map(&:path)
+ ].flatten
- it 'does not support pagination' do
- expect(entries.count).to be >= 10
- expect(cursor).to be_nil
+ expect(collected_entries.map(&:path)).to eq(expected_order)
end
end
end
diff --git a/spec/lib/gitlab/gitaly_client/blob_service_spec.rb b/spec/lib/gitlab/gitaly_client/blob_service_spec.rb
index 50078d8c127..f869c66337e 100644
--- a/spec/lib/gitlab/gitaly_client/blob_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/blob_service_spec.rb
@@ -92,13 +92,14 @@ RSpec.describe Gitlab::GitalyClient::BlobService do
describe '#list_blobs' do
let(:limit) { 0 }
let(:bytes_limit) { 0 }
- let(:expected_params) { { revisions: revisions, limit: limit, bytes_limit: bytes_limit } }
+ let(:with_paths) { false }
+ let(:expected_params) { { revisions: revisions, limit: limit, bytes_limit: bytes_limit, with_paths: with_paths } }
before do
::Gitlab::GitalyClient.clear_stubs!
end
- subject { client.list_blobs(revisions, limit: limit, bytes_limit: bytes_limit) }
+ subject { client.list_blobs(revisions, limit: limit, bytes_limit: bytes_limit, with_paths: with_paths) }
context 'with a single revision' do
let(:revisions) { ['master'] }
@@ -147,6 +148,24 @@ RSpec.describe Gitlab::GitalyClient::BlobService do
end
end
+ context 'with paths' do
+ let(:revisions) { ['master'] }
+ let(:limit) { 10 }
+ let(:bytes_limit) { 1024 }
+ let(:with_paths) { true }
+
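+ # Assumption: with_paths asks Gitaly to include each blob's path in the ListBlobs response.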
+ it 'sends a list_blobs message' do
+ expect_next_instance_of(Gitaly::BlobService::Stub) do |service|
+ expect(service)
+ .to receive(:list_blobs)
+ .with(gitaly_request_with_params(expected_params), kind_of(Hash))
+ .and_return([])
+ end
+
+ subject
+ end
+ end
+
context 'with split contents' do
let(:revisions) { ['master'] }
diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
index a0e2d43cf45..554a91f2bc5 100644
--- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
@@ -311,6 +311,10 @@ RSpec.describe Gitlab::GitalyClient::CommitService do
end
describe '#list_commits' do
+ let(:revisions) { 'master' }
+ let(:reverse) { false }
+ let(:pagination_params) { nil }
+
shared_examples 'a ListCommits request' do
before do
::Gitlab::GitalyClient.clear_stubs!
@@ -318,26 +322,35 @@ RSpec.describe Gitlab::GitalyClient::CommitService do
it 'sends a list_commits message' do
expect_next_instance_of(Gitaly::CommitService::Stub) do |service|
- expect(service)
- .to receive(:list_commits)
- .with(gitaly_request_with_params(expected_params), kind_of(Hash))
- .and_return([])
+ expected_request = gitaly_request_with_params(
+ Array.wrap(revisions),
+ reverse: reverse,
+ pagination_params: pagination_params
+ )
+
+ expect(service).to receive(:list_commits).with(expected_request, kind_of(Hash)).and_return([])
end
- client.list_commits(revisions)
+ client.list_commits(revisions, reverse: reverse, pagination_params: pagination_params)
end
end
- context 'with a single revision' do
- let(:revisions) { 'master' }
- let(:expected_params) { %w[master] }
+ it_behaves_like 'a ListCommits request'
+
+ context 'with multiple revisions' do
+ let(:revisions) { %w[master --not --all] }
+
+ it_behaves_like 'a ListCommits request'
+ end
+
+ context 'with reverse: true' do
+ let(:reverse) { true }
it_behaves_like 'a ListCommits request'
end
- context 'with multiple revisions' do
- let(:revisions) { %w[master --not --all] }
- let(:expected_params) { %w[master --not --all] }
+ context 'with pagination params' do
+ let(:pagination_params) { { limit: 1, page_token: 'foo' } }
it_behaves_like 'a ListCommits request'
end
diff --git a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
index e19be965e68..d308612ef31 100644
--- a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
@@ -92,6 +92,36 @@ RSpec.describe Gitlab::GitalyClient::RefService do
end
end
+ describe '#find_branch' do
+ it 'sends a find_branch message' do
+ expect_any_instance_of(Gitaly::RefService::Stub)
+ .to receive(:find_branch)
+ .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
+ .and_return(double(branch: Gitaly::Branch.new(name: 'name', target_commit: build(:gitaly_commit))))
+
+ client.find_branch('name')
+ end
+ end
+
+ describe '#find_tag' do
+ it 'sends a find_tag message' do
+ expect_any_instance_of(Gitaly::RefService::Stub)
+ .to receive(:find_tag)
+ .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
+ .and_return(double(tag: Gitaly::Tag.new))
+
+ client.find_tag('name')
+ end
+
+ context 'when tag is empty' do
+ it 'does not send a find_tag message' do
+ expect_any_instance_of(Gitaly::RefService::Stub).not_to receive(:find_tag)
+
+ expect(client.find_tag('')).to be_nil
+ end
+ end
+ end
+
describe '#default_branch_name' do
it 'sends a find_default_branch_name message' do
expect_any_instance_of(Gitaly::RefService::Stub)
@@ -103,16 +133,6 @@ RSpec.describe Gitlab::GitalyClient::RefService do
end
end
- describe '#list_new_blobs' do
- it 'raises DeadlineExceeded when timeout is too small' do
- newrev = '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51'
-
- expect do
- client.list_new_blobs(newrev, dynamic_timeout: 0.001)
- end.to raise_error(GRPC::DeadlineExceeded)
- end
- end
-
describe '#local_branches' do
it 'sends a find_local_branches message' do
expect_any_instance_of(Gitaly::RefService::Stub)
@@ -154,6 +174,22 @@ RSpec.describe Gitlab::GitalyClient::RefService do
client.tags
end
+
+ context 'with sorting option' do
+ it 'sends a correct find_all_tags message' do
+ expected_sort_by = Gitaly::FindAllTagsRequest::SortBy.new(
+ key: :REFNAME,
+ direction: :ASCENDING
+ )
+
+ expect_any_instance_of(Gitaly::RefService::Stub)
+ .to receive(:find_all_tags)
+ .with(gitaly_request_with_params(sort_by: expected_sort_by), kind_of(Hash))
+ .and_return([])
+
+ client.tags(sort_by: 'name_asc')
+ end
+ end
end
describe '#branch_names_contains_sha' do
@@ -189,13 +225,6 @@ RSpec.describe Gitlab::GitalyClient::RefService do
end
end
- describe '#find_ref_name', :seed_helper do
- subject { client.find_ref_name(SeedRepo::Commit::ID, 'refs/heads/master') }
-
- it { is_expected.to be_utf8 }
- it { is_expected.to eq('refs/heads/master') }
- end
-
describe '#ref_exists?', :seed_helper do
it 'finds the master branch ref' do
expect(client.ref_exists?('refs/heads/master')).to eq(true)
diff --git a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
index 4b037d3f836..e5502a883b5 100644
--- a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
@@ -195,19 +195,6 @@ RSpec.describe Gitlab::GitalyClient::RepositoryService do
end
end
- describe '#squash_in_progress?' do
- let(:squash_id) { 1 }
-
- it 'sends a repository_squash_in_progress message' do
- expect_any_instance_of(Gitaly::RepositoryService::Stub)
- .to receive(:is_squash_in_progress)
- .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
- .and_return(double(in_progress: true))
-
- client.squash_in_progress?(squash_id)
- end
- end
-
describe '#calculate_checksum' do
it 'sends a calculate_checksum message' do
expect_any_instance_of(Gitaly::RepositoryService::Stub)
diff --git a/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb b/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
index 0af840d2c10..3dc15c7c059 100644
--- a/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb
@@ -20,6 +20,7 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNoteImporter do
noteable_type: 'MergeRequest',
noteable_id: 1,
commit_id: '123abc',
+ original_commit_id: 'original123abc',
file_path: 'README.md',
diff_hunk: hunk,
author: Gitlab::GithubImport::Representation::User
@@ -64,13 +65,14 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNoteImporter do
LegacyDiffNote.table_name,
[
{
+ discussion_id: anything,
noteable_type: 'MergeRequest',
noteable_id: merge_request.id,
project_id: project.id,
author_id: user.id,
note: 'Hello',
system: false,
- commit_id: '123abc',
+ commit_id: 'original123abc',
line_code: note.line_code,
type: 'LegacyDiffNote',
created_at: created_at,
@@ -95,13 +97,14 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNoteImporter do
LegacyDiffNote.table_name,
[
{
+ discussion_id: anything,
noteable_type: 'MergeRequest',
noteable_id: merge_request.id,
project_id: project.id,
author_id: project.creator_id,
note: "*Created by: #{user.username}*\n\nHello",
system: false,
- commit_id: '123abc',
+ commit_id: 'original123abc',
line_code: note.line_code,
type: 'LegacyDiffNote',
created_at: created_at,
diff --git a/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb
index 7750e508713..46b9959ff64 100644
--- a/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNotesImporter do
html_url: 'https://github.com/foo/bar/pull/42',
path: 'README.md',
commit_id: '123abc',
+ original_commit_id: 'original123abc',
diff_hunk: "@@ -1 +1 @@\n-Hello\n+Hello world",
user: double(:user, id: 4, login: 'alice'),
body: 'Hello world',
diff --git a/spec/lib/gitlab/github_import/importer/single_endpoint_diff_notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/single_endpoint_diff_notes_importer_spec.rb
new file mode 100644
index 00000000000..8c71d7d0ed7
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/single_endpoint_diff_notes_importer_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointDiffNotesImporter do
+ let(:client) { double }
+ let(:project) { create(:project, import_source: 'github/repo') }
+
+ subject { described_class.new(project, client) }
+
+ it { is_expected.to include_module(Gitlab::GithubImport::ParallelScheduling) }
+ it { is_expected.to include_module(Gitlab::GithubImport::SingleEndpointNotesImporting) }
+ it { expect(subject.representation_class).to eq(Gitlab::GithubImport::Representation::DiffNote) }
+ it { expect(subject.importer_class).to eq(Gitlab::GithubImport::Importer::DiffNoteImporter) }
+ it { expect(subject.collection_method).to eq(:pull_request_comments) }
+ it { expect(subject.object_type).to eq(:diff_note) }
+ it { expect(subject.id_for_already_imported_cache(double(id: 1))).to eq(1) }
+
+ describe '#each_object_to_import', :clean_gitlab_redis_cache do
+ let(:merge_request) do
+ create(
+ :merged_merge_request,
+ iid: 999,
+ source_project: project,
+ target_project: project
+ )
+ end
+
+ let(:note) { double(id: 1) }
+ let(:page) { double(objects: [note], number: 1) }
+
+ it 'fetches data' do
+ expect(client)
+ .to receive(:each_page)
+ .exactly(:once) # ensure it is cached on the second call
+ .with(:pull_request_comments, 'github/repo', merge_request.iid, page: 1)
+ .and_yield(page)
+
+ expect { |b| subject.each_object_to_import(&b) }.to yield_with_args(note)
+
+ subject.each_object_to_import {}
+
+ expect(
+ Gitlab::Cache::Import::Caching.set_includes?(
+ "github-importer/merge_request/diff_notes/already-imported/#{project.id}",
+ merge_request.iid
+ )
+ ).to eq(true)
+ end
+
+ it 'skips cached pages' do
+ Gitlab::GithubImport::PageCounter
+ .new(project, "merge_request/#{merge_request.id}/pull_request_comments")
+ .set(2)
+
+ expect(client)
+ .to receive(:each_page)
+ .exactly(:once) # ensure it is cached on the second call
+ .with(:pull_request_comments, 'github/repo', merge_request.iid, page: 2)
+
+ subject.each_object_to_import {}
+ end
+
+ it 'skips cached merge requests' do
+ Gitlab::Cache::Import::Caching.set_add(
+ "github-importer/merge_request/diff_notes/already-imported/#{project.id}",
+ merge_request.iid
+ )
+
+ expect(client).not_to receive(:each_page)
+
+ subject.each_object_to_import {}
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/single_endpoint_issue_notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/single_endpoint_issue_notes_importer_spec.rb
new file mode 100644
index 00000000000..8d8f2730880
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/single_endpoint_issue_notes_importer_spec.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueNotesImporter do
+ let(:client) { double }
+ let(:project) { create(:project, import_source: 'github/repo') }
+
+ subject { described_class.new(project, client) }
+
+ it { is_expected.to include_module(Gitlab::GithubImport::ParallelScheduling) }
+ it { is_expected.to include_module(Gitlab::GithubImport::SingleEndpointNotesImporting) }
+ it { expect(subject.representation_class).to eq(Gitlab::GithubImport::Representation::Note) }
+ it { expect(subject.importer_class).to eq(Gitlab::GithubImport::Importer::NoteImporter) }
+ it { expect(subject.collection_method).to eq(:issue_comments) }
+ it { expect(subject.object_type).to eq(:note) }
+ it { expect(subject.id_for_already_imported_cache(double(id: 1))).to eq(1) }
+
+ describe '#each_object_to_import', :clean_gitlab_redis_cache do
+ let(:issue) do
+ create(
+ :issue,
+ iid: 999,
+ project: project
+ )
+ end
+
+ let(:note) { double(id: 1) }
+ let(:page) { double(objects: [note], number: 1) }
+
+ it 'fetches data' do
+ expect(client)
+ .to receive(:each_page)
+ .exactly(:once) # ensure it is cached on the second call
+ .with(:issue_comments, 'github/repo', issue.iid, page: 1)
+ .and_yield(page)
+
+ expect { |b| subject.each_object_to_import(&b) }.to yield_with_args(note)
+
+ subject.each_object_to_import {}
+
+ expect(
+ Gitlab::Cache::Import::Caching.set_includes?(
+ "github-importer/issue/notes/already-imported/#{project.id}",
+ issue.iid
+ )
+ ).to eq(true)
+ end
+
+ it 'skips cached pages' do
+ Gitlab::GithubImport::PageCounter
+ .new(project, "issue/#{issue.id}/issue_comments")
+ .set(2)
+
+ expect(client)
+ .to receive(:each_page)
+ .exactly(:once) # ensure it is cached on the second call
+ .with(:issue_comments, 'github/repo', issue.iid, page: 2)
+
+ subject.each_object_to_import {}
+ end
+
+ it 'skips cached issues' do
+ Gitlab::Cache::Import::Caching.set_add(
+ "github-importer/issue/notes/already-imported/#{project.id}",
+ issue.iid
+ )
+
+ expect(client).not_to receive(:each_page)
+
+ subject.each_object_to_import {}
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/single_endpoint_merge_request_notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/single_endpoint_merge_request_notes_importer_spec.rb
new file mode 100644
index 00000000000..b8282212a90
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/single_endpoint_merge_request_notes_importer_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointMergeRequestNotesImporter do
+ let(:client) { double }
+ let(:project) { create(:project, import_source: 'github/repo') }
+
+ subject { described_class.new(project, client) }
+
+ it { is_expected.to include_module(Gitlab::GithubImport::ParallelScheduling) }
+ it { is_expected.to include_module(Gitlab::GithubImport::SingleEndpointNotesImporting) }
+ it { expect(subject.representation_class).to eq(Gitlab::GithubImport::Representation::Note) }
+ it { expect(subject.importer_class).to eq(Gitlab::GithubImport::Importer::NoteImporter) }
+ it { expect(subject.collection_method).to eq(:issue_comments) }
+ it { expect(subject.object_type).to eq(:note) }
+ it { expect(subject.id_for_already_imported_cache(double(id: 1))).to eq(1) }
+
+ describe '#each_object_to_import', :clean_gitlab_redis_cache do
+ let(:merge_request) do
+ create(
+ :merge_request,
+ iid: 999,
+ source_project: project,
+ target_project: project
+ )
+ end
+
+ let(:note) { double(id: 1) }
+ let(:page) { double(objects: [note], number: 1) }
+
+ it 'fetches data' do
+ expect(client)
+ .to receive(:each_page)
+ .exactly(:once) # ensure it is cached on the second call
+ .with(:issue_comments, 'github/repo', merge_request.iid, page: 1)
+ .and_yield(page)
+
+ expect { |b| subject.each_object_to_import(&b) }.to yield_with_args(note)
+
+ subject.each_object_to_import {}
+
+ expect(
+ Gitlab::Cache::Import::Caching.set_includes?(
+ "github-importer/merge_request/notes/already-imported/#{project.id}",
+ merge_request.iid
+ )
+ ).to eq(true)
+ end
+
+ it 'skips cached pages' do
+ Gitlab::GithubImport::PageCounter
+ .new(project, "merge_request/#{merge_request.id}/issue_comments")
+ .set(2)
+
+ expect(client)
+ .to receive(:each_page)
+ .exactly(:once) # ensure it is cached on the second call
+ .with(:issue_comments, 'github/repo', merge_request.iid, page: 2)
+
+ subject.each_object_to_import {}
+ end
+
+ it 'skips cached merge requests' do
+ Gitlab::Cache::Import::Caching.set_add(
+ "github-importer/merge_request/notes/already-imported/#{project.id}",
+ merge_request.iid
+ )
+
+ expect(client).not_to receive(:each_page)
+
+ subject.each_object_to_import {}
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/issuable_finder_spec.rb b/spec/lib/gitlab/github_import/issuable_finder_spec.rb
index f009b61ad89..3afd006109b 100644
--- a/spec/lib/gitlab/github_import/issuable_finder_spec.rb
+++ b/spec/lib/gitlab/github_import/issuable_finder_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::IssuableFinder, :clean_gitlab_redis_cache do
- let(:project) { double(:project, id: 4) }
+ let(:project) { double(:project, id: 4, group: nil) }
let(:issue) do
double(:issue, issuable_type: MergeRequest, iid: 1)
end
@@ -26,15 +26,77 @@ RSpec.describe Gitlab::GithubImport::IssuableFinder, :clean_gitlab_redis_cache d
expect { finder.database_id }.to raise_error(TypeError)
end
+
+ context 'when group is present' do
+ context 'when github_importer_single_endpoint_notes_import feature flag is enabled' do
+ it 'reads cache value with longer timeout' do
+ project = create(:project, import_url: 'http://t0ken@github.com/user/repo.git')
+ group = create(:group, projects: [project])
+
+ stub_feature_flags(github_importer_single_endpoint_notes_import: group)
+
+ expect(Gitlab::Cache::Import::Caching)
+ .to receive(:read)
+ .with(anything, timeout: Gitlab::Cache::Import::Caching::LONGER_TIMEOUT)
+
+ described_class.new(project, issue).database_id
+ end
+ end
+
+ context 'when github_importer_single_endpoint_notes_import feature flag is disabled' do
+ it 'reads cache value with default timeout' do
+ project = double(:project, id: 4, group: create(:group))
+
+ stub_feature_flags(github_importer_single_endpoint_notes_import: false)
+
+ expect(Gitlab::Cache::Import::Caching)
+ .to receive(:read)
+ .with(anything, timeout: Gitlab::Cache::Import::Caching::TIMEOUT)
+
+ described_class.new(project, issue).database_id
+ end
+ end
+ end
end
describe '#cache_database_id' do
it 'caches the ID of a database row' do
expect(Gitlab::Cache::Import::Caching)
.to receive(:write)
- .with('github-import/issuable-finder/4/MergeRequest/1', 10)
+ .with('github-import/issuable-finder/4/MergeRequest/1', 10, timeout: 86400)
finder.cache_database_id(10)
end
+
+ context 'when group is present' do
+ context 'when github_importer_single_endpoint_notes_import feature flag is enabled' do
+ it 'caches value with longer timeout' do
+ project = create(:project, import_url: 'http://t0ken@github.com/user/repo.git')
+ group = create(:group, projects: [project])
+
+ stub_feature_flags(github_importer_single_endpoint_notes_import: group)
+
+ expect(Gitlab::Cache::Import::Caching)
+ .to receive(:write)
+ .with(anything, anything, timeout: Gitlab::Cache::Import::Caching::LONGER_TIMEOUT)
+
+ described_class.new(project, issue).cache_database_id(10)
+ end
+ end
+
+ context 'when github_importer_single_endpoint_notes_import feature flag is disabled' do
+ it 'caches value with default timeout' do
+ project = double(:project, id: 4, group: create(:group))
+
+ stub_feature_flags(github_importer_single_endpoint_notes_import: false)
+
+ expect(Gitlab::Cache::Import::Caching)
+ .to receive(:write)
+ .with(anything, anything, timeout: Gitlab::Cache::Import::Caching::TIMEOUT)
+
+ described_class.new(project, issue).cache_database_id(10)
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/github_import/representation/diff_note_spec.rb b/spec/lib/gitlab/github_import/representation/diff_note_spec.rb
index 7e540674258..7c24cd0a5db 100644
--- a/spec/lib/gitlab/github_import/representation/diff_note_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/diff_note_spec.rb
@@ -67,6 +67,7 @@ RSpec.describe Gitlab::GithubImport::Representation::DiffNote do
html_url: 'https://github.com/foo/bar/pull/42',
path: 'README.md',
commit_id: '123abc',
+ original_commit_id: 'original123abc',
diff_hunk: hunk,
user: double(:user, id: 4, login: 'alice'),
body: 'Hello world',
@@ -99,6 +100,7 @@ RSpec.describe Gitlab::GithubImport::Representation::DiffNote do
'noteable_id' => 42,
'file_path' => 'README.md',
'commit_id' => '123abc',
+ 'original_commit_id' => 'original123abc',
'diff_hunk' => hunk,
'author' => { 'id' => 4, 'login' => 'alice' },
'note' => 'Hello world',
@@ -117,6 +119,7 @@ RSpec.describe Gitlab::GithubImport::Representation::DiffNote do
'noteable_id' => 42,
'file_path' => 'README.md',
'commit_id' => '123abc',
+ 'original_commit_id' => 'original123abc',
'diff_hunk' => hunk,
'note' => 'Hello world',
'created_at' => created_at.to_s,
@@ -145,6 +148,7 @@ RSpec.describe Gitlab::GithubImport::Representation::DiffNote do
'noteable_id' => 42,
'file_path' => 'README.md',
'commit_id' => '123abc',
+ 'original_commit_id' => 'original123abc',
'diff_hunk' => hunk,
'author' => { 'id' => 4, 'login' => 'alice' },
'note' => 'Hello world',
diff --git a/spec/lib/gitlab/github_import/sequential_importer_spec.rb b/spec/lib/gitlab/github_import/sequential_importer_spec.rb
index a5e89049ed9..3c3f8ff59d0 100644
--- a/spec/lib/gitlab/github_import/sequential_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/sequential_importer_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::GithubImport::SequentialImporter do
describe '#execute' do
it 'imports a project in sequence' do
repository = double(:repository)
- project = double(:project, id: 1, repository: repository, import_url: 'http://t0ken@github.another-domain.com/repo-org/repo.git')
+ project = double(:project, id: 1, repository: repository, import_url: 'http://t0ken@github.another-domain.com/repo-org/repo.git', group: nil)
importer = described_class.new(project, token: 'foo')
expect_next_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter) do |instance|
diff --git a/spec/lib/gitlab/github_import/user_finder_spec.rb b/spec/lib/gitlab/github_import/user_finder_spec.rb
index f81fa3b1e2e..8eb6eedd72d 100644
--- a/spec/lib/gitlab/github_import/user_finder_spec.rb
+++ b/spec/lib/gitlab/github_import/user_finder_spec.rb
@@ -195,7 +195,7 @@ RSpec.describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache do
expect(Gitlab::Cache::Import::Caching)
.to receive(:write)
- .with(an_instance_of(String), email)
+ .with(an_instance_of(String), email, timeout: Gitlab::Cache::Import::Caching::TIMEOUT)
finder.email_for_github_username('kittens')
end
@@ -211,6 +211,16 @@ RSpec.describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache do
expect(finder.email_for_github_username('kittens')).to be_nil
end
+
+ it 'caches the email with a shorter timeout when the GitHub email address is private/nil' do
+ user = double(:user, email: nil)
+ expect(client).to receive(:user).with('kittens').and_return(user)
+
+ expect(Gitlab::Cache::Import::Caching)
+ .to receive(:write).with(an_instance_of(String), nil, timeout: Gitlab::Cache::Import::Caching::SHORTER_TIMEOUT)
+
+ expect(finder.email_for_github_username('kittens')).to be_nil
+ end
end
end
diff --git a/spec/lib/gitlab/github_import_spec.rb b/spec/lib/gitlab/github_import_spec.rb
index 662757f66ad..1ea9f003098 100644
--- a/spec/lib/gitlab/github_import_spec.rb
+++ b/spec/lib/gitlab/github_import_spec.rb
@@ -3,13 +3,17 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport do
+ before do
+ stub_feature_flags(github_importer_lower_per_page_limit: false)
+ end
+
context 'github.com' do
- let(:project) { double(:project, import_url: 'http://t0ken@github.com/user/repo.git', id: 1) }
+ let(:project) { double(:project, import_url: 'http://t0ken@github.com/user/repo.git', id: 1, group: nil) }
it 'returns a new Client with a custom token' do
expect(described_class::Client)
.to receive(:new)
- .with('123', host: nil, parallel: true)
+ .with('123', host: nil, parallel: true, per_page: 100)
described_class.new_client_for(project, token: '123')
end
@@ -23,7 +27,7 @@ RSpec.describe Gitlab::GithubImport do
expect(described_class::Client)
.to receive(:new)
- .with('123', host: nil, parallel: true)
+ .with('123', host: nil, parallel: true, per_page: 100)
described_class.new_client_for(project)
end
@@ -45,12 +49,12 @@ RSpec.describe Gitlab::GithubImport do
end
context 'GitHub Enterprise' do
- let(:project) { double(:project, import_url: 'http://t0ken@github.another-domain.com/repo-org/repo.git') }
+ let(:project) { double(:project, import_url: 'http://t0ken@github.another-domain.com/repo-org/repo.git', group: nil) }
it 'returns a new Client with a custom token' do
expect(described_class::Client)
.to receive(:new)
- .with('123', host: 'http://github.another-domain.com/api/v3', parallel: true)
+ .with('123', host: 'http://github.another-domain.com/api/v3', parallel: true, per_page: 100)
described_class.new_client_for(project, token: '123')
end
@@ -64,7 +68,7 @@ RSpec.describe Gitlab::GithubImport do
expect(described_class::Client)
.to receive(:new)
- .with('123', host: 'http://github.another-domain.com/api/v3', parallel: true)
+ .with('123', host: 'http://github.another-domain.com/api/v3', parallel: true, per_page: 100)
described_class.new_client_for(project)
end
@@ -88,4 +92,37 @@ RSpec.describe Gitlab::GithubImport do
expect(described_class.formatted_import_url(project)).to eq('http://github.another-domain.com/api/v3')
end
end
+
+ describe '.per_page' do
+ context 'when project group is present' do
+ context 'when github_importer_lower_per_page_limit is enabled' do
+ it 'returns lower per page value' do
+ project = create(:project, import_url: 'http://t0ken@github.com/user/repo.git')
+ group = create(:group, projects: [project])
+
+ stub_feature_flags(github_importer_lower_per_page_limit: group)
+
+ expect(described_class.per_page(project)).to eq(Gitlab::GithubImport::Client::LOWER_PER_PAGE)
+ end
+ end
+
+ context 'when github_importer_lower_per_page_limit is disabled' do
+ it 'returns default per page value' do
+ project = double(:project, import_url: 'http://t0ken@github.com/user/repo.git', id: 1, group: create(:group))
+
+ stub_feature_flags(github_importer_lower_per_page_limit: false)
+
+ expect(described_class.per_page(project)).to eq(Gitlab::GithubImport::Client::DEFAULT_PER_PAGE)
+ end
+ end
+ end
+
+ context 'when project group is missing' do
+ it 'returns default per page value' do
+ project = double(:project, import_url: 'http://t0ken@github.com/user/repo.git', id: 1, group: nil)
+
+ expect(described_class.per_page(project)).to eq(Gitlab::GithubImport::Client::DEFAULT_PER_PAGE)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 2b7138a7a10..614aa55c3c5 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -58,6 +58,7 @@ issues:
- test_reports
- requirement
- incident_management_issuable_escalation_status
+- pending_escalations
work_item_type:
- issues
events:
@@ -223,6 +224,7 @@ ci_pipelines:
- builds
- bridges
- processables
+- generic_commit_statuses
- trigger_requests
- variables
- auto_canceled_by
@@ -318,6 +320,7 @@ integrations:
- project
- service_hook
- jira_tracker_data
+- zentao_tracker_data
- issue_tracker_data
- open_project_tracker_data
hooks:
@@ -354,6 +357,8 @@ project:
- taggings
- base_tags
- topic_taggings
+- topics_acts_as_taggable
+- project_topics
- topics
- chat_services
- cluster
@@ -365,6 +370,7 @@ project:
- value_streams
- group
- namespace
+- project_namespace
- management_clusters
- boards
- last_event
@@ -395,6 +401,7 @@ project:
- teamcity_integration
- pushover_integration
- jira_integration
+- zentao_integration
- redmine_integration
- youtrack_integration
- custom_issue_tracker_integration
@@ -583,6 +590,9 @@ project:
- timelogs
- error_tracking_errors
- error_tracking_client_keys
+- pending_builds
+- security_scans
+- ci_feature_usages
award_emoji:
- awardable
- user
@@ -673,6 +683,7 @@ boards:
- destroyable_lists
- milestone
- iteration
+- iteration_cadence
- board_labels
- board_assignee
- assignee
@@ -762,3 +773,5 @@ push_rule:
- group
bulk_import_export:
- group
+service_desk_setting:
+ - file_template_project
diff --git a/spec/lib/gitlab/import_export/attributes_permitter_spec.rb b/spec/lib/gitlab/import_export/attributes_permitter_spec.rb
index 0c1b1cd74bf..36a831a785c 100644
--- a/spec/lib/gitlab/import_export/attributes_permitter_spec.rb
+++ b/spec/lib/gitlab/import_export/attributes_permitter_spec.rb
@@ -74,4 +74,73 @@ RSpec.describe Gitlab::ImportExport::AttributesPermitter do
expect(subject.permitted_attributes_for(:labels)).to contain_exactly(:title, :description, :type, :priorities)
end
end
+
+ describe '#permitted_attributes_defined?' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:attributes_permitter) { described_class.new }
+
+ where(:relation_name, :permitted_attributes_defined) do
+ :user | false
+ :author | false
+ :ci_cd_settings | false
+ :issuable_sla | false
+ :push_rule | false
+ :metrics_setting | true
+ :project_badges | true
+ :pipeline_schedules | true
+ :error_tracking_setting | true
+ :auto_devops | true
+ end
+
+ with_them do
+ it { expect(attributes_permitter.permitted_attributes_defined?(relation_name)).to eq(permitted_attributes_defined) }
+ end
+ end
+
+ describe 'included_attributes for Project' do
+ let(:prohibited_attributes) { %i[remote_url my_attributes my_ids token my_id test] }
+
+ subject { described_class.new }
+
+ Gitlab::ImportExport::Config.new.to_h[:included_attributes].each do |relation_sym, permitted_attributes|
+ context "for #{relation_sym}" do
+ let(:import_export_config) { Gitlab::ImportExport::Config.new.to_h }
+ let(:project_relation_factory) { Gitlab::ImportExport::Project::RelationFactory }
+
+ let(:relation_hash) { (permitted_attributes + prohibited_attributes).map(&:to_s).zip([]).to_h }
+ let(:relation_name) { project_relation_factory.overrides[relation_sym]&.to_sym || relation_sym }
+ let(:relation_class) { project_relation_factory.relation_class(relation_name) }
+ let(:excluded_keys) { import_export_config.dig(:excluded_keys, relation_sym) || [] }
+
+ let(:cleaned_hash) do
+ Gitlab::ImportExport::AttributeCleaner.new(
+ relation_hash: relation_hash,
+ relation_class: relation_class,
+ excluded_keys: excluded_keys
+ ).clean
+ end
+
+ let(:permitted_hash) { subject.permit(relation_sym, relation_hash) }
+
+ if described_class.new.permitted_attributes_defined?(relation_sym)
+ it 'contains only attributes that are defined as permitted in the import/export config' do
+ expect(permitted_hash.keys).to contain_exactly(*permitted_attributes.map(&:to_s))
+ end
+
+ it 'does not contain attributes that would be cleaned with AttributeCleaner' do
+ expect(cleaned_hash.keys).to include(*permitted_hash.keys)
+ end
+
+ it 'does not contain prohibited attributes that are not related to given relation' do
+ expect(permitted_hash.keys).not_to include(*prohibited_attributes.map(&:to_s))
+ end
+ else
+ it 'is disabled' do
+ expect(subject).not_to be_permitted_attributes_defined(relation_sym)
+ end
+ end
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index 77d126e012e..a9efa32f986 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -167,6 +167,7 @@ ProjectMember:
- expires_at
- ldap
- override
+- invite_email_success
User:
- id
- username
@@ -761,6 +762,7 @@ Board:
- group_id
- milestone_id
- iteration_id
+- iteration_cadence_id
- weight
- name
- hide_backlog_list
diff --git a/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb b/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb
index cd1828791c3..b2a11353d0c 100644
--- a/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb
+++ b/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb
@@ -130,15 +130,25 @@ RSpec.describe Gitlab::Instrumentation::RedisInterceptor, :clean_gitlab_redis_sh
end
context 'when report_on_long_redis_durations is enabled' do
- it 'tracks an exception and continues' do
- expect(Gitlab::ErrorTracking)
- .to receive(:track_exception)
- .with(an_instance_of(described_class::MysteryRedisDurationError),
- command: 'mget',
- duration: be > threshold,
- timestamp: a_string_matching(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{5}/))
+ context 'for an instance other than SharedState' do
+ it 'does nothing' do
+ expect(Gitlab::ErrorTracking).not_to receive(:track_exception)
- Gitlab::Redis::SharedState.with { |r| r.mget('foo', 'foo') { sleep threshold + 0.1 } }
+ Gitlab::Redis::Queues.with { |r| r.mget('foo', 'foo') { sleep threshold + 0.1 } }
+ end
+ end
+
+ context 'for the SharedState instance' do
+ it 'tracks an exception and continues' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .with(an_instance_of(described_class::MysteryRedisDurationError),
+ command: 'mget',
+ duration: be > threshold,
+ timestamp: a_string_matching(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{5}/))
+
+ Gitlab::Redis::SharedState.with { |r| r.mget('foo', 'foo') { sleep threshold + 0.1 } }
+ end
end
end
end
diff --git a/spec/lib/gitlab/instrumentation/redis_spec.rb b/spec/lib/gitlab/instrumentation/redis_spec.rb
index 6cddf958f2a..ebc2e92a0dd 100644
--- a/spec/lib/gitlab/instrumentation/redis_spec.rb
+++ b/spec/lib/gitlab/instrumentation/redis_spec.rb
@@ -28,6 +28,13 @@ RSpec.describe Gitlab::Instrumentation::Redis do
describe '.payload', :request_store do
before do
+ # If this is the first spec in a spec run that uses Redis, there
+ # will be an extra SELECT command to choose the right database. We
+ # don't want to make the spec less precise, so we force that to
+ # happen (if needed) first, then clear the counts.
+ Gitlab::Redis::Cache.with { |redis| redis.info }
+ RequestStore.clear!
+
Gitlab::Redis::Cache.with { |redis| redis.set('cache-test', 321) }
Gitlab::Redis::SharedState.with { |redis| redis.set('shared-state-test', 123) }
end
diff --git a/spec/lib/gitlab/issuables_count_for_state_spec.rb b/spec/lib/gitlab/issuables_count_for_state_spec.rb
index a6170c146ab..cc4ebba863d 100644
--- a/spec/lib/gitlab/issuables_count_for_state_spec.rb
+++ b/spec/lib/gitlab/issuables_count_for_state_spec.rb
@@ -66,4 +66,106 @@ RSpec.describe Gitlab::IssuablesCountForState do
end
end
end
+
+ context 'when store_in_redis_cache is `true`', :clean_gitlab_redis_cache do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+
+ let(:cache_options) { { expires_in: 1.hour } }
+ let(:cache_key) { ['group', group.id, 'issues'] }
+ let(:threshold) { described_class::THRESHOLD }
+ let(:states_count) { { opened: 1, closed: 1, all: 2 } }
+ let(:params) { {} }
+
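+ # The contexts below exercise the caching rule: state counts are written to the Redis cache only once they exceed THRESHOLD.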
+ subject { described_class.new(finder, fast_fail: true, store_in_redis_cache: true) }
+
+ before do
+ allow(finder).to receive(:count_by_state).and_return(states_count)
+ allow_next_instance_of(described_class) do |counter|
+ allow(counter).to receive(:parent_group).and_return(group)
+ end
+ end
+
+ shared_examples 'calculating counts without caching' do
+ it 'does not store in redis store' do
+ expect(Rails.cache).not_to receive(:read)
+ expect(finder).to receive(:count_by_state)
+ expect(Rails.cache).not_to receive(:write)
+ expect(subject[:all]).to eq(states_count[:all])
+ end
+ end
+
+ context 'with Issues' do
+ let(:finder) { IssuesFinder.new(user, params) }
+
+ it 'returns -1 for the requested state' do
+ allow(finder).to receive(:count_by_state).and_raise(ActiveRecord::QueryCanceled)
+ expect(Rails.cache).not_to receive(:write)
+
+ expect(subject[:all]).to eq(-1)
+ end
+
+ context 'when parent group is not present' do
+ let(:group) { nil }
+
+ it_behaves_like 'calculating counts without caching'
+ end
+
+ context 'when params include search filters' do
+ let(:parent) { group }
+
+ before do
+ finder.params[:assignee_username] = [user.username, 'root']
+ end
+
+ it_behaves_like 'calculating counts without caching'
+ end
+
+ context 'when counts are stored in cache' do
+ before do
+ allow(Rails.cache).to receive(:read).with(cache_key, cache_options)
+ .and_return({ opened: 1000, closed: 1000, all: 2000 })
+ end
+
+ it 'does not call finder count_by_state' do
+ expect(finder).not_to receive(:count_by_state)
+
+ expect(subject[:all]).to eq(2000)
+ end
+ end
+
+ context 'when cache is empty' do
+ context 'when state counts are under threshold' do
+ let(:states_count) { { opened: 1, closed: 1, all: 2 } }
+
+ it 'does not store state counts in cache' do
+ expect(Rails.cache).to receive(:read).with(cache_key, cache_options)
+ expect(finder).to receive(:count_by_state)
+ expect(Rails.cache).not_to receive(:write)
+ expect(subject[:all]).to eq(states_count[:all])
+ end
+ end
+
+ context 'when state counts are over threshold' do
+ let(:states_count) do
+ { opened: threshold + 1, closed: threshold + 1, all: (threshold + 1) * 2 }
+ end
+
+ it 'stores state counts in cache' do
+ expect(Rails.cache).to receive(:read).with(cache_key, cache_options)
+ expect(finder).to receive(:count_by_state)
+ expect(Rails.cache).to receive(:write).with(cache_key, states_count, cache_options)
+
+ expect(subject[:all]).to eq((threshold + 1) * 2)
+ end
+ end
+ end
+ end
+
+ context 'with Merge Requests' do
+ let(:finder) { MergeRequestsFinder.new(user, params) }
+
+ it_behaves_like 'calculating counts without caching'
+ end
+ end
end
diff --git a/spec/lib/gitlab/issues/rebalancing/state_spec.rb b/spec/lib/gitlab/issues/rebalancing/state_spec.rb
new file mode 100644
index 00000000000..bdd0dbd365d
--- /dev/null
+++ b/spec/lib/gitlab/issues/rebalancing/state_spec.rb
@@ -0,0 +1,223 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Issues::Rebalancing::State, :clean_gitlab_redis_shared_state do
+ shared_examples 'issues rebalance caching' do
+ describe '#track_new_running_rebalance' do
+ it 'caches a project id to track a rebalance in progress' do
+ expect { rebalance_caching.track_new_running_rebalance }.to change { rebalance_caching.concurrent_running_rebalances_count }.from(0).to(1)
+ end
+ end
+
+ describe '#set and get current_index' do
+ it 'returns zero as current index when index not cached' do
+ expect(rebalance_caching.get_current_index).to eq(0)
+ end
+
+ it 'returns cached current index' do
+ expect { rebalance_caching.cache_current_index(123) }.to change { rebalance_caching.get_current_index }.from(0).to(123)
+ end
+ end
+
+ describe '#set and get current_project' do
+ it 'returns nil if there is no project_id cached' do
+ expect(rebalance_caching.get_current_project_id).to be_nil
+ end
+
+ it 'returns cached current project_id' do
+ expect { rebalance_caching.cache_current_project_id(456) }.to change { rebalance_caching.get_current_project_id }.from(nil).to('456')
+ end
+ end
+
+ describe "#rebalance_in_progress?" do
+ it 'returns zero if no re-balances are running' do
+ expect(rebalance_caching.concurrent_running_rebalances_count).to eq(0)
+ end
+
+ it 'returns false if no re-balances are running' do
+ expect(rebalance_caching.rebalance_in_progress?).to be false
+ end
+
+ it 'returns true if a re-balance for the given project/namespace is running' do
+ rebalance_caching.track_new_running_rebalance
+
+ expect(rebalance_caching.rebalance_in_progress?).to be true
+ end
+ end
+
+ context 'caching issue ids' do
+ context 'with no issue ids cached' do
+ it 'returns zero when there are no cached issue ids' do
+ expect(rebalance_caching.issue_count).to eq(0)
+ end
+
+ it 'returns empty array when there are no cached issue ids' do
+ expect(rebalance_caching.get_cached_issue_ids(0, 100)).to eq([])
+ end
+ end
+
+ context 'with cached issue ids' do
+ before do
+ generate_and_cache_issues_ids(count: 3)
+ end
+
+ it 'returns count of cached issue ids' do
+ expect(rebalance_caching.issue_count).to eq(3)
+ end
+
+ it 'returns array of issue ids' do
+ expect(rebalance_caching.get_cached_issue_ids(0, 100)).to eq(%w(1 2 3))
+ end
+
+ it 'limits returned values' do
+ expect(rebalance_caching.get_cached_issue_ids(0, 2)).to eq(%w(1 2))
+ end
+
+ context 'when caching duplicate issue_ids' do
+ before do
+ generate_and_cache_issues_ids(count: 3, position_offset: 3, position_direction: -1)
+ end
+
+ it 'does not cache duplicate issues' do
+ expect(rebalance_caching.issue_count).to eq(3)
+ end
+
+ it 'returns cached issues with latest scores' do
+ expect(rebalance_caching.get_cached_issue_ids(0, 100)).to eq(%w(3 2 1))
+ end
+ end
+ end
+ end
+
+ context 'when setting expiration' do
+ context 'when tracking new rebalance' do
+ it 'returns as expired for non existent key' do
+ ::Gitlab::Redis::SharedState.with do |redis|
+ expect(redis.ttl(rebalance_caching.send(:concurrent_running_rebalances_key))).to be < 0
+ end
+ end
+
+ it 'has expiration set' do
+ rebalance_caching.track_new_running_rebalance
+
+ ::Gitlab::Redis::SharedState.with do |redis|
+ expect(redis.ttl(rebalance_caching.send(:concurrent_running_rebalances_key))).to be_between(0, described_class::REDIS_EXPIRY_TIME.ago.to_i)
+ end
+ end
+ end
+
+ context 'when setting current index' do
+ it 'returns as expired for non existent key' do
+ ::Gitlab::Redis::SharedState.with do |redis|
+ expect(redis.ttl(rebalance_caching.send(:current_index_key))).to be < 0
+ end
+ end
+
+ it 'has expiration set' do
+ rebalance_caching.cache_current_index(123)
+
+ ::Gitlab::Redis::SharedState.with do |redis|
+ expect(redis.ttl(rebalance_caching.send(:current_index_key))).to be_between(0, described_class::REDIS_EXPIRY_TIME.ago.to_i)
+ end
+ end
+ end
+
+ context 'when setting current project id' do
+ it 'returns as expired for non existent key' do
+ ::Gitlab::Redis::SharedState.with do |redis|
+ expect(redis.ttl(rebalance_caching.send(:current_project_key))).to be < 0
+ end
+ end
+
+ it 'has expiration set' do
+ rebalance_caching.cache_current_project_id(456)
+
+ ::Gitlab::Redis::SharedState.with do |redis|
+ expect(redis.ttl(rebalance_caching.send(:current_project_key))).to be_between(0, described_class::REDIS_EXPIRY_TIME.ago.to_i)
+ end
+ end
+ end
+
+ context 'when setting cached issue ids' do
+ it 'returns as expired for non existent key' do
+ ::Gitlab::Redis::SharedState.with do |redis|
+ expect(redis.ttl(rebalance_caching.send(:issue_ids_key))).to be < 0
+ end
+ end
+
+ it 'has expiration set' do
+ generate_and_cache_issues_ids(count: 3)
+
+ ::Gitlab::Redis::SharedState.with do |redis|
+ expect(redis.ttl(rebalance_caching.send(:issue_ids_key))).to be_between(0, described_class::REDIS_EXPIRY_TIME.ago.to_i)
+ end
+ end
+ end
+ end
+
+ context 'cleanup cache' do
+ before do
+ generate_and_cache_issues_ids(count: 3)
+ rebalance_caching.cache_current_index(123)
+ rebalance_caching.cache_current_project_id(456)
+ rebalance_caching.track_new_running_rebalance
+ end
+
+ it 'removes cache keys' do
+ expect(check_existing_keys).to eq(4)
+
+ rebalance_caching.cleanup_cache
+
+ expect(check_existing_keys).to eq(0)
+ end
+ end
+ end
+
+ context 'rebalancing issues in namespace' do
+ let_it_be(:group) { create(:group, :private) }
+ let_it_be(:project) { create(:project, namespace: group) }
+
+ subject(:rebalance_caching) { described_class.new(group, group.projects) }
+
+ it { expect(rebalance_caching.send(:rebalanced_container_type)).to eq(described_class::NAMESPACE) }
+
+ it_behaves_like 'issues rebalance caching'
+ end
+
+ context 'rebalancing issues in a project' do
+ let_it_be(:project) { create(:project) }
+
+ subject(:rebalance_caching) { described_class.new(project.namespace, Project.where(id: project)) }
+
+ it { expect(rebalance_caching.send(:rebalanced_container_type)).to eq(described_class::PROJECT) }
+
+ it_behaves_like 'issues rebalance caching'
+ end
+
+ # count - how many issue ids to generate; ids start at 1
+ # position_offset - offset applied to each generated relative_position;
+ # relative_position is computed as position_direction * (id * 10 + position_offset)
+ # position_direction - (1) for positive relative_positions, (-1) for negative relative_positions
+ def generate_and_cache_issues_ids(count:, position_offset: 0, position_direction: 1)
+ issues = []
+
+ count.times do |idx|
+ id = idx + 1
+ issues << double(relative_position: position_direction * (id * 10 + position_offset), id: id)
+ end
+
+ rebalance_caching.cache_issue_ids(issues)
+ end
+
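+ # Counts how many of the four cached values (current index, current project id, issue ids, running rebalances) are present.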
+ def check_existing_keys
+ index = 0
+
+ index += 1 if rebalance_caching.get_current_index > 0
+ index += 1 if rebalance_caching.get_current_project_id.present?
+ index += 1 if rebalance_caching.get_cached_issue_ids(0, 100).present?
+ index += 1 if rebalance_caching.rebalance_in_progress?
+
+ index
+ end
+end
diff --git a/spec/lib/gitlab/kas/client_spec.rb b/spec/lib/gitlab/kas/client_spec.rb
index 40e18f58ee4..5b89023cc13 100644
--- a/spec/lib/gitlab/kas/client_spec.rb
+++ b/spec/lib/gitlab/kas/client_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Kas::Client do
let_it_be(:project) { create(:project) }
+ let_it_be(:agent) { create(:cluster_agent, project: project) }
describe '#initialize' do
context 'kas is not enabled' do
@@ -44,6 +45,32 @@ RSpec.describe Gitlab::Kas::Client do
expect(token).to receive(:audience=).with(described_class::JWT_AUDIENCE)
end
+ describe '#get_connected_agents' do
+ let(:stub) { instance_double(Gitlab::Agent::AgentTracker::Rpc::AgentTracker::Stub) }
+ let(:request) { instance_double(Gitlab::Agent::AgentTracker::Rpc::GetConnectedAgentsRequest) }
+ let(:response) { double(Gitlab::Agent::AgentTracker::Rpc::GetConnectedAgentsResponse, agents: connected_agents) }
+
+ let(:connected_agents) { [double] }
+
+ subject { described_class.new.get_connected_agents(project: project) }
+
+ before do
+ expect(Gitlab::Agent::AgentTracker::Rpc::AgentTracker::Stub).to receive(:new)
+ .with('example.kas.internal', :this_channel_is_insecure, timeout: described_class::TIMEOUT)
+ .and_return(stub)
+
+ expect(Gitlab::Agent::AgentTracker::Rpc::GetConnectedAgentsRequest).to receive(:new)
+ .with(project_id: project.id)
+ .and_return(request)
+
+ expect(stub).to receive(:get_connected_agents)
+ .with(request, metadata: { 'authorization' => 'bearer test-token' })
+ .and_return(response)
+ end
+
+ it { expect(subject).to eq(connected_agents) }
+ end
+
describe '#list_agent_config_files' do
let(:stub) { instance_double(Gitlab::Agent::ConfigurationProject::Rpc::ConfigurationProject::Stub) }
diff --git a/spec/lib/gitlab/middleware/sidekiq_web_static_spec.rb b/spec/lib/gitlab/middleware/sidekiq_web_static_spec.rb
new file mode 100644
index 00000000000..e6815a46a56
--- /dev/null
+++ b/spec/lib/gitlab/middleware/sidekiq_web_static_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Middleware::SidekiqWebStatic do
+ let(:app) { double(:app) }
+ let(:middleware) { described_class.new(app) }
+ let(:env) { {} }
+
+ describe '#call' do
+ before do
+ env['HTTP_X_SENDFILE_TYPE'] = 'X-Sendfile'
+ env['PATH_INFO'] = path
+ end
+
+ context 'with an /admin/sidekiq route' do
+ let(:path) { '/admin/sidekiq/javascripts/application.js' }
+
+ it 'deletes the HTTP_X_SENDFILE_TYPE header' do
+ expect(app).to receive(:call)
+
+ middleware.call(env)
+
+ expect(env['HTTP_X_SENDFILE_TYPE']).to be_nil
+ end
+ end
+
+ context 'with some static asset route' do
+ let(:path) { '/assets/test.png' }
+
+ it 'keeps the HTTP_X_SENDFILE_TYPE header' do
+ expect(app).to receive(:call)
+
+ middleware.call(env)
+
+ expect(env['HTTP_X_SENDFILE_TYPE']).to eq('X-Sendfile')
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb b/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb
new file mode 100644
index 00000000000..ac2695977c4
--- /dev/null
+++ b/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Pagination::CursorBasedKeyset do
+ subject { described_class }
+
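+ # The examples below suggest cursor-based keyset pagination is currently offered only for Group relations ordered by name ascending.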
+ describe '.available_for_type?' do
+ it 'returns true for Group' do
+ expect(subject.available_for_type?(Group.all)).to be_truthy
+ end
+
+ it 'returns false for other types of relations' do
+ expect(subject.available_for_type?(User.all)).to be_falsey
+ end
+ end
+
+ describe '.available?' do
+ let(:request_context) { double('request_context', params: { order_by: order_by, sort: sort }) }
+ let(:cursor_based_request_context) { Gitlab::Pagination::Keyset::CursorBasedRequestContext.new(request_context) }
+
+ context 'with order-by name asc' do
+ let(:order_by) { :name }
+ let(:sort) { :asc }
+
+ it 'returns true for Group' do
+ expect(subject.available?(cursor_based_request_context, Group.all)).to be_truthy
+ end
+
+ it 'returns false for other types of relations' do
+ expect(subject.available?(cursor_based_request_context, User.all)).to be_falsey
+ end
+ end
+
+ context 'with other order-by columns' do
+ let(:order_by) { :path }
+ let(:sort) { :asc }
+
+ it 'returns false for Group' do
+ expect(subject.available?(cursor_based_request_context, Group.all)).to be_falsey
+ end
+
+ it 'returns false for other types of relations' do
+ expect(subject.available?(cursor_based_request_context, User.all)).to be_falsey
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/pagination/gitaly_keyset_pager_spec.rb b/spec/lib/gitlab/pagination/gitaly_keyset_pager_spec.rb
index 8a26e153385..dcb8138bdde 100644
--- a/spec/lib/gitlab/pagination/gitaly_keyset_pager_spec.rb
+++ b/spec/lib/gitlab/pagination/gitaly_keyset_pager_spec.rb
@@ -74,7 +74,7 @@ RSpec.describe Gitlab::Pagination::GitalyKeysetPager do
allow(request_context).to receive(:request).and_return(fake_request)
allow(project.repository).to receive(:branch_count).and_return(branches.size)
- expect(finder).to receive(:execute).with(gitaly_pagination: true).and_return(branches)
+ expect(finder).to receive(:execute).and_return(branches)
expect(request_context).to receive(:header).with('X-Per-Page', '2')
expect(request_context).to receive(:header).with('X-Page', '1')
expect(request_context).to receive(:header).with('X-Next-Page', '2')
@@ -99,6 +99,7 @@ RSpec.describe Gitlab::Pagination::GitalyKeysetPager do
before do
allow(request_context).to receive(:request).and_return(fake_request)
+ allow(finder).to receive(:is_a?).with(BranchesFinder) { true }
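+ # The is_a? stub above presumably lets the finder double pass the BranchesFinder check that gates Gitaly-side pagination.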
expect(finder).to receive(:execute).with(gitaly_pagination: true).and_return(branches)
end
diff --git a/spec/lib/gitlab/pagination/keyset/column_order_definition_spec.rb b/spec/lib/gitlab/pagination/keyset/column_order_definition_spec.rb
index 6e9e987f90c..69384e0c501 100644
--- a/spec/lib/gitlab/pagination/keyset/column_order_definition_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset/column_order_definition_spec.rb
@@ -185,4 +185,25 @@ RSpec.describe Gitlab::Pagination::Keyset::ColumnOrderDefinition do
end
end
end
+
+ describe "#order_direction_as_sql_string" do
+ let(:nulls_last_order) do
+ described_class.new(
+ attribute_name: :name,
+ column_expression: Project.arel_table[:name],
+ order_expression: Gitlab::Database.nulls_last_order('merge_request_metrics.merged_at', :desc),
+ reversed_order_expression: Gitlab::Database.nulls_first_order('merge_request_metrics.merged_at', :asc),
+ order_direction: :desc,
+ nullable: :nulls_last, # null values are always last
+ distinct: false
+ )
+ end
+
+ it { expect(project_name_column.order_direction_as_sql_string).to eq('ASC') }
+ it { expect(project_name_column.reverse.order_direction_as_sql_string).to eq('DESC') }
+ it { expect(project_name_lower_column.order_direction_as_sql_string).to eq('DESC') }
+ it { expect(project_name_lower_column.reverse.order_direction_as_sql_string).to eq('ASC') }
+ it { expect(nulls_last_order.order_direction_as_sql_string).to eq('DESC NULLS LAST') }
+ it { expect(nulls_last_order.reverse.order_direction_as_sql_string).to eq('ASC NULLS FIRST') }
+ end
end
diff --git a/spec/lib/gitlab/pagination/keyset/cursor_based_request_context_spec.rb b/spec/lib/gitlab/pagination/keyset/cursor_based_request_context_spec.rb
new file mode 100644
index 00000000000..79de6f230ec
--- /dev/null
+++ b/spec/lib/gitlab/pagination/keyset/cursor_based_request_context_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Pagination::Keyset::CursorBasedRequestContext do
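+ # The cursor value used below is Base64-encoded JSON: {"name":"GitLab Instance","id":"52","_kd":"n"}.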
+ let(:params) { { per_page: 2, cursor: 'eyJuYW1lIjoiR2l0TGFiIEluc3RhbmNlIiwiaWQiOiI1MiIsIl9rZCI6Im4ifQ==', order_by: :name, sort: :asc } }
+ let(:request) { double('request', url: 'http://localhost') }
+ let(:request_context) { double('request_context', header: nil, params: params, request: request) }
+
+ describe '#per_page' do
+ subject(:per_page) { described_class.new(request_context).per_page }
+
+ it { is_expected.to eq 2 }
+ end
+
+ describe '#cursor' do
+ subject(:cursor) { described_class.new(request_context).cursor }
+
+ it { is_expected.to eq 'eyJuYW1lIjoiR2l0TGFiIEluc3RhbmNlIiwiaWQiOiI1MiIsIl9rZCI6Im4ifQ==' }
+ end
+
+ describe '#order_by' do
+ subject(:order_by) { described_class.new(request_context).order_by }
+
+ it { is_expected.to eq({ name: :asc }) }
+ end
+
+ describe '#apply_headers' do
+ let(:request) { double('request', url: "http://#{Gitlab.config.gitlab.host}/api/v4/projects?per_page=3") }
+ let(:params) { { per_page: 3 } }
+ let(:request_context) { double('request_context', header: nil, params: params, request: request) }
+ let(:cursor_for_next_page) { 'eyJuYW1lIjoiSDVicCIsImlkIjoiMjgiLCJfa2QiOiJuIn0=' }
+
+ subject(:apply_headers) { described_class.new(request_context).apply_headers(cursor_for_next_page) }
+
+ it 'sets Link header with same host/path as the original request' do
+ orig_uri = URI.parse(request_context.request.url)
+
+ expect(request_context).to receive(:header).once do |name, header|
+ first_link, _ = /<([^>]+)>; rel="next"/.match(header).captures
+
+ uri = URI.parse(first_link)
+
+ expect(name).to eq('Link')
+ expect(uri.host).to eq(orig_uri.host)
+ expect(uri.path).to eq(orig_uri.path)
+ end
+
+ apply_headers
+ end
+
+ it 'sets Link header with a cursor to the next page' do
+ orig_uri = URI.parse(request_context.request.url)
+
+ expect(request_context).to receive(:header).once do |name, header|
+ first_link, _ = /<([^>]+)>; rel="next"/.match(header).captures
+
+ query = CGI.parse(URI.parse(first_link).query)
+
+ expect(name).to eq('Link')
+ expect(query.except('cursor')).to eq(CGI.parse(orig_uri.query).except('cursor'))
+ expect(query['cursor']).to eq([cursor_for_next_page])
+ end
+
+ apply_headers
+ end
+ end
+end
diff --git a/spec/lib/gitlab/pagination/keyset/cursor_pager_spec.rb b/spec/lib/gitlab/pagination/keyset/cursor_pager_spec.rb
new file mode 100644
index 00000000000..783e728b34c
--- /dev/null
+++ b/spec/lib/gitlab/pagination/keyset/cursor_pager_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Pagination::Keyset::CursorPager do
+ let(:relation) { Group.all.order(:name, :id) }
+ let(:per_page) { 3 }
+ let(:params) { { cursor: nil, per_page: per_page } }
+ let(:request_context) { double('request_context', params: params) }
+ let(:cursor_based_request_context) { Gitlab::Pagination::Keyset::CursorBasedRequestContext.new(request_context) }
+
+ before_all do
+ create_list(:group, 7)
+ end
+
+ describe '#paginate' do
+ subject(:paginated_result) { described_class.new(cursor_based_request_context).paginate(relation) }
+
+ it 'returns the limited relation' do
+ expect(paginated_result).to eq(relation.limit(per_page))
+ end
+ end
+
+ describe '#finalize' do
+ subject(:finalize) do
+ service = described_class.new(cursor_based_request_context)
+ # we need to do this because `finalize` can only be called
+ # after `paginate` is called. Otherwise the `paginator` object won't be set.
+ service.paginate(relation)
+ service.finalize
+ end
+
+ it 'passes information about next page to request' do
+ cursor_for_next_page = relation.keyset_paginate(**params).cursor_for_next_page
+
+ expect_next_instance_of(Gitlab::Pagination::Keyset::HeaderBuilder, request_context) do |builder|
+ expect(builder).to receive(:add_next_page_header).with({ cursor: cursor_for_next_page })
+ end
+
+ finalize
+ end
+
+ context 'when retrieving the last page' do
+ let(:relation) { Group.where('id > ?', Group.maximum(:id) - per_page).order(:name, :id) }
+
+ it 'does not build information about the next page' do
+ expect(Gitlab::Pagination::Keyset::HeaderBuilder).not_to receive(:new)
+
+ finalize
+ end
+ end
+
+ context 'when retrieving an empty page' do
+ let(:relation) { Group.where('id > ?', Group.maximum(:id) + 1).order(:name, :id) }
+
+ it 'does not build information about the next page' do
+ expect(Gitlab::Pagination::Keyset::HeaderBuilder).not_to receive(:new)
+
+ finalize
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/pagination/keyset/in_operator_optimization/array_scope_columns_spec.rb b/spec/lib/gitlab/pagination/keyset/in_operator_optimization/array_scope_columns_spec.rb
new file mode 100644
index 00000000000..2cebf0d9473
--- /dev/null
+++ b/spec/lib/gitlab/pagination/keyset/in_operator_optimization/array_scope_columns_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::ArrayScopeColumns do
+ let(:columns) { [:relative_position, :id] }
+
+ subject(:array_scope_columns) { described_class.new(columns) }
+
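+ # The aggregated names are derived from the array CTE prefix plus the column name, with an "_array" suffix.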
+ it 'builds array column names' do
+ expect(array_scope_columns.array_aggregated_column_names).to eq(%w[array_cte_relative_position_array array_cte_id_array])
+ end
+
+ context 'when no columns are given' do
+ let(:columns) { [] }
+
+ it { expect { array_scope_columns }.to raise_error /No array columns were given/ }
+ end
+end
diff --git a/spec/lib/gitlab/pagination/keyset/in_operator_optimization/column_data_spec.rb b/spec/lib/gitlab/pagination/keyset/in_operator_optimization/column_data_spec.rb
new file mode 100644
index 00000000000..4f200c9096f
--- /dev/null
+++ b/spec/lib/gitlab/pagination/keyset/in_operator_optimization/column_data_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::ColumnData do
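+ # Arguments: the original column name, the alias used in SELECT projections, and the Arel table the column belongs to.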
+ subject(:column_data) { described_class.new('id', 'issue_id', Issue.arel_table) }
+
+ describe '#array_aggregated_column_name' do
+ it { expect(column_data.array_aggregated_column_name).to eq('issues_id_array') }
+ end
+
+ describe '#projection' do
+ it 'returns the Arel projection for the column with a new alias' do
+ expect(column_data.projection.to_sql).to eq('"issues"."id" AS issue_id')
+ end
+ end
+
+ it 'accepts symbols for original_column_name and as' do
+ column_data = described_class.new(:id, :issue_id, Issue.arel_table)
+
+ expect(column_data.projection.to_sql).to eq('"issues"."id" AS issue_id')
+ end
+end
diff --git a/spec/lib/gitlab/pagination/keyset/in_operator_optimization/order_by_columns_spec.rb b/spec/lib/gitlab/pagination/keyset/in_operator_optimization/order_by_columns_spec.rb
new file mode 100644
index 00000000000..f4fa14e2261
--- /dev/null
+++ b/spec/lib/gitlab/pagination/keyset/in_operator_optimization/order_by_columns_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::OrderByColumns do
+ let(:columns) do
+ [
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: :relative_position,
+ order_expression: Issue.arel_table[:relative_position].desc
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: :id,
+ order_expression: Issue.arel_table[:id].desc
+ )
+ ]
+ end
+
+ subject(:order_by_columns) { described_class.new(columns, Issue.arel_table) }
+
+ describe '#array_aggregated_column_names' do
+ it { expect(order_by_columns.array_aggregated_column_names).to eq(%w[issues_relative_position_array issues_id_array]) }
+ end
+
+ describe '#original_column_names' do
+ it { expect(order_by_columns.original_column_names).to eq(%w[relative_position id]) }
+ end
+
+ describe '#cursor_values' do
+ it 'returns the keyset pagination cursor values from the column arrays as SQL expression' do
+ expect(order_by_columns.cursor_values('tbl')).to eq({
+ "id" => "tbl.issues_id_array[position]",
+ "relative_position" => "tbl.issues_relative_position_array[position]"
+ })
+ end
+ end
+end
diff --git a/spec/lib/gitlab/pagination/keyset/in_operator_optimization/query_builder_spec.rb b/spec/lib/gitlab/pagination/keyset/in_operator_optimization/query_builder_spec.rb
new file mode 100644
index 00000000000..4ce51e37685
--- /dev/null
+++ b/spec/lib/gitlab/pagination/keyset/in_operator_optimization/query_builder_spec.rb
@@ -0,0 +1,225 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::QueryBuilder do
+ let_it_be(:two_weeks_ago) { 2.weeks.ago }
+ let_it_be(:three_weeks_ago) { 3.weeks.ago }
+ let_it_be(:four_weeks_ago) { 4.weeks.ago }
+ let_it_be(:five_weeks_ago) { 5.weeks.ago }
+
+ let_it_be(:top_level_group) { create(:group) }
+ let_it_be(:sub_group_1) { create(:group, parent: top_level_group) }
+ let_it_be(:sub_group_2) { create(:group, parent: top_level_group) }
+ let_it_be(:sub_sub_group_1) { create(:group, parent: sub_group_2) }
+
+ let_it_be(:project_1) { create(:project, group: top_level_group) }
+ let_it_be(:project_2) { create(:project, group: top_level_group) }
+
+ let_it_be(:project_3) { create(:project, group: sub_group_1) }
+ let_it_be(:project_4) { create(:project, group: sub_group_2) }
+
+ let_it_be(:project_5) { create(:project, group: sub_sub_group_1) }
+
+ let_it_be(:issues) do
+ [
+ create(:issue, project: project_1, created_at: three_weeks_ago, relative_position: 5),
+ create(:issue, project: project_1, created_at: two_weeks_ago),
+ create(:issue, project: project_2, created_at: two_weeks_ago, relative_position: 15),
+ create(:issue, project: project_2, created_at: two_weeks_ago),
+ create(:issue, project: project_3, created_at: four_weeks_ago),
+ create(:issue, project: project_4, created_at: five_weeks_ago, relative_position: 10),
+ create(:issue, project: project_5, created_at: four_weeks_ago)
+ ]
+ end
+
+ shared_examples 'correct ordering examples' do
+ let(:iterator) do
+ Gitlab::Pagination::Keyset::Iterator.new(
+ scope: scope.limit(batch_size),
+ in_operator_optimization_options: in_operator_optimization_options
+ )
+ end
+
+ it 'returns records in correct order' do
+ all_records = []
+ iterator.each_batch(of: batch_size) do |records|
+ all_records.concat(records)
+ end
+
+ expect(all_records).to eq(expected_order)
+ end
+ end
+
+ context 'when ordering by issues.id DESC' do
+ let(:scope) { Issue.order(id: :desc) }
+ let(:expected_order) { issues.sort_by(&:id).reverse }
+
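+ # In short: array_scope yields the project IDs in the group hierarchy,
+ # array_mapping_scope maps one of those IDs to its issues, and
+ # finder_query loads the issue row for the selected cursor values.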
+ let(:in_operator_optimization_options) do
+ {
+ array_scope: Project.where(namespace_id: top_level_group.self_and_descendants.select(:id)).select(:id),
+ array_mapping_scope: -> (id_expression) { Issue.where(Issue.arel_table[:project_id].eq(id_expression)) },
+ finder_query: -> (id_expression) { Issue.where(Issue.arel_table[:id].eq(id_expression)) }
+ }
+ end
+
+ context 'when iterating records one by one' do
+ let(:batch_size) { 1 }
+
+ it_behaves_like 'correct ordering examples'
+ end
+
+ context 'when iterating records with LIMIT 3' do
+ let(:batch_size) { 3 }
+
+ it_behaves_like 'correct ordering examples'
+ end
+
+ context 'when loading records at once' do
+ let(:batch_size) { issues.size + 1 }
+
+ it_behaves_like 'correct ordering examples'
+ end
+ end
+
+ context 'when ordering by issues.relative_position DESC NULLS LAST, id DESC' do
+ let(:scope) { Issue.order(order) }
+ let(:expected_order) { scope.to_a }
+
+ let(:order) do
+ # NULLS LAST ordering requires custom Order object for keyset pagination:
+ # https://docs.gitlab.com/ee/development/database/keyset_pagination.html#complex-order-configuration
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: :relative_position,
+ column_expression: Issue.arel_table[:relative_position],
+ order_expression: Gitlab::Database.nulls_last_order('relative_position', :desc),
+ reversed_order_expression: Gitlab::Database.nulls_first_order('relative_position', :asc),
+ order_direction: :desc,
+ nullable: :nulls_last,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: :id,
+ order_expression: Issue.arel_table[:id].desc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
+ end
+
+ let(:in_operator_optimization_options) do
+ {
+ array_scope: Project.where(namespace_id: top_level_group.self_and_descendants.select(:id)).select(:id),
+ array_mapping_scope: -> (id_expression) { Issue.where(Issue.arel_table[:project_id].eq(id_expression)) },
+ finder_query: -> (_relative_position_expression, id_expression) { Issue.where(Issue.arel_table[:id].eq(id_expression)) }
+ }
+ end
+
+ context 'when iterating records one by one' do
+ let(:batch_size) { 1 }
+
+ it_behaves_like 'correct ordering examples'
+ end
+
+ context 'when iterating records with LIMIT 3' do
+ let(:batch_size) { 3 }
+
+ it_behaves_like 'correct ordering examples'
+ end
+ end
+
+ context 'when ordering by issues.created_at DESC, issues.id ASC' do
+ let(:scope) { Issue.order(created_at: :desc, id: :asc) }
+ let(:expected_order) { issues.sort_by { |issue| [issue.created_at.to_f * -1, issue.id] } }
+
+ let(:in_operator_optimization_options) do
+ {
+ array_scope: Project.where(namespace_id: top_level_group.self_and_descendants.select(:id)).select(:id),
+ array_mapping_scope: -> (id_expression) { Issue.where(Issue.arel_table[:project_id].eq(id_expression)) },
+ finder_query: -> (_created_at_expression, id_expression) { Issue.where(Issue.arel_table[:id].eq(id_expression)) }
+ }
+ end
+
+ context 'when iterating records one by one' do
+ let(:batch_size) { 1 }
+
+ it_behaves_like 'correct ordering examples'
+ end
+
+ context 'when iterating records with LIMIT 3' do
+ let(:batch_size) { 3 }
+
+ it_behaves_like 'correct ordering examples'
+ end
+
+ context 'when loading records at once' do
+ let(:batch_size) { issues.size + 1 }
+
+ it_behaves_like 'correct ordering examples'
+ end
+ end
+
+ context 'pagination support' do
+ let(:scope) { Issue.order(id: :desc) }
+ let(:expected_order) { issues.sort_by(&:id).reverse }
+
+ let(:options) do
+ {
+ scope: scope,
+ array_scope: Project.where(namespace_id: top_level_group.self_and_descendants.select(:id)).select(:id),
+ array_mapping_scope: -> (id_expression) { Issue.where(Issue.arel_table[:project_id].eq(id_expression)) },
+ finder_query: -> (id_expression) { Issue.where(Issue.arel_table[:id].eq(id_expression)) }
+ }
+ end
+
+ context 'offset pagination' do
+ subject(:optimized_scope) { described_class.new(**options).execute }
+
+ it 'paginates the scopes' do
+ first_page = optimized_scope.page(1).per(2)
+ expect(first_page).to eq(expected_order[0...2])
+
+ second_page = optimized_scope.page(2).per(2)
+ expect(second_page).to eq(expected_order[2...4])
+
+ third_page = optimized_scope.page(3).per(2)
+ expect(third_page).to eq(expected_order[4...6])
+ end
+ end
+
+ context 'keyset pagination' do
+ def paginator(cursor = nil)
+ scope.keyset_paginate(cursor: cursor, per_page: 2, keyset_order_options: options)
+ end
+
+ it 'paginates correctly' do
+ first_page = paginator.records
+ expect(first_page).to eq(expected_order[0...2])
+
+ cursor_for_page_2 = paginator.cursor_for_next_page
+
+ second_page = paginator(cursor_for_page_2).records
+ expect(second_page).to eq(expected_order[2...4])
+
+ cursor_for_page_3 = paginator(cursor_for_page_2).cursor_for_next_page
+
+ third_page = paginator(cursor_for_page_3).records
+ expect(third_page).to eq(expected_order[4...6])
+ end
+ end
+ end
+
+ it 'raises an error when an unsupported scope is passed' do
+ scope = Issue.order(Issue.arel_table[:id].lower.desc)
+
+ options = {
+ scope: scope,
+ array_scope: Project.where(namespace_id: top_level_group.self_and_descendants.select(:id)).select(:id),
+ array_mapping_scope: -> (id_expression) { Issue.where(Issue.arel_table[:project_id].eq(id_expression)) },
+ finder_query: -> (id_expression) { Issue.where(Issue.arel_table[:id].eq(id_expression)) }
+ }
+
+ expect { described_class.new(**options).execute }.to raise_error(/The order on the scope does not support keyset pagination/)
+ end
+end
diff --git a/spec/lib/gitlab/pagination/keyset/order_spec.rb b/spec/lib/gitlab/pagination/keyset/order_spec.rb
index b867dd533e0..3c14d91fdfd 100644
--- a/spec/lib/gitlab/pagination/keyset/order_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset/order_spec.rb
@@ -538,6 +538,47 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
end
it_behaves_like 'cursor attribute examples'
+
+ context 'with projections' do
+ context 'when additional_projections is empty' do
+ let(:scope) { Project.select(:id, :namespace_id) }
+
+ subject(:sql) { order.apply_cursor_conditions(scope, { id: '100' }).to_sql }
+
+ it 'has correct projections' do
+ is_expected.to include('SELECT "projects"."id", "projects"."namespace_id" FROM "projects"')
+ end
+ end
+
+ context 'when there are additional_projections' do
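+ # add_to_projections: true appends the column, aliased as the attribute name, to the generated SELECT list (see the expected SQL below).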
+ let(:order) do
+ order = Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'created_at_field',
+ column_expression: Project.arel_table[:created_at],
+ order_expression: Project.arel_table[:created_at].desc,
+ order_direction: :desc,
+ distinct: false,
+ add_to_projections: true
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ order_expression: Project.arel_table[:id].desc
+ )
+ ])
+
+ order
+ end
+
+ let(:scope) { Project.select(:id, :namespace_id).reorder(order) }
+
+ subject(:sql) { order.apply_cursor_conditions(scope).to_sql }
+
+ it 'has correct projections' do
+ is_expected.to include('SELECT "projects"."id", "projects"."namespace_id", "projects"."created_at" AS created_at_field FROM "projects"')
+ end
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/pagination/offset_pagination_spec.rb b/spec/lib/gitlab/pagination/offset_pagination_spec.rb
index f8d50fbc517..ffecbb06ff8 100644
--- a/spec/lib/gitlab/pagination/offset_pagination_spec.rb
+++ b/spec/lib/gitlab/pagination/offset_pagination_spec.rb
@@ -82,7 +82,7 @@ RSpec.describe Gitlab::Pagination::OffsetPagination do
context 'when the api_kaminari_count_with_limit feature flag is enabled' do
before do
- stub_feature_flags(api_kaminari_count_with_limit: true)
+ stub_feature_flags(api_kaminari_count_with_limit: true, lower_relation_max_count_limit: false)
end
context 'when resources count is less than MAX_COUNT_LIMIT' do
@@ -120,6 +120,41 @@ RSpec.describe Gitlab::Pagination::OffsetPagination do
end
end
+ context 'when lower_relation_max_count_limit FF is enabled' do
+ before do
+ stub_feature_flags(lower_relation_max_count_limit: true)
+ end
+
+ it_behaves_like 'paginated response'
+ it_behaves_like 'response with pagination headers'
+
+ context 'when limit is met' do
+ before do
+ stub_const("::Kaminari::ActiveRecordRelationMethods::MAX_COUNT_NEW_LOWER_LIMIT", 2)
+ end
+
+ it_behaves_like 'paginated response'
+
+ it 'does not return the X-Total and X-Total-Pages headers' do
+ expect_no_header('X-Total')
+ expect_no_header('X-Total-Pages')
+ expect_header('X-Per-Page', '2')
+ expect_header('X-Page', '1')
+ expect_header('X-Next-Page', '2')
+ expect_header('X-Prev-Page', '')
+
+ expect_header('Link', anything) do |_key, val|
+ expect(val).to include(%Q(<#{incoming_api_projects_url}?#{query.merge(page: 1).to_query}>; rel="first"))
+ expect(val).to include(%Q(<#{incoming_api_projects_url}?#{query.merge(page: 2).to_query}>; rel="next"))
+ expect(val).not_to include('rel="last"')
+ expect(val).not_to include('rel="prev"')
+ end
+
+ subject.paginate(resource)
+ end
+ end
+ end
+
it 'does not return the total headers when excluding them' do
expect_no_header('X-Total')
expect_no_header('X-Total-Pages')
diff --git a/spec/lib/gitlab/patch/legacy_database_config_spec.rb b/spec/lib/gitlab/patch/legacy_database_config_spec.rb
new file mode 100644
index 00000000000..e6c0bdbf360
--- /dev/null
+++ b/spec/lib/gitlab/patch/legacy_database_config_spec.rb
@@ -0,0 +1,123 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Patch::LegacyDatabaseConfig do
+ it 'module is included' do
+ expect(Rails::Application::Configuration).to include(described_class)
+ end
+
+ describe 'config/database.yml' do
+ let(:configuration) { Rails::Application::Configuration.new(Rails.root) }
+
+ before do
+ # The `AS::ConfigurationFile` calls `read` in `def initialize`,
+ # thus we cannot use `expect_next_instance_of`
+ # rubocop:disable RSpec/AnyInstanceOf
+ expect_any_instance_of(ActiveSupport::ConfigurationFile)
+ .to receive(:read).with(Rails.root.join('config/database.yml')).and_return(database_yml)
+ # rubocop:enable RSpec/AnyInstanceOf
+ end
+
+ shared_examples 'hash containing main: connection name' do
+ it 'returns a hash containing only main:' do
+ database_configuration = configuration.database_configuration
+
+ expect(database_configuration).to match(
+ "production" => { "main" => a_hash_including("adapter") },
+ "development" => { "main" => a_hash_including("adapter" => "postgresql") },
+ "test" => { "main" => a_hash_including("adapter" => "postgresql") }
+ )
+ end
+ end
+
+ context 'when a new syntax is used' do
+ let(:database_yml) do
+ <<-EOS
+ production:
+ main:
+ adapter: postgresql
+ encoding: unicode
+ database: gitlabhq_production
+ username: git
+ password: "secure password"
+ host: localhost
+
+ development:
+ main:
+ adapter: postgresql
+ encoding: unicode
+ database: gitlabhq_development
+ username: postgres
+ password: "secure password"
+ host: localhost
+ variables:
+ statement_timeout: 15s
+
+ test: &test
+ main:
+ adapter: postgresql
+ encoding: unicode
+ database: gitlabhq_test
+ username: postgres
+ password:
+ host: localhost
+ prepared_statements: false
+ variables:
+ statement_timeout: 15s
+ EOS
+ end
+
+ include_examples 'hash containing main: connection name'
+
+ it 'configuration is not a legacy one' do
+ configuration.database_configuration
+
+ expect(configuration.uses_legacy_database_config).to eq(false)
+ end
+ end
+
+ context 'when a legacy syntax is used' do
+ let(:database_yml) do
+ <<-EOS
+ production:
+ adapter: postgresql
+ encoding: unicode
+ database: gitlabhq_production
+ username: git
+ password: "secure password"
+ host: localhost
+
+ development:
+ adapter: postgresql
+ encoding: unicode
+ database: gitlabhq_development
+ username: postgres
+ password: "secure password"
+ host: localhost
+ variables:
+ statement_timeout: 15s
+
+ test: &test
+ adapter: postgresql
+ encoding: unicode
+ database: gitlabhq_test
+ username: postgres
+ password:
+ host: localhost
+ prepared_statements: false
+ variables:
+ statement_timeout: 15s
+ EOS
+ end
+
+ include_examples 'hash containing main: connection name'
+
+ it 'configuration is legacy' do
+ configuration.database_configuration
+
+ expect(configuration.uses_legacy_database_config).to eq(true)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/path_regex_spec.rb b/spec/lib/gitlab/path_regex_spec.rb
index d343634fb92..aa13660deb4 100644
--- a/spec/lib/gitlab/path_regex_spec.rb
+++ b/spec/lib/gitlab/path_regex_spec.rb
@@ -468,6 +468,7 @@ RSpec.describe Gitlab::PathRegex do
end
let_it_be(:git_paths) { container_paths.map { |path| path + '.git' } }
+ let_it_be(:git_lfs_paths) { git_paths.flat_map { |path| [path + '/info/lfs/', path + '/gitlab-lfs/'] } }
let_it_be(:snippet_paths) { container_paths.grep(%r{snippets/\d}) }
let_it_be(:wiki_git_paths) { (container_paths - snippet_paths).map { |path| path + '.wiki.git' } }
let_it_be(:invalid_git_paths) { invalid_paths.map { |path| path + '.git' } }
@@ -498,6 +499,15 @@ RSpec.describe Gitlab::PathRegex do
end
end
+ describe '.repository_git_lfs_route_regex' do
+ subject { %r{\A#{described_class.repository_git_lfs_route_regex}\z} }
+
+ it 'matches the expected paths' do
+ expect_route_match(git_lfs_paths)
+ expect_no_route_match(container_paths + invalid_paths + git_paths + invalid_git_paths)
+ end
+ end
+
describe '.repository_wiki_git_route_regex' do
subject { %r{\A#{described_class.repository_wiki_git_route_regex}\z} }
diff --git a/spec/lib/gitlab/rack_attack/request_spec.rb b/spec/lib/gitlab/rack_attack/request_spec.rb
new file mode 100644
index 00000000000..3be7ec17e45
--- /dev/null
+++ b/spec/lib/gitlab/rack_attack/request_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::RackAttack::Request do
+ describe 'FILES_PATH_REGEX' do
+ subject { described_class::FILES_PATH_REGEX }
+
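+ # Nested project paths have to be URL-encoded (%2F) for the regex to match.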
+ it { is_expected.to match('/api/v4/projects/1/repository/files/README') }
+ it { is_expected.to match('/api/v4/projects/1/repository/files/README?ref=master') }
+ it { is_expected.to match('/api/v4/projects/1/repository/files/README/blame') }
+ it { is_expected.to match('/api/v4/projects/1/repository/files/README/raw') }
+ it { is_expected.to match('/api/v4/projects/some%2Fnested%2Frepo/repository/files/README') }
+ it { is_expected.not_to match('/api/v4/projects/some/nested/repo/repository/files/README') }
+ end
+end
diff --git a/spec/lib/gitlab/rack_attack_spec.rb b/spec/lib/gitlab/rack_attack_spec.rb
index 788d2eac61f..8f03905e08d 100644
--- a/spec/lib/gitlab/rack_attack_spec.rb
+++ b/spec/lib/gitlab/rack_attack_spec.rb
@@ -10,12 +10,19 @@ RSpec.describe Gitlab::RackAttack, :aggregate_failures do
let(:throttles) do
{
- throttle_unauthenticated: Gitlab::Throttle.unauthenticated_options,
- throttle_authenticated_api: Gitlab::Throttle.authenticated_api_options,
+ throttle_unauthenticated_api: Gitlab::Throttle.options(:api, authenticated: false),
+ throttle_authenticated_api: Gitlab::Throttle.options(:api, authenticated: true),
+ throttle_unauthenticated_web: Gitlab::Throttle.unauthenticated_web_options,
+ throttle_authenticated_web: Gitlab::Throttle.authenticated_web_options,
throttle_product_analytics_collector: { limit: 100, period: 60 },
- throttle_unauthenticated_protected_paths: Gitlab::Throttle.unauthenticated_options,
- throttle_authenticated_protected_paths_api: Gitlab::Throttle.authenticated_api_options,
- throttle_authenticated_protected_paths_web: Gitlab::Throttle.authenticated_web_options
+ throttle_unauthenticated_protected_paths: Gitlab::Throttle.protected_paths_options,
+ throttle_authenticated_protected_paths_api: Gitlab::Throttle.protected_paths_options,
+ throttle_authenticated_protected_paths_web: Gitlab::Throttle.protected_paths_options,
+ throttle_unauthenticated_packages_api: Gitlab::Throttle.options(:packages_api, authenticated: false),
+ throttle_authenticated_packages_api: Gitlab::Throttle.options(:packages_api, authenticated: true),
+ throttle_authenticated_git_lfs: Gitlab::Throttle.throttle_authenticated_git_lfs_options,
+ throttle_unauthenticated_files_api: Gitlab::Throttle.options(:files_api, authenticated: false),
+ throttle_authenticated_files_api: Gitlab::Throttle.options(:files_api, authenticated: true)
}
end
@@ -84,6 +91,15 @@ RSpec.describe Gitlab::RackAttack, :aggregate_failures do
end
end
+ it 'enables dry-runs for `throttle_unauthenticated_api` and `throttle_unauthenticated_web` when selecting `throttle_unauthenticated`' do
+ stub_env('GITLAB_THROTTLE_DRY_RUN', 'throttle_unauthenticated')
+
+ described_class.configure(fake_rack_attack)
+
+ expect(fake_rack_attack).to have_received(:track).with('throttle_unauthenticated_api', throttles[:throttle_unauthenticated_api])
+ expect(fake_rack_attack).to have_received(:track).with('throttle_unauthenticated_web', throttles[:throttle_unauthenticated_web])
+ end
+
context 'user allowlist' do
subject { described_class.user_allowlist }
diff --git a/spec/lib/gitlab/reference_extractor_spec.rb b/spec/lib/gitlab/reference_extractor_spec.rb
index f6e69aa6533..177e9d346b6 100644
--- a/spec/lib/gitlab/reference_extractor_spec.rb
+++ b/spec/lib/gitlab/reference_extractor_spec.rb
@@ -332,14 +332,59 @@ RSpec.describe Gitlab::ReferenceExtractor do
it 'returns visible references of given type' do
expect(subject.references(:issue)).to eq([issue])
end
+ end
- it 'does not increase stateful_not_visible_counter' do
- expect { subject.references(:issue) }.not_to change { subject.stateful_not_visible_counter }
- end
+ it 'does not return any references' do
+ expect(subject.references(:issue)).to be_empty
+ end
+ end
+
+ describe '#all_visible?' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project2) { create(:project) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:issue2) { create(:issue, project: project2) }
+
+ let(:text) { "Ref. #{issue.to_reference} and #{issue2.to_reference(project)}" }
+
+ subject { described_class.new(project, user) }
+
+ before do
+ subject.analyze(text)
end
- it 'increases stateful_not_visible_counter' do
- expect { subject.references(:issue) }.to change { subject.stateful_not_visible_counter }.by(1)
+ it 'returns true if no references were parsed yet' do
+ expect(subject.all_visible?).to be_truthy
+ end
+
+ context 'when references was already called' do
+ let(:membership) { [] }
+
+ before do
+ membership.each { |p| p.add_developer(user) }
+
+ subject.references(:issue)
+ end
+
+ it 'returns false' do
+ expect(subject.all_visible?).to be_falsey
+ end
+
+ context 'when user can access only some references' do
+ let(:membership) { [project] }
+
+ it 'returns false' do
+ expect(subject.all_visible?).to be_falsey
+ end
+ end
+
+ context 'when user can access all references' do
+ let(:membership) { [project, project2] }
+
+ it 'returns true' do
+ expect(subject.all_visible?).to be_truthy
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb
index c1c97e87a4c..f1b4e50b1eb 100644
--- a/spec/lib/gitlab/regex_spec.rb
+++ b/spec/lib/gitlab/regex_spec.rb
@@ -924,4 +924,25 @@ RSpec.describe Gitlab::Regex do
it { is_expected.not_to match('/api/v4/groups/1234/packages/debian/dists/stable/Release.gpg') }
it { is_expected.not_to match('/api/v4/groups/1234/packages/debian/pool/compon/a/pkg/file.name') }
end
+
+ describe '.composer_package_version_regex' do
+ subject { described_class.composer_package_version_regex }
+
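+ # Only plain versions (optionally "v"-prefixed, with x/X wildcards) match; pre-release suffixes and path traversal attempts do not.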
+ it { is_expected.to match('v1.2.3') }
+ it { is_expected.to match('v1.2.x') }
+ it { is_expected.to match('v1.2.X') }
+ it { is_expected.to match('1.2.3') }
+ it { is_expected.to match('1') }
+ it { is_expected.to match('v1') }
+ it { is_expected.to match('1.2') }
+ it { is_expected.to match('v1.2') }
+ it { is_expected.not_to match('1.2.3-beta') }
+ it { is_expected.not_to match('1.2.x-beta') }
+ it { is_expected.not_to match('1.2.X-beta') }
+ it { is_expected.not_to match('1.2.3-alpha.3') }
+ it { is_expected.not_to match('1./2.3') }
+ it { is_expected.not_to match('v1./2.3') }
+ it { is_expected.not_to match('../../../../../1.2.3') }
+ it { is_expected.not_to match('%2e%2e%2f1.2.3') }
+ end
end
diff --git a/spec/lib/gitlab/repository_cache/preloader_spec.rb b/spec/lib/gitlab/repository_cache/preloader_spec.rb
new file mode 100644
index 00000000000..8c6618c9f8f
--- /dev/null
+++ b/spec/lib/gitlab/repository_cache/preloader_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::RepositoryCache::Preloader, :use_clean_rails_redis_caching do
+ let(:projects) { create_list(:project, 2, :repository) }
+ let(:repositories) { projects.map(&:repository) }
+
+ describe '#preload' do
+ context 'when the values are already cached' do
+ before do
+ # Warm the cache but use a different model so they are not memoized
+ repos = Project.id_in(projects).order(:id).map(&:repository)
+
+ allow(repos[0].head_tree).to receive(:readme_path).and_return('README.txt')
+ allow(repos[1].head_tree).to receive(:readme_path).and_return('README.md')
+
+ repos.map(&:exists?)
+ repos.map(&:readme_path)
+ end
+
+ it 'prevents individual cache reads for cached methods' do
+ expect(Rails.cache).to receive(:read_multi).once.and_call_original
+
+ described_class.new(repositories).preload(
+ %i[exists? readme_path]
+ )
+
+ expect(Rails.cache).not_to receive(:read)
+ expect(Rails.cache).not_to receive(:write)
+
+ expect(repositories[0].exists?).to eq(true)
+ expect(repositories[0].readme_path).to eq('README.txt')
+
+ expect(repositories[1].exists?).to eq(true)
+ expect(repositories[1].readme_path).to eq('README.md')
+ end
+ end
+
+ context 'when values are not cached' do
+ it 'reads and writes from cache individually' do
+ described_class.new(repositories).preload(
+ %i[exists? has_visible_content?]
+ )
+
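+ # 2 repositories x 2 uncached methods = 4 individual cache reads and writes.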
+ expect(Rails.cache).to receive(:read).exactly(4).times
+ expect(Rails.cache).to receive(:write).exactly(4).times
+
+ repositories.each(&:exists?)
+ repositories.each(&:has_visible_content?)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/search_results_spec.rb b/spec/lib/gitlab/search_results_spec.rb
index b8972f28889..27d65e14347 100644
--- a/spec/lib/gitlab/search_results_spec.rb
+++ b/spec/lib/gitlab/search_results_spec.rb
@@ -148,13 +148,13 @@ RSpec.describe Gitlab::SearchResults do
end
end
- it 'includes merge requests from source and target projects' do
+ it 'does not include merge requests from source projects' do
forked_project = fork_project(project, user)
merge_request_2 = create(:merge_request, target_project: project, source_project: forked_project, title: 'foo')
results = described_class.new(user, 'foo', Project.where(id: forked_project.id))
- expect(results.objects('merge_requests')).to include merge_request_2
+ expect(results.objects('merge_requests')).not_to include merge_request_2
end
describe '#merge_requests' do
diff --git a/spec/lib/gitlab/seeder_spec.rb b/spec/lib/gitlab/seeder_spec.rb
new file mode 100644
index 00000000000..877461a7064
--- /dev/null
+++ b/spec/lib/gitlab/seeder_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Seeder do
+ describe '.quiet' do
+ it 'disables mail deliveries' do
+ expect(ActionMailer::Base.perform_deliveries).to eq(true)
+
+ described_class.quiet do
+ expect(ActionMailer::Base.perform_deliveries).to eq(false)
+ end
+
+ expect(ActionMailer::Base.perform_deliveries).to eq(true)
+ end
+
+ it 'disables new note notifications' do
+ note = create(:note_on_issue)
+
+ notification_service = NotificationService.new
+
+ expect(notification_service).to receive(:send_new_note_notifications).twice
+
+ notification_service.new_note(note)
+
+ described_class.quiet do
+ expect(notification_service.new_note(note)).to eq(nil)
+ end
+
+ notification_service.new_note(note)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_cluster/cli_spec.rb b/spec/lib/gitlab/sidekiq_cluster/cli_spec.rb
index 3dd5ac8ee6c..e818b03cf75 100644
--- a/spec/lib/gitlab/sidekiq_cluster/cli_spec.rb
+++ b/spec/lib/gitlab/sidekiq_cluster/cli_spec.rb
@@ -48,6 +48,18 @@ RSpec.describe Gitlab::SidekiqCluster::CLI do
cli.run(%w(*))
end
+ it 'raises an error when the arguments contain newlines' do
+ invalid_arguments = [
+ ["foo\n"],
+ ["foo\r"],
+ %W[foo b\nar]
+ ]
+
+ invalid_arguments.each do |arguments|
+ expect { cli.run(arguments) }.to raise_error(described_class::CommandError)
+ end
+ end
+
context 'with --negate flag' do
it 'starts Sidekiq workers for all queues in all_queues.yml except the ones in argv' do
expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(['baz'])
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
index d67cb95f483..cc69a11f7f8 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
@@ -9,7 +9,14 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
described_class.new(job, queue)
end
- let(:job) { { 'class' => 'AuthorizedProjectsWorker', 'args' => [1], 'jid' => '123' } }
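+ # Example PostgreSQL WAL positions (LSNs) for the main and ci database connections.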
+ let(:wal_locations) do
+ {
+ main: '0/D525E3A8',
+ ci: 'AB/12345'
+ }
+ end
+
+ let(:job) { { 'class' => 'AuthorizedProjectsWorker', 'args' => [1], 'jid' => '123', 'wal_locations' => wal_locations } }
let(:queue) { 'authorized_projects' }
let(:idempotency_key) do
@@ -74,13 +81,39 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
context 'when there was no job in the queue yet' do
it { expect(duplicate_job.check!).to eq('123') }
- it "adds a key with ttl set to #{described_class::DUPLICATE_KEY_TTL}" do
+ it "adds a idempotency key with ttl set to #{described_class::DUPLICATE_KEY_TTL}" do
expect { duplicate_job.check! }
.to change { read_idempotency_key_with_ttl(idempotency_key) }
.from([nil, -2])
.to(['123', be_within(1).of(described_class::DUPLICATE_KEY_TTL)])
end
+ context 'when wal locations is not empty' do
+ it "adds a existing wal locations key with ttl set to #{described_class::DUPLICATE_KEY_TTL}" do
+ expect { duplicate_job.check! }
+ .to change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :main)) }
+ .from([nil, -2])
+ .to([wal_locations[:main], be_within(1).of(described_class::DUPLICATE_KEY_TTL)])
+ .and change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :ci)) }
+ .from([nil, -2])
+ .to([wal_locations[:ci], be_within(1).of(described_class::DUPLICATE_KEY_TTL)])
+ end
+ end
+
+ context 'when preserve_latest_wal_locations_for_idempotent_jobs feature flag is disabled' do
+ before do
+ stub_feature_flags(preserve_latest_wal_locations_for_idempotent_jobs: false)
+ end
+
+ it "does not change the existing wal locations key's TTL" do
+ expect { duplicate_job.check! }
+ .to not_change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :main)) }
+ .from([nil, -2])
+ .and not_change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :ci)) }
+ .from([nil, -2])
+ end
+ end
+
it "adds the idempotency key to the jobs payload" do
expect { duplicate_job.check! }.to change { job['idempotency_key'] }.from(nil).to(idempotency_key)
end
@@ -89,6 +122,9 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
context 'when there was already a job with same arguments in the same queue' do
before do
set_idempotency_key(idempotency_key, 'existing-key')
+ wal_locations.each do |config_name, location|
+ set_idempotency_key(existing_wal_location_key(idempotency_key, config_name), location)
+ end
end
it { expect(duplicate_job.check!).to eq('existing-key') }
@@ -99,6 +135,14 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
.from(['existing-key', -1])
end
+ it "does not change the existing wal locations key's TTL" do
+ expect { duplicate_job.check! }
+ .to not_change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :main)) }
+ .from([wal_locations[:main], -1])
+ .and not_change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :ci)) }
+ .from([wal_locations[:ci], -1])
+ end
+
it 'sets the existing jid' do
duplicate_job.check!
@@ -107,6 +151,117 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
end
end
+ describe '#update_latest_wal_location!' do
+ let(:offset) { '1024' }
+
+ before do
+ allow(duplicate_job).to receive(:pg_wal_lsn_diff).with(:main).and_return(offset)
+ allow(duplicate_job).to receive(:pg_wal_lsn_diff).with(:ci).and_return(offset)
+ end
+
+ shared_examples 'updates wal location' do
+ it 'updates the wal locations in redis with an offset' do
+ expect { duplicate_job.update_latest_wal_location! }
+ .to change { read_range_from_redis(wal_location_key(idempotency_key, :main)) }
+ .from(existing_wal_with_offset[:main])
+ .to(new_wal_with_offset[:main])
+ .and change { read_range_from_redis(wal_location_key(idempotency_key, :ci)) }
+ .from(existing_wal_with_offset[:ci])
+ .to(new_wal_with_offset[:ci])
+ end
+ end
+
+ context 'when preserve_latest_wal_locations_for_idempotent_jobs feature flag is disabled' do
+ before do
+ stub_feature_flags(preserve_latest_wal_locations_for_idempotent_jobs: false)
+ end
+
+ it "doesn't call Sidekiq.redis" do
+ expect(Sidekiq).not_to receive(:redis)
+
+ duplicate_job.update_latest_wal_location!
+ end
+
+ it "doesn't update a wal location to redis with an offset" do
+ expect { duplicate_job.update_latest_wal_location! }
+ .to not_change { read_range_from_redis(wal_location_key(idempotency_key, :main)) }
+ .from([])
+ .and not_change { read_range_from_redis(wal_location_key(idempotency_key, :ci)) }
+ .from([])
+ end
+ end
+
+ context "when the key doesn't exists in redis" do
+ include_examples 'updates wal location' do
+ let(:existing_wal_with_offset) { { main: [], ci: [] } }
+ let(:new_wal_with_offset) { wal_locations.transform_values { |v| [v, offset] } }
+ end
+ end
+
+ context "when the key exists in redis" do
+ let(:existing_offset) { '1023' }
+ let(:existing_wal_locations) do
+ {
+ main: '0/D525E3NM',
+ ci: 'AB/111112'
+ }
+ end
+
+ before do
+ rpush_to_redis_key(wal_location_key(idempotency_key, :main), existing_wal_locations[:main], existing_offset)
+ rpush_to_redis_key(wal_location_key(idempotency_key, :ci), existing_wal_locations[:ci], existing_offset)
+ end
+
+ context "when the new offset is bigger then the existing one" do
+ include_examples 'updates wal location' do
+ let(:existing_wal_with_offset) { existing_wal_locations.transform_values { |v| [v, existing_offset] } }
+ let(:new_wal_with_offset) { wal_locations.transform_values { |v| [v, offset] } }
+ end
+ end
+
+ context "when the old offset is not bigger then the existing one" do
+ let(:existing_offset) { offset }
+
+ it "does not update a wal location to redis with an offset" do
+ expect { duplicate_job.update_latest_wal_location! }
+ .to not_change { read_range_from_redis(wal_location_key(idempotency_key, :main)) }
+ .from([existing_wal_locations[:main], existing_offset])
+ .and not_change { read_range_from_redis(wal_location_key(idempotency_key, :ci)) }
+ .from([existing_wal_locations[:ci], existing_offset])
+ end
+ end
+ end
+ end
+
+ describe '#latest_wal_locations' do
+ context 'when job was deduplicated and wal locations were already persisted' do
+ before do
+ rpush_to_redis_key(wal_location_key(idempotency_key, :main), wal_locations[:main], 1024)
+ rpush_to_redis_key(wal_location_key(idempotency_key, :ci), wal_locations[:ci], 1024)
+ end
+
+ it { expect(duplicate_job.latest_wal_locations).to eq(wal_locations) }
+ end
+
+ context 'when job is not deduplicated and wal locations were not persisted' do
+ it { expect(duplicate_job.latest_wal_locations).to be_empty }
+ end
+
+ context 'when preserve_latest_wal_locations_for_idempotent_jobs feature flag is disabled' do
+ before do
+ stub_feature_flags(preserve_latest_wal_locations_for_idempotent_jobs: false)
+ end
+
+ it "doesn't call Sidekiq.redis" do
+ expect(Sidekiq).not_to receive(:redis)
+
+ duplicate_job.latest_wal_locations
+ end
+
+ it { expect(duplicate_job.latest_wal_locations).to eq({}) }
+ end
+ end
+
describe '#delete!' do
context "when we didn't track the definition" do
it { expect { duplicate_job.delete! }.not_to raise_error }
@@ -115,14 +270,79 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
context 'when the key exists in redis' do
before do
set_idempotency_key(idempotency_key, 'existing-jid')
+ wal_locations.each do |config_name, location|
+ set_idempotency_key(existing_wal_location_key(idempotency_key, config_name), location)
+ set_idempotency_key(wal_location_key(idempotency_key, config_name), location)
+ end
end
shared_examples 'deleting the duplicate job' do
- it 'removes the key from redis' do
- expect { duplicate_job.delete! }
- .to change { read_idempotency_key_with_ttl(idempotency_key) }
- .from(['existing-jid', -1])
- .to([nil, -2])
+ shared_examples 'deleting keys from redis' do |key_name|
+ it "removes the #{key_name} from redis" do
+ expect { duplicate_job.delete! }
+ .to change { read_idempotency_key_with_ttl(key) }
+ .from([from_value, -1])
+ .to([nil, -2])
+ end
+ end
+
+ shared_examples 'does not delete key from redis' do |key_name|
+ it "does not remove the #{key_name} from redis" do
+ expect { duplicate_job.delete! }
+ .to not_change { read_idempotency_key_with_ttl(key) }
+ .from([from_value, -1])
+ end
+ end
+
+ it_behaves_like 'deleting keys from redis', 'idempotency key' do
+ let(:key) { idempotency_key }
+ let(:from_value) { 'existing-jid' }
+ end
+
+ it_behaves_like 'deleting keys from redis', 'existing wal location keys for main database' do
+ let(:key) { existing_wal_location_key(idempotency_key, :main) }
+ let(:from_value) { wal_locations[:main] }
+ end
+
+ it_behaves_like 'deleting keys from redis', 'existing wal location keys for ci database' do
+ let(:key) { existing_wal_location_key(idempotency_key, :ci) }
+ let(:from_value) { wal_locations[:ci] }
+ end
+
+ it_behaves_like 'deleting keys from redis', 'latest wal location keys for main database' do
+ let(:key) { wal_location_key(idempotency_key, :main) }
+ let(:from_value) { wal_locations[:main] }
+ end
+
+ it_behaves_like 'deleting keys from redis', 'latest wal location keys for ci database' do
+ let(:key) { wal_location_key(idempotency_key, :ci) }
+ let(:from_value) { wal_locations[:ci] }
+ end
+
+ context 'when preserve_latest_wal_locations_for_idempotent_jobs feature flag is disabled' do
+ before do
+ stub_feature_flags(preserve_latest_wal_locations_for_idempotent_jobs: false)
+ end
+
+ it_behaves_like 'does not delete key from redis', 'existing wal location keys for main database' do
+ let(:key) { existing_wal_location_key(idempotency_key, :main) }
+ let(:from_value) { wal_locations[:main] }
+ end
+
+ it_behaves_like 'does not delete key from redis', 'existing wal location keys for ci database' do
+ let(:key) { existing_wal_location_key(idempotency_key, :ci) }
+ let(:from_value) { wal_locations[:ci] }
+ end
+
+ it_behaves_like 'does not delete key from redis', 'latest wal location keys for main database' do
+ let(:key) { wal_location_key(idempotency_key, :main) }
+ let(:from_value) { wal_locations[:main] }
+ end
+
+ it_behaves_like 'does not delete key from redis', 'latest wal location keys for ci database' do
+ let(:key) { wal_location_key(idempotency_key, :ci) }
+ let(:from_value) { wal_locations[:ci] }
+ end
end
end
@@ -254,10 +474,22 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
end
end
+ def existing_wal_location_key(idempotency_key, config_name)
+ "#{idempotency_key}:#{config_name}:existing_wal_location"
+ end
+
+ def wal_location_key(idempotency_key, config_name)
+ "#{idempotency_key}:#{config_name}:wal_location"
+ end
+
def set_idempotency_key(key, value = '1')
Sidekiq.redis { |r| r.set(key, value) }
end
+ def rpush_to_redis_key(key, wal, offset)
+ Sidekiq.redis { |r| r.rpush(key, [wal, offset]) }
+ end
+
def read_idempotency_key_with_ttl(key)
Sidekiq.redis do |redis|
redis.pipelined do |p|
@@ -266,4 +498,10 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
end
end
end
+
+ def read_range_from_redis(key)
+ Sidekiq.redis do |redis|
+ redis.lrange(key, 0, -1)
+ end
+ end
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executed_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executed_spec.rb
index b3d463b6f6b..9772255fc50 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executed_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executed_spec.rb
@@ -7,6 +7,10 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::Strategies::UntilExecut
describe '#perform' do
let(:proc) { -> {} }
+ before do
+ allow(fake_duplicate_job).to receive(:latest_wal_locations).and_return({})
+ end
+
it 'deletes the lock after executing' do
expect(proc).to receive(:call).ordered
expect(fake_duplicate_job).to receive(:delete!).ordered
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing_spec.rb
index d45b6c5fcd1..c4045b8c63b 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing_spec.rb
@@ -7,6 +7,10 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::Strategies::UntilExecut
describe '#perform' do
let(:proc) { -> {} }
+ before do
+ allow(fake_duplicate_job).to receive(:latest_wal_locations).and_return({})
+ end
+
it 'deletes the lock before executing' do
expect(fake_duplicate_job).to receive(:delete!).ordered
expect(proc).to receive(:call).ordered
diff --git a/spec/lib/gitlab/sidekiq_middleware/size_limiter/validator_spec.rb b/spec/lib/gitlab/sidekiq_middleware/size_limiter/validator_spec.rb
index 440eca10a88..abbfb9cd9fa 100644
--- a/spec/lib/gitlab/sidekiq_middleware/size_limiter/validator_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/size_limiter/validator_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator do
+RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator, :aggregate_failures do
let(:base_payload) do
{
"class" => "ARandomWorker",
@@ -31,10 +31,35 @@ RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator do
end
before do
+ # Settings aren't in the database in specs, but stored in memory; this is fine
+ # for these tests.
+ allow(Gitlab::CurrentSettings).to receive(:current_application_settings?).and_return(true)
stub_const("TestSizeLimiterWorker", worker_class)
end
describe '#initialize' do
+ context 'configuration from application settings' do
+ let(:validator) { described_class.new(worker_class, job_payload) }
+
+ it 'has the right defaults' do
+ expect(validator.mode).to eq(described_class::COMPRESS_MODE)
+ expect(validator.compression_threshold).to eq(described_class::DEFAULT_COMPRESSION_THRESHOLD_BYTES)
+ expect(validator.size_limit).to eq(described_class::DEFAULT_SIZE_LIMIT)
+ end
+
+ it 'allows configuration through application settings' do
+ stub_application_setting(
+ sidekiq_job_limiter_mode: 'track',
+ sidekiq_job_limiter_compression_threshold_bytes: 1,
+ sidekiq_job_limiter_limit_bytes: 2
+ )
+
+ expect(validator.mode).to eq(described_class::TRACK_MODE)
+ expect(validator.compression_threshold).to eq(1)
+ expect(validator.size_limit).to eq(2)
+ end
+ end
+
context 'when the input mode is valid' do
it 'does not log a warning message' do
expect(::Sidekiq.logger).not_to receive(:warn)
@@ -58,7 +83,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator do
it 'defaults to track mode' do
expect(::Sidekiq.logger).not_to receive(:warn)
- validator = described_class.new(TestSizeLimiterWorker, job_payload)
+ validator = described_class.new(TestSizeLimiterWorker, job_payload, mode: nil)
expect(validator.mode).to eql('track')
end
@@ -74,10 +99,12 @@ RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator do
end
context 'when the size input is invalid' do
- it 'defaults to 0 and logs a warning message' do
+ it 'logs a warning message' do
expect(::Sidekiq.logger).to receive(:warn).with('Invalid Sidekiq size limiter limit: -1')
- described_class.new(TestSizeLimiterWorker, job_payload, size_limit: -1)
+ validator = described_class.new(TestSizeLimiterWorker, job_payload, size_limit: -1)
+
+ expect(validator.size_limit).to be(0)
end
end
@@ -85,9 +112,9 @@ RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator do
it 'defaults to 0' do
expect(::Sidekiq.logger).not_to receive(:warn)
- validator = described_class.new(TestSizeLimiterWorker, job_payload)
+ validator = described_class.new(TestSizeLimiterWorker, job_payload, size_limit: nil)
- expect(validator.size_limit).to be(0)
+ expect(validator.size_limit).to be(described_class::DEFAULT_SIZE_LIMIT)
end
end
@@ -258,6 +285,22 @@ RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator do
end
end
+ context 'when job size is bigger than compression threshold and size limit is 0' do
+ let(:size_limit) { 0 }
+ let(:args) { { a: 'a' * 300 } }
+ let(:job) { job_payload(args) }
+
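+ # A size limit of 0 effectively disables the limit check, while compression above the threshold still applies.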
+ it 'does not raise an exception and compresses the arguments' do
+ expect(::Gitlab::SidekiqMiddleware::SizeLimiter::Compressor).to receive(:compress).with(
+ job, Sidekiq.dump_json(args)
+ ).and_return('a' * 40)
+
+ expect do
+ validate.call(TestSizeLimiterWorker, job)
+ end.not_to raise_error
+ end
+ end
+
context 'when the job was already compressed' do
let(:job) do
job_payload({ a: 'a' * 10 })
@@ -275,7 +318,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator do
let(:args) { { a: 'a' * 3000 } }
let(:job) { job_payload(args) }
- it 'does not raise an exception' do
+ it 'raises an exception' do
expect(::Gitlab::SidekiqMiddleware::SizeLimiter::Compressor).to receive(:compress).with(
job, Sidekiq.dump_json(args)
).and_return('a' * 60)
@@ -284,24 +327,46 @@ RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator do
validate.call(TestSizeLimiterWorker, job)
end.to raise_error(Gitlab::SidekiqMiddleware::SizeLimiter::ExceedLimitError)
end
+
+ it 'does not raise an exception when the worker allows big payloads' do
+ worker_class.big_payload!
+
+ expect(::Gitlab::SidekiqMiddleware::SizeLimiter::Compressor).to receive(:compress).with(
+ job, Sidekiq.dump_json(args)
+ ).and_return('a' * 60)
+
+ expect do
+ validate.call(TestSizeLimiterWorker, job)
+ end.not_to raise_error
+ end
end
end
end
- describe '#validate!' do
- context 'when calling SizeLimiter.validate!' do
- let(:validate) { ->(worker_clas, job) { described_class.validate!(worker_class, job) } }
+ describe '.validate!' do
+ let(:validate) { ->(worker_class, job) { described_class.validate!(worker_class, job) } }
+ it_behaves_like 'validate limit job payload size' do
before do
- stub_env('GITLAB_SIDEKIQ_SIZE_LIMITER_MODE', mode)
- stub_env('GITLAB_SIDEKIQ_SIZE_LIMITER_LIMIT_BYTES', size_limit)
- stub_env('GITLAB_SIDEKIQ_SIZE_LIMITER_COMPRESSION_THRESHOLD_BYTES', compression_threshold)
+ stub_application_setting(
+ sidekiq_job_limiter_mode: mode,
+ sidekiq_job_limiter_compression_threshold_bytes: compression_threshold,
+ sidekiq_job_limiter_limit_bytes: size_limit
+ )
end
+ end
- it_behaves_like 'validate limit job payload size'
+ it "skips background migrations" do
+ expect(described_class).not_to receive(:new)
+
+ described_class::EXEMPT_WORKER_NAMES.each do |class_name|
+ validate.call(class_name.constantize, job_payload)
+ end
end
+ end
- context 'when creating an instance with the related ENV variables' do
+ describe '#validate!' do
+ context 'when creating an instance with the related configuration variables' do
let(:validate) do
->(worker_clas, job) do
described_class.new(worker_class, job).validate!
@@ -309,9 +374,11 @@ RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator do
end
before do
- stub_env('GITLAB_SIDEKIQ_SIZE_LIMITER_MODE', mode)
- stub_env('GITLAB_SIDEKIQ_SIZE_LIMITER_LIMIT_BYTES', size_limit)
- stub_env('GITLAB_SIDEKIQ_SIZE_LIMITER_COMPRESSION_THRESHOLD_BYTES', compression_threshold)
+ stub_application_setting(
+ sidekiq_job_limiter_mode: mode,
+ sidekiq_job_limiter_compression_threshold_bytes: compression_threshold,
+ sidekiq_job_limiter_limit_bytes: size_limit
+ )
end
it_behaves_like 'validate limit job payload size'
diff --git a/spec/lib/gitlab/sidekiq_middleware_spec.rb b/spec/lib/gitlab/sidekiq_middleware_spec.rb
index 5e4e79e818e..8285cf960d2 100644
--- a/spec/lib/gitlab/sidekiq_middleware_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware_spec.rb
@@ -66,12 +66,12 @@ RSpec.describe Gitlab::SidekiqMiddleware do
::Gitlab::SidekiqMiddleware::BatchLoader,
::Labkit::Middleware::Sidekiq::Server,
::Gitlab::SidekiqMiddleware::InstrumentationLogger,
- ::Gitlab::Database::LoadBalancing::SidekiqServerMiddleware,
::Gitlab::SidekiqMiddleware::AdminMode::Server,
::Gitlab::SidekiqVersioning::Middleware,
::Gitlab::SidekiqStatus::ServerMiddleware,
::Gitlab::SidekiqMiddleware::WorkerContext::Server,
- ::Gitlab::SidekiqMiddleware::DuplicateJobs::Server
+ ::Gitlab::SidekiqMiddleware::DuplicateJobs::Server,
+ ::Gitlab::Database::LoadBalancing::SidekiqServerMiddleware
]
end
@@ -177,12 +177,12 @@ RSpec.describe Gitlab::SidekiqMiddleware do
[
::Gitlab::SidekiqMiddleware::WorkerContext::Client,
::Labkit::Middleware::Sidekiq::Client,
+ ::Gitlab::Database::LoadBalancing::SidekiqClientMiddleware,
::Gitlab::SidekiqMiddleware::DuplicateJobs::Client,
::Gitlab::SidekiqStatus::ClientMiddleware,
::Gitlab::SidekiqMiddleware::AdminMode::Client,
::Gitlab::SidekiqMiddleware::SizeLimiter::Client,
- ::Gitlab::SidekiqMiddleware::ClientMetrics,
- ::Gitlab::Database::LoadBalancing::SidekiqClientMiddleware
+ ::Gitlab::SidekiqMiddleware::ClientMetrics
]
end
diff --git a/spec/lib/gitlab/sidekiq_queue_spec.rb b/spec/lib/gitlab/sidekiq_queue_spec.rb
index 2ab32657f0e..5e91282612e 100644
--- a/spec/lib/gitlab/sidekiq_queue_spec.rb
+++ b/spec/lib/gitlab/sidekiq_queue_spec.rb
@@ -4,15 +4,15 @@ require 'spec_helper'
RSpec.describe Gitlab::SidekiqQueue, :clean_gitlab_redis_queues do
around do |example|
- Sidekiq::Queue.new('authorized_projects').clear
+ Sidekiq::Queue.new('default').clear
Sidekiq::Testing.disable!(&example)
- Sidekiq::Queue.new('authorized_projects').clear
+ Sidekiq::Queue.new('default').clear
end
- def add_job(user, args)
+ def add_job(args, user:, klass: 'AuthorizedProjectsWorker')
Sidekiq::Client.push(
- 'class' => 'AuthorizedProjectsWorker',
- 'queue' => 'authorized_projects',
+ 'class' => klass,
+ 'queue' => 'default',
'args' => args,
'meta.user' => user.username
)
@@ -20,13 +20,13 @@ RSpec.describe Gitlab::SidekiqQueue, :clean_gitlab_redis_queues do
describe '#drop_jobs!' do
shared_examples 'queue processing' do
- let(:sidekiq_queue) { described_class.new('authorized_projects') }
+ let(:sidekiq_queue) { described_class.new('default') }
let_it_be(:sidekiq_queue_user) { create(:user) }
before do
- add_job(create(:user), [1])
- add_job(sidekiq_queue_user, [2])
- add_job(sidekiq_queue_user, [3])
+ add_job([1], user: create(:user))
+ add_job([2], user: sidekiq_queue_user, klass: 'MergeWorker')
+ add_job([3], user: sidekiq_queue_user)
end
context 'when the queue is not processed in time' do
@@ -68,11 +68,19 @@ RSpec.describe Gitlab::SidekiqQueue, :clean_gitlab_redis_queues do
end
end
+ context 'when there are jobs matching the class name' do
+ include_examples 'queue processing' do
+ let(:search_metadata) { { user: sidekiq_queue_user.username, worker_class: 'AuthorizedProjectsWorker' } }
+ let(:timeout_deleted) { 1 }
+ let(:no_timeout_deleted) { 1 }
+ end
+ end
+
context 'when there are no valid metadata keys passed' do
it 'raises NoMetadataError' do
- add_job(create(:user), [1])
+ add_job([1], user: create(:user))
- expect { described_class.new('authorized_projects').drop_jobs!({ username: 'sidekiq_queue_user' }, timeout: 1) }
+ expect { described_class.new('default').drop_jobs!({ username: 'sidekiq_queue_user' }, timeout: 1) }
.to raise_error(described_class::NoMetadataError)
end
end
diff --git a/spec/lib/gitlab/tracking/snowplow_schema_validation_spec.rb b/spec/lib/gitlab/tracking/snowplow_schema_validation_spec.rb
new file mode 100644
index 00000000000..32c601ae47d
--- /dev/null
+++ b/spec/lib/gitlab/tracking/snowplow_schema_validation_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Snowplow Schema Validation' do
+ context 'snowplow events definition' do
+ shared_examples 'matches schema' do
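+ # Loads every event definition YAML under yaml_path and validates it against config/events/schema.json.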
+ it 'conforms to the schema json' do
+ paths = Dir[Rails.root.join(yaml_path)]
+
+ events = paths.each_with_object([]) do |path, definitions|
+ definitions.push(
+ YAML.safe_load(File.read(path), aliases: true)
+ )
+ end
+
+ expect(events).to all match_schema(Rails.root.join('config/events/schema.json'))
+ end
+ end
+
+ describe 'matches the schema for CE' do
+ let(:yaml_path) { 'config/events/*.yml' }
+
+ it_behaves_like 'matches schema'
+ end
+
+ describe 'matches the schema for EE' do
+ let(:yaml_path) { 'ee/config/events/*.yml' }
+
+ it_behaves_like 'matches schema'
+ end
+ end
+end
diff --git a/spec/lib/gitlab/tracking/standard_context_spec.rb b/spec/lib/gitlab/tracking/standard_context_spec.rb
index a0fb6a270a5..ca7a6b6b1c3 100644
--- a/spec/lib/gitlab/tracking/standard_context_spec.rb
+++ b/spec/lib/gitlab/tracking/standard_context_spec.rb
@@ -87,8 +87,26 @@ RSpec.describe Gitlab::Tracking::StandardContext do
end
end
- it 'does not contain any ids' do
- expect(snowplow_context.to_json[:data].keys).not_to include(:user_id, :project_id, :namespace_id)
+ it 'does not contain user id' do
+ expect(snowplow_context.to_json[:data].keys).not_to include(:user_id)
+ end
+
+ it 'contains namespace and project ids' do
+ expect(snowplow_context.to_json[:data].keys).to include(:project_id, :namespace_id)
+ end
+
+ it 'accepts just project id as integer' do
+ expect { described_class.new(project: 1).to_context }.not_to raise_error
+ end
+
+ context 'without add_namespace_and_project_to_snowplow_tracking feature' do
+ before do
+ stub_feature_flags(add_namespace_and_project_to_snowplow_tracking: false)
+ end
+
+ it 'does not contain any ids' do
+ expect(snowplow_context.to_json[:data].keys).not_to include(:user_id, :project_id, :namespace_id)
+ end
end
end
end
diff --git a/spec/lib/gitlab/tracking_spec.rb b/spec/lib/gitlab/tracking_spec.rb
index 994316f38ee..02e66458f46 100644
--- a/spec/lib/gitlab/tracking_spec.rb
+++ b/spec/lib/gitlab/tracking_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Gitlab::Tracking do
described_class.instance_variable_set("@snowplow", nil)
end
- describe '.snowplow_options' do
+ describe '.options' do
it 'returns useful client options' do
expected_fields = {
namespace: 'gl',
@@ -22,13 +22,13 @@ RSpec.describe Gitlab::Tracking do
linkClickTracking: true
}
- expect(subject.snowplow_options(nil)).to match(expected_fields)
+ expect(subject.options(nil)).to match(expected_fields)
end
it 'when feature flag is disabled' do
stub_feature_flags(additional_snowplow_tracking: false)
- expect(subject.snowplow_options(nil)).to include(
+ expect(subject.options(nil)).to include(
formTracking: false,
linkClickTracking: false
)
@@ -47,7 +47,7 @@ RSpec.describe Gitlab::Tracking do
it "delegates to #{klass} destination" do
other_context = double(:context)
- project = double(:project)
+ project = build_stubbed(:project)
user = double(:user)
expect(Gitlab::Tracking::StandardContext)
diff --git a/spec/lib/gitlab/url_builder_spec.rb b/spec/lib/gitlab/url_builder_spec.rb
index b359eb422d7..8e372ba795b 100644
--- a/spec/lib/gitlab/url_builder_spec.rb
+++ b/spec/lib/gitlab/url_builder_spec.rb
@@ -69,6 +69,27 @@ RSpec.describe Gitlab::UrlBuilder do
end
end
+ context 'when passing a compare' do
+ # NOTE: The Compare requires an actual repository, which isn't available
+ # with the `build_stubbed` strategy used by the table tests above
+ let_it_be(:compare) { create(:compare) }
+ let_it_be(:project) { compare.project }
+
+ it 'returns the full URL' do
+ expect(subject.build(compare)).to eq("#{Gitlab.config.gitlab.url}/#{project.full_path}/-/compare/#{compare.base_commit_sha}...#{compare.head_commit_sha}")
+ end
+
+ it 'returns only the path if only_path is given' do
+ expect(subject.build(compare, only_path: true)).to eq("/#{project.full_path}/-/compare/#{compare.base_commit_sha}...#{compare.head_commit_sha}")
+ end
+
+ it 'returns an empty string for missing project' do
+ expect(compare).to receive(:project).and_return(nil)
+
+ expect(subject.build(compare)).to eq('')
+ end
+ end
+
context 'when passing a commit without a project' do
let(:commit) { build_stubbed(:commit) }
diff --git a/spec/lib/gitlab/usage/metric_definition_spec.rb b/spec/lib/gitlab/usage/metric_definition_spec.rb
index 1ae8a0881ef..6406c0b5458 100644
--- a/spec/lib/gitlab/usage/metric_definition_spec.rb
+++ b/spec/lib/gitlab/usage/metric_definition_spec.rb
@@ -9,7 +9,8 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
value_type: 'string',
product_category: 'collection',
product_stage: 'growth',
- status: 'data_available',
+ status: 'active',
+ milestone: '14.1',
default_generation: 'generation_1',
key_path: 'uuid',
product_group: 'group::product analytics',
@@ -64,6 +65,7 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
:value_type | nil
:value_type | 'test'
:status | nil
+ :milestone | nil
:data_category | nil
:key_path | nil
:product_group | nil
@@ -127,9 +129,7 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
where(:status, :skip_validation?) do
'deprecated' | true
'removed' | true
- 'data_available' | false
- 'implemented' | false
- 'not_used' | false
+ 'active' | false
end
with_them do
@@ -191,7 +191,8 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
value_type: 'string',
product_category: 'collection',
product_stage: 'growth',
- status: 'data_available',
+ status: 'active',
+ milestone: '14.1',
default_generation: 'generation_1',
key_path: 'counter.category.event',
product_group: 'group::product analytics',
diff --git a/spec/lib/gitlab/usage/metric_spec.rb b/spec/lib/gitlab/usage/metric_spec.rb
index d83f59e4a7d..ea8d1a135a6 100644
--- a/spec/lib/gitlab/usage/metric_spec.rb
+++ b/spec/lib/gitlab/usage/metric_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe Gitlab::Usage::Metric do
product_group: "group::plan",
product_category: "issue_tracking",
value_type: "number",
- status: "data_available",
+ status: "active",
time_frame: "all",
data_source: "database",
instrumentation_class: "CountIssuesMetric",
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/service_ping_features_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/service_ping_features_metric_spec.rb
new file mode 100644
index 00000000000..40e9b962878
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/service_ping_features_metric_spec.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::ServicePingFeaturesMetric do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:usage_ping_features_enabled, :expected_value) do
+ true | true
+ false | false
+ end
+
+ with_them do
+ before do
+ stub_application_setting(usage_ping_features_enabled: usage_ping_features_enabled)
+ end
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'none' }
+ end
+end
diff --git a/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb
index d4148b57348..4996b0a0089 100644
--- a/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb
@@ -77,11 +77,22 @@ RSpec.describe Gitlab::UsageDataCounters::CiTemplateUniqueCounter do
let(:project_id) { 1 }
let(:config_source) { :repository_source }
- Dir.glob(File.join('lib', 'gitlab', 'ci', 'templates', '**'), base: Rails.root) do |template|
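+ # Every known CI template must either map to an explicit TEMPLATE_TO_EVENT entry or be tracked via a generated event name.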
+ described_class.ci_templates.each do |template|
next if described_class::TEMPLATE_TO_EVENT.key?(template)
- it "does not track #{template}" do
- expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to(receive(:track_event))
+ it "has an event defined for #{template}" do
+ expect do
+ described_class.track_unique_project_event(
+ project_id: project_id,
+ template: template,
+ config_source: config_source
+ )
+ end.not_to raise_error
+ end
+
+ it "tracks #{template}" do
+ expected_template_event_name = described_class.ci_template_event_name(template, :repository_source)
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).to(receive(:track_event)).with(expected_template_event_name, values: project_id)
described_class.track_unique_project_event(project_id: project_id, template: template, config_source: config_source)
end
diff --git a/spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb b/spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb
index a1dee442131..c4a84445a01 100644
--- a/spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe 'Code review events' do
code_review_events = Gitlab::UsageDataCounters::HLLRedisCounter.events_for_category("code_review")
- exceptions = %w[i_code_review_mr_diffs i_code_review_mr_single_file_diffs]
+ exceptions = %w[i_code_review_mr_diffs i_code_review_mr_single_file_diffs i_code_review_total_suggestions_applied i_code_review_total_suggestions_added]
code_review_aggregated_events += exceptions
expect(code_review_events - code_review_aggregated_events).to be_empty
diff --git a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
index 887759014f5..427dd4a205e 100644
--- a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
@@ -462,6 +462,8 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
allow(described_class).to receive(:known_events).and_return(known_events)
allow(described_class).to receive(:categories).and_return(%w(category1 category2))
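+ # Only categories listed in CATEGORIES_FOR_TOTALS receive the aggregated *_total_unique_counts metrics.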
+ stub_const('Gitlab::UsageDataCounters::HLLRedisCounter::CATEGORIES_FOR_TOTALS', %w(category1 category2))
+
described_class.track_event('event1_slot', values: entity1, time: 2.days.ago)
described_class.track_event('event2_slot', values: entity2, time: 2.days.ago)
described_class.track_event('event2_slot', values: entity3, time: 2.weeks.ago)
diff --git a/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb
index 041fc2f20a8..cd3388701fe 100644
--- a/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb
@@ -206,18 +206,32 @@ RSpec.describe Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter, :cl
end
describe '.track_add_suggestion_action' do
- subject { described_class.track_add_suggestion_action(user: user) }
+ subject { described_class.track_add_suggestion_action(note: note) }
+
+ before do
+ note.suggestions << build(:suggestion, id: 1, note: note)
+ end
it_behaves_like 'a tracked merge request unique event' do
- let(:action) { described_class::MR_ADD_SUGGESTION_ACTION }
+ let(:action) { described_class::MR_USER_ADD_SUGGESTION_ACTION }
+ end
+
+ it_behaves_like 'a tracked merge request unique event' do
+ let(:action) { described_class::MR_TOTAL_ADD_SUGGESTION_ACTION }
end
end
describe '.track_apply_suggestion_action' do
- subject { described_class.track_apply_suggestion_action(user: user) }
+ subject { described_class.track_apply_suggestion_action(user: user, suggestions: suggestions) }
+
+ let(:suggestions) { [build(:suggestion, id: 1, note: note)] }
+
+ it_behaves_like 'a tracked merge request unique event' do
+ let(:action) { described_class::MR_USER_APPLY_SUGGESTION_ACTION }
+ end
it_behaves_like 'a tracked merge request unique event' do
- let(:action) { described_class::MR_APPLY_SUGGESTION_ACTION }
+ let(:action) { described_class::MR_TOTAL_APPLY_SUGGESTION_ACTION }
end
end
@@ -394,4 +408,12 @@ RSpec.describe Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter, :cl
let(:action) { described_class::MR_RESOLVE_CONFLICT_ACTION }
end
end
+
+ describe '.track_resolve_thread_in_issue_action' do
+ subject { described_class.track_resolve_thread_in_issue_action(user: user) }
+
+ it_behaves_like 'a tracked merge request unique event' do
+ let(:action) { described_class::MR_RESOLVE_THREAD_IN_ISSUE_ACTION }
+ end
+ end
end
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index 5d85ad5ad01..a70b68a181f 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -1089,6 +1089,10 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(subject[:settings][:collected_data_categories]).to eq(expected_value)
end
+
+ it 'gathers service_ping_features_enabled' do
+ expect(subject[:settings][:service_ping_features_enabled]).to eq(Gitlab::CurrentSettings.usage_ping_features_enabled)
+ end
end
end
@@ -1279,9 +1283,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
subject { described_class.redis_hll_counters }
let(:categories) { ::Gitlab::UsageDataCounters::HLLRedisCounter.categories }
- let(:ineligible_total_categories) do
- %w[source_code ci_secrets_management incident_management_alerts snippets terraform incident_management_oncall secure network_policies]
- end
context 'with redis_hll_tracking feature enabled' do
it 'has all known_events' do
@@ -1296,7 +1297,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
metrics = keys.map { |key| "#{key}_weekly" } + keys.map { |key| "#{key}_monthly" }
- if ineligible_total_categories.exclude?(category)
+ if ::Gitlab::UsageDataCounters::HLLRedisCounter::CATEGORIES_FOR_TOTALS.include?(category)
metrics.append("#{category}_total_unique_counts_weekly", "#{category}_total_unique_counts_monthly")
end
diff --git a/spec/lib/gitlab/x509/tag_spec.rb b/spec/lib/gitlab/x509/tag_spec.rb
index be120aaf16a..f52880cfc52 100644
--- a/spec/lib/gitlab/x509/tag_spec.rb
+++ b/spec/lib/gitlab/x509/tag_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::X509::Tag do
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
let(:project) { create(:project, :repository) }
- shared_examples 'signed tag' do
+ describe 'signed tag' do
let(:tag) { project.repository.find_tag('v1.1.1') }
let(:certificate_attributes) do
{
@@ -33,24 +33,10 @@ RSpec.describe Gitlab::X509::Tag do
it { expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes) }
end
- shared_examples 'unsigned tag' do
+ describe 'unsigned tag' do
let(:tag) { project.repository.find_tag('v1.0.0') }
it { expect(signature).to be_nil }
end
-
- context 'with :get_tag_signatures enabled' do
- it_behaves_like 'signed tag'
- it_behaves_like 'unsigned tag'
- end
-
- context 'with :get_tag_signatures disabled' do
- before do
- stub_feature_flags(get_tag_signatures: false)
- end
-
- it_behaves_like 'signed tag'
- it_behaves_like 'unsigned tag'
- end
end
end
diff --git a/spec/lib/gitlab/zentao/client_spec.rb b/spec/lib/gitlab/zentao/client_spec.rb
new file mode 100644
index 00000000000..e3a335c1e89
--- /dev/null
+++ b/spec/lib/gitlab/zentao/client_spec.rb
@@ -0,0 +1,105 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Zentao::Client do
+ subject(:integration) { described_class.new(zentao_integration) }
+
+ let(:zentao_integration) { create(:zentao_integration) }
+ let(:mock_get_products_url) { integration.send(:url, "products/#{zentao_integration.zentao_product_xid}") }
+
+ describe '#new' do
+ context 'when integration is nil' do
+ let(:zentao_integration) { nil }
+
+ it 'raises ConfigError' do
+ expect { integration }.to raise_error(described_class::ConfigError)
+ end
+ end
+
+ context 'when integration is provided' do
+ it 'is initialized successfully' do
+ expect { integration }.not_to raise_error
+ end
+ end
+ end
+
+ describe '#fetch_product' do
+ let(:mock_headers) do
+ {
+ headers: {
+ 'Content-Type' => 'application/json',
+ 'Token' => zentao_integration.api_token
+ }
+ }
+ end
+
+ context 'with valid product' do
+ let(:mock_response) { { 'id' => zentao_integration.zentao_product_xid } }
+
+ before do
+ WebMock.stub_request(:get, mock_get_products_url)
+ .with(mock_headers).to_return(status: 200, body: mock_response.to_json)
+ end
+
+ it 'fetches the product' do
+ expect(integration.fetch_product(zentao_integration.zentao_product_xid)).to eq mock_response
+ end
+ end
+
+ context 'with invalid product' do
+ before do
+ WebMock.stub_request(:get, mock_get_products_url)
+ .with(mock_headers).to_return(status: 404, body: {}.to_json)
+ end
+
+ it 'returns an empty hash' do
+ expect(integration.fetch_product(zentao_integration.zentao_product_xid)).to eq({})
+ end
+ end
+
+ context 'with invalid response' do
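+ # A response body that fails JSON parsing is treated the same as an empty product.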
+ before do
+ WebMock.stub_request(:get, mock_get_products_url)
+ .with(mock_headers).to_return(status: 200, body: '[invalid json}')
+ end
+
+ it 'returns an empty hash' do
+ expect(integration.fetch_product(zentao_integration.zentao_product_xid)).to eq({})
+ end
+ end
+ end
+
+ describe '#ping' do
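+ # Ping is stubbed against the same products endpoint; success depends on the 'deleted' flag in the response.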
+ let(:mock_headers) do
+ {
+ headers: {
+ 'Content-Type' => 'application/json',
+ 'Token' => zentao_integration.api_token
+ }
+ }
+ end
+
+ context 'with valid resource' do
+ before do
+ WebMock.stub_request(:get, mock_get_products_url)
+ .with(mock_headers).to_return(status: 200, body: { 'deleted' => '0' }.to_json)
+ end
+
+ it 'responds with success' do
+ expect(integration.ping[:success]).to eq true
+ end
+ end
+
+ context 'with deleted resource' do
+ before do
+ WebMock.stub_request(:get, mock_get_products_url)
+ .with(mock_headers).to_return(status: 200, body: { 'deleted' => '1' }.to_json)
+ end
+
+ it 'responds with failure' do
+ expect(integration.ping[:success]).to eq false
+ end
+ end
+ end
+end
diff --git a/spec/lib/marginalia_spec.rb b/spec/lib/marginalia_spec.rb
index dd57cd7980e..3f39d969dbd 100644
--- a/spec/lib/marginalia_spec.rb
+++ b/spec/lib/marginalia_spec.rb
@@ -42,7 +42,8 @@ RSpec.describe 'Marginalia spec' do
{
"application" => "test",
"endpoint_id" => "MarginaliaTestController#first_user",
- "correlation_id" => correlation_id
+ "correlation_id" => correlation_id,
+ "db_config_name" => "main"
}
end
@@ -51,6 +52,29 @@ RSpec.describe 'Marginalia spec' do
expect(recorded.log.last).to include("#{component}:#{value}")
end
end
+
+ context 'when using CI database' do
+ let(:component_map) do
+ {
+ "application" => "test",
+ "endpoint_id" => "MarginaliaTestController#first_user",
+ "correlation_id" => correlation_id,
+ "db_config_name" => "ci"
+ }
+ end
+
+ before do |example|
+ skip_if_multiple_databases_not_setup
+
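+ # Route User queries through the CI connection so Marginalia annotates them with db_config_name "ci".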
+ allow(User).to receive(:connection) { Ci::CiDatabaseRecord.connection }
+ end
+
+ it 'generates a query that includes the component and value' do
+ component_map.each do |component, value|
+ expect(recorded.log.last).to include("#{component}:#{value}")
+ end
+ end
+ end
end
describe 'for Sidekiq worker jobs' do
@@ -79,7 +103,8 @@ RSpec.describe 'Marginalia spec' do
"application" => "sidekiq",
"endpoint_id" => "MarginaliaTestJob",
"correlation_id" => sidekiq_job['correlation_id'],
- "jid" => sidekiq_job['jid']
+ "jid" => sidekiq_job['jid'],
+ "db_config_name" => "main"
}
end
@@ -100,9 +125,10 @@ RSpec.describe 'Marginalia spec' do
let(:component_map) do
{
- "application" => "sidekiq",
- "endpoint_id" => "ActionMailer::MailDeliveryJob",
- "jid" => delivery_job.job_id
+ "application" => "sidekiq",
+ "endpoint_id" => "ActionMailer::MailDeliveryJob",
+ "jid" => delivery_job.job_id,
+ "db_config_name" => "main"
}
end
diff --git a/spec/lib/object_storage/config_spec.rb b/spec/lib/object_storage/config_spec.rb
index 0ead2a1d269..21b8a44b3d6 100644
--- a/spec/lib/object_storage/config_spec.rb
+++ b/spec/lib/object_storage/config_spec.rb
@@ -188,6 +188,7 @@ RSpec.describe ObjectStorage::Config do
end
context 'with SSE-KMS enabled' do
+ it { expect(subject.aws_server_side_encryption_enabled?).to be true }
it { expect(subject.server_side_encryption).to eq('AES256') }
it { expect(subject.server_side_encryption_kms_key_id).to eq('arn:aws:12345') }
it { expect(subject.fog_attributes.keys).to match_array(%w(x-amz-server-side-encryption x-amz-server-side-encryption-aws-kms-key-id)) }
@@ -196,6 +197,7 @@ RSpec.describe ObjectStorage::Config do
context 'with only server side encryption enabled' do
let(:storage_options) { { server_side_encryption: 'AES256' } }
+ it { expect(subject.aws_server_side_encryption_enabled?).to be true }
it { expect(subject.server_side_encryption).to eq('AES256') }
it { expect(subject.server_side_encryption_kms_key_id).to be_nil }
it { expect(subject.fog_attributes).to eq({ 'x-amz-server-side-encryption' => 'AES256' }) }
@@ -204,6 +206,7 @@ RSpec.describe ObjectStorage::Config do
context 'without encryption enabled' do
let(:storage_options) { {} }
+ it { expect(subject.aws_server_side_encryption_enabled?).to be false }
it { expect(subject.server_side_encryption).to be_nil }
it { expect(subject.server_side_encryption_kms_key_id).to be_nil }
it { expect(subject.fog_attributes).to eq({}) }
@@ -215,6 +218,5 @@ RSpec.describe ObjectStorage::Config do
end
it { expect(subject.enabled?).to be false }
- it { expect(subject.fog_attributes).to eq({}) }
end
end
diff --git a/spec/lib/sidebars/menu_spec.rb b/spec/lib/sidebars/menu_spec.rb
index 1db80351e45..eb6a68f1afd 100644
--- a/spec/lib/sidebars/menu_spec.rb
+++ b/spec/lib/sidebars/menu_spec.rb
@@ -198,4 +198,27 @@ RSpec.describe Sidebars::Menu do
end
end
end
+
+ describe '#link' do
+ let(:foo_path) { '/foo_path' }
+
+ let(:foo_menu) do
+ ::Sidebars::MenuItem.new(
+ title: 'foo',
+ link: foo_path,
+ active_routes: {},
+ item_id: :foo
+ )
+ end
+
+ it 'returns first visible menu item link' do
+ menu.add_item(foo_menu)
+
+ expect(menu.link).to eq foo_path
+ end
+
+ it 'returns nil if there are no visible menu items' do
+ expect(menu.link).to be_nil
+ end
+ end
end
diff --git a/spec/lib/sidebars/projects/menus/learn_gitlab_menu_spec.rb b/spec/lib/sidebars/projects/menus/learn_gitlab_menu_spec.rb
index 231e5a850c2..36a76e70a48 100644
--- a/spec/lib/sidebars/projects/menus/learn_gitlab_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/learn_gitlab_menu_spec.rb
@@ -4,15 +4,13 @@ require 'spec_helper'
RSpec.describe Sidebars::Projects::Menus::LearnGitlabMenu do
let_it_be(:project) { build(:project) }
- let_it_be(:experiment_enabled) { true }
- let_it_be(:tracking_category) { 'Growth::Activation::Experiment::LearnGitLabB' }
+ let_it_be(:learn_gitlab_enabled) { true }
let(:context) do
Sidebars::Projects::Context.new(
current_user: nil,
container: project,
- learn_gitlab_experiment_enabled: experiment_enabled,
- learn_gitlab_experiment_tracking_category: tracking_category
+ learn_gitlab_enabled: learn_gitlab_enabled
)
end
@@ -27,7 +25,6 @@ RSpec.describe Sidebars::Projects::Menus::LearnGitlabMenu do
{
class: 'home',
data: {
- track_property: tracking_category,
track_label: 'learn_gitlab'
}
}
@@ -46,7 +43,7 @@ RSpec.describe Sidebars::Projects::Menus::LearnGitlabMenu do
end
context 'when learn gitlab experiment is disabled' do
- let(:experiment_enabled) { false }
+ let(:learn_gitlab_enabled) { false }
it 'returns false' do
expect(subject.render?).to eq false
@@ -62,7 +59,7 @@ RSpec.describe Sidebars::Projects::Menus::LearnGitlabMenu do
end
context 'when learn gitlab experiment is disabled' do
- let(:experiment_enabled) { false }
+ let(:learn_gitlab_enabled) { false }
it 'returns false' do
expect(subject.has_pill?).to eq false
diff --git a/spec/lib/sidebars/projects/menus/monitor_menu_spec.rb b/spec/lib/sidebars/projects/menus/monitor_menu_spec.rb
index 381842be5ab..77efe99aaa9 100644
--- a/spec/lib/sidebars/projects/menus/monitor_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/monitor_menu_spec.rb
@@ -49,25 +49,6 @@ RSpec.describe Sidebars::Projects::Menus::MonitorMenu do
end
end
- describe '#link' do
- let(:foo_path) { '/foo_path'}
-
- let(:foo_menu) do
- ::Sidebars::MenuItem.new(
- title: 'foo',
- link: foo_path,
- active_routes: {},
- item_id: :foo
- )
- end
-
- it 'returns first visible item link' do
- subject.insert_element_before(subject.renderable_items, subject.renderable_items.first.item_id, foo_menu)
-
- expect(subject.link).to eq foo_path
- end
- end
-
context 'Menu items' do
subject { described_class.new(context).renderable_items.index { |e| e.item_id == item_id } }
diff --git a/spec/lib/sidebars/projects/menus/settings_menu_spec.rb b/spec/lib/sidebars/projects/menus/settings_menu_spec.rb
index 9b79614db20..3079c781d73 100644
--- a/spec/lib/sidebars/projects/menus/settings_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/settings_menu_spec.rb
@@ -158,5 +158,31 @@ RSpec.describe Sidebars::Projects::Menus::SettingsMenu do
end
end
end
+
+ describe 'Usage Quotas' do
+ let(:item_id) { :usage_quotas }
+
+ describe 'with project_storage_ui feature flag enabled' do
+ before do
+ stub_feature_flags(project_storage_ui: true)
+ end
+
+ specify { is_expected.not_to be_nil }
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ specify { is_expected.to be_nil }
+ end
+ end
+
+ describe 'with project_storage_ui feature flag disabled' do
+ before do
+ stub_feature_flags(project_storage_ui: false)
+ end
+
+ specify { is_expected.to be_nil }
+ end
+ end
end
end
diff --git a/spec/lib/system_check/incoming_email_check_spec.rb b/spec/lib/system_check/incoming_email_check_spec.rb
new file mode 100644
index 00000000000..710702b93fc
--- /dev/null
+++ b/spec/lib/system_check/incoming_email_check_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe SystemCheck::IncomingEmailCheck do
+ before do
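+ # Run the check as if in production rather than the test environment.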
+ allow(Rails).to receive(:env).and_return(ActiveSupport::StringInquirer.new('production'))
+ end
+
+ describe '#multi_check' do
+ context 'when incoming e-mail is disabled' do
+ before do
+ stub_incoming_email_setting(enabled: false)
+ end
+
+ it 'does not run any checks' do
+ expect(SystemCheck).not_to receive(:run)
+
+ subject.multi_check
+ end
+ end
+
+ context 'when incoming e-mail is enabled for IMAP' do
+ before do
+ stub_incoming_email_setting(enabled: true)
+ end
+
+ it 'runs IMAP and mailroom checks' do
+ expect(SystemCheck).to receive(:run).with('Reply by email', [
+ SystemCheck::IncomingEmail::ImapAuthenticationCheck,
+ SystemCheck::IncomingEmail::InitdConfiguredCheck,
+ SystemCheck::IncomingEmail::MailRoomRunningCheck
+ ])
+
+ subject.multi_check
+ end
+ end
+
+ context 'when incoming e-mail is enabled for Microsoft Graph' do
+ before do
+ stub_incoming_email_setting(enabled: true, inbox_method: 'microsoft_graph')
+ end
+
+ it 'runs mailroom checks' do
+ expect(SystemCheck).to receive(:run).with('Reply by email', [
+ SystemCheck::IncomingEmail::InitdConfiguredCheck,
+ SystemCheck::IncomingEmail::MailRoomRunningCheck
+ ])
+
+ subject.multi_check
+ end
+ end
+ end
+end