Diffstat (limited to 'spec/lib')
-rw-r--r--  spec/lib/api/entities/basic_project_details_spec.rb | 27
-rw-r--r--  spec/lib/api/entities/bulk_import_spec.rb | 19
-rw-r--r--  spec/lib/api/entities/bulk_imports/entity_failure_spec.rb | 19
-rw-r--r--  spec/lib/api/entities/bulk_imports/entity_spec.rb | 26
-rw-r--r--  spec/lib/api/entities/ci/job_request/image_spec.rb (renamed from spec/lib/api/entities/job_request/image_spec.rb) | 2
-rw-r--r--  spec/lib/api/entities/ci/job_request/port_spec.rb (renamed from spec/lib/api/entities/job_request/port_spec.rb) | 2
-rw-r--r--  spec/lib/api/entities/group_detail_spec.rb | 19
-rw-r--r--  spec/lib/api/entities/plan_limit_spec.rb | 3
-rw-r--r--  spec/lib/api/entities/user_spec.rb | 2
-rw-r--r--  spec/lib/api/helpers/caching_spec.rb | 138
-rw-r--r--  spec/lib/backup/database_spec.rb | 2
-rw-r--r--  spec/lib/backup/gitaly_backup_spec.rb | 41
-rw-r--r--  spec/lib/backup/repositories_spec.rb | 19
-rw-r--r--  spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb | 10
-rw-r--r--  spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb | 73
-rw-r--r--  spec/lib/banzai/filter/upload_link_filter_spec.rb | 6
-rw-r--r--  spec/lib/banzai/filter/wiki_link_filter_spec.rb | 18
-rw-r--r--  spec/lib/banzai/reference_parser/base_parser_spec.rb | 33
-rw-r--r--  spec/lib/bulk_imports/clients/graphql_spec.rb | 41
-rw-r--r--  spec/lib/bulk_imports/clients/http_spec.rb | 54
-rw-r--r--  spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb | 1
-rw-r--r--  spec/lib/bulk_imports/groups/pipelines/group_avatar_pipeline_spec.rb | 77
-rw-r--r--  spec/lib/bulk_imports/groups/pipelines/members_pipeline_spec.rb | 8
-rw-r--r--  spec/lib/bulk_imports/groups/transformers/member_attributes_transformer_spec.rb | 28
-rw-r--r--  spec/lib/bulk_imports/ndjson_pipeline_spec.rb | 7
-rw-r--r--  spec/lib/bulk_imports/stage_spec.rb | 1
-rw-r--r--  spec/lib/bulk_imports/users_mapper_spec.rb | 68
-rw-r--r--  spec/lib/error_tracking/collector/sentry_request_parser_spec.rb | 44
-rw-r--r--  spec/lib/extracts_path_spec.rb | 73
-rw-r--r--  spec/lib/extracts_ref_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/analytics/unique_visits_spec.rb | 81
-rw-r--r--  spec/lib/gitlab/auth/auth_finders_spec.rb | 24
-rw-r--r--  spec/lib/gitlab/auth/ldap/adapter_spec.rb | 75
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_spec.rb | 47
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_upvotes_count_on_issues_spec.rb | 46
-rw-r--r--  spec/lib/gitlab/background_migration/delete_orphaned_deployments_spec.rb | 63
-rw-r--r--  spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb | 22
-rw-r--r--  spec/lib/gitlab/background_migration/migrate_merge_request_diff_commit_users_spec.rb | 400
-rw-r--r--  spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/background_migration/update_jira_tracker_data_deployment_type_based_on_url_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb | 41
-rw-r--r--  spec/lib/gitlab/cache/helpers_spec.rb | 49
-rw-r--r--  spec/lib/gitlab/cache/import/caching_spec.rb | 24
-rw-r--r--  spec/lib/gitlab/changelog/config_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/checks/container_moved_spec.rb (renamed from spec/lib/gitlab/checks/project_moved_spec.rb) | 44
-rw-r--r--  spec/lib/gitlab/checks/project_created_spec.rb | 31
-rw-r--r--  spec/lib/gitlab/ci/ansi2json/line_spec.rb | 33
-rw-r--r--  spec/lib/gitlab/ci/config/entry/artifacts_spec.rb | 51
-rw-r--r--  spec/lib/gitlab/ci/config/entry/reports_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/ci/lint_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/ci/matching/runner_matcher_spec.rb | 3
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/ci/pipeline/seed/build_spec.rb | 54
-rw-r--r--  spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/ci/pipeline_object_hierarchy_spec.rb | 81
-rw-r--r--  spec/lib/gitlab/ci/reports/security/identifier_spec.rb | 125
-rw-r--r--  spec/lib/gitlab/ci/reports/security/link_spec.rb | 31
-rw-r--r--  spec/lib/gitlab/ci/reports/security/scan_spec.rb | 46
-rw-r--r--  spec/lib/gitlab/ci/reports/security/scanned_resource_spec.rb | 30
-rw-r--r--  spec/lib/gitlab/ci/reports/security/scanner_spec.rb | 146
-rw-r--r--  spec/lib/gitlab/ci/reports/test_case_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/ci/status/composite_spec.rb | 19
-rw-r--r--  spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/ci/templates/managed_cluster_applications_gitlab_ci_yaml_spec.rb | 5
-rw-r--r--  spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/ci/yaml_processor/dag_spec.rb | 41
-rw-r--r--  spec/lib/gitlab/ci/yaml_processor_spec.rb | 100
-rw-r--r--  spec/lib/gitlab/closing_issue_extractor_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/composer/cache_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/consul/internal_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/content_security_policy/config_loader_spec.rb | 30
-rw-r--r--  spec/lib/gitlab/database/background_migration/batched_job_spec.rb | 80
-rw-r--r--  spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb | 148
-rw-r--r--  spec/lib/gitlab/database/background_migration/batched_migration_spec.rb | 26
-rw-r--r--  spec/lib/gitlab/database/custom_structure_spec.rb | 65
-rw-r--r--  spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb | 22
-rw-r--r--  spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb | 5
-rw-r--r--  spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb | 61
-rw-r--r--  spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb | 113
-rw-r--r--  spec/lib/gitlab/database/load_balancing/sticking_spec.rb | 83
-rw-r--r--  spec/lib/gitlab/database/load_balancing_spec.rb | 16
-rw-r--r--  spec/lib/gitlab/database/migration_helpers_spec.rb | 111
-rw-r--r--  spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb | 121
-rw-r--r--  spec/lib/gitlab/database/partitioning/partition_creator_spec.rb | 96
-rw-r--r--  spec/lib/gitlab/database/partitioning/partition_manager_spec.rb | 161
-rw-r--r--  spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb | 250
-rw-r--r--  spec/lib/gitlab/database/partitioning_migration_helpers/partitioned_foreign_key_spec.rb | 48
-rw-r--r--  spec/lib/gitlab/database/postgres_index_spec.rb | 48
-rw-r--r--  spec/lib/gitlab/database/postgresql_adapter/dump_schema_versions_mixin_spec.rb | 38
-rw-r--r--  spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/database/postgresql_adapter/type_map_cache_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/database/postgresql_database_tasks/load_schema_versions_mixin_spec.rb | 32
-rw-r--r--  spec/lib/gitlab/database/reindexing/concurrent_reindex_spec.rb | 303
-rw-r--r--  spec/lib/gitlab/database/reindexing/coordinator_spec.rb | 18
-rw-r--r--  spec/lib/gitlab/database/reindexing/index_selection_spec.rb | 57
-rw-r--r--  spec/lib/gitlab/database/reindexing/reindex_concurrently_spec.rb | 134
-rw-r--r--  spec/lib/gitlab/database/reindexing_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/database/schema_migrations/context_spec.rb | 78
-rw-r--r--  spec/lib/gitlab/database/schema_migrations/migrations_spec.rb (renamed from spec/lib/gitlab/database/schema_version_files_spec.rb) | 41
-rw-r--r--  spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/database/with_lock_retries_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb | 34
-rw-r--r--  spec/lib/gitlab/database_spec.rb | 90
-rw-r--r--  spec/lib/gitlab/deploy_key_access_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/diff/file_collection/base_spec.rb | 27
-rw-r--r--  spec/lib/gitlab/diff/file_collection/commit_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/diff/file_collection/compare_spec.rb | 35
-rw-r--r--  spec/lib/gitlab/diff/file_collection/merge_request_diff_base_spec.rb | 36
-rw-r--r--  spec/lib/gitlab/diff/file_collection/merge_request_diff_batch_spec.rb | 34
-rw-r--r--  spec/lib/gitlab/diff/position_tracer/line_strategy_spec.rb | 46
-rw-r--r--  spec/lib/gitlab/email/handler/service_desk_handler_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/email/receiver_spec.rb | 115
-rw-r--r--  spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb | 68
-rw-r--r--  spec/lib/gitlab/git/repository_spec.rb | 122
-rw-r--r--  spec/lib/gitlab/git/user_spec.rb | 43
-rw-r--r--  spec/lib/gitlab/git_access_snippet_spec.rb | 31
-rw-r--r--  spec/lib/gitlab/git_access_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/gitaly_client/blob_service_spec.rb | 100
-rw-r--r--  spec/lib/gitlab/gitaly_client/commit_service_spec.rb | 33
-rw-r--r--  spec/lib/gitlab/gitaly_client/operation_service_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/gitaly_client/repository_service_spec.rb | 13
-rw-r--r--  spec/lib/gitlab/github_import/importer/pull_request_merged_by_importer_spec.rb | 22
-rw-r--r--  spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb | 27
-rw-r--r--  spec/lib/gitlab/github_import/markdown_text_spec.rb | 7
-rw-r--r--  spec/lib/gitlab/github_import/object_counter_spec.rb | 36
-rw-r--r--  spec/lib/gitlab/github_import/parallel_scheduling_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/github_import/representation/pull_request_review_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/github_import_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/gl_repository/repo_type_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb | 420
-rw-r--r--  spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/group_search_results_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/highlight_spec.rb | 7
-rw-r--r--  spec/lib/gitlab/hook_data/issue_builder_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/import_export/all_models.yml | 47
-rw-r--r--  spec/lib/gitlab/import_export/import_test_coverage_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/import_export/project/object_builder_spec.rb | 26
-rw-r--r--  spec/lib/gitlab/import_export/project/tree_restorer_spec.rb | 21
-rw-r--r--  spec/lib/gitlab/import_export/safe_model_attributes.yml | 5
-rw-r--r--  spec/lib/gitlab/import_export/shared_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/import_export/snippet_repo_restorer_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/instrumentation_helper_spec.rb | 53
-rw-r--r--  spec/lib/gitlab/integrations/sti_type_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/jira_import/base_importer_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/jira_import/issues_importer_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/jira_import/labels_importer_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/jira_import_spec.rb | 16
-rw-r--r--  spec/lib/gitlab/json_cache_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/kas/client_spec.rb | 19
-rw-r--r--  spec/lib/gitlab/kas_spec.rb | 44
-rw-r--r--  spec/lib/gitlab/kroki_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/kubernetes/cilium_network_policy_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/kubernetes/network_policy_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/language_detection_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/lfs_token_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/memory/instrumentation_spec.rb | 52
-rw-r--r--  spec/lib/gitlab/metrics/subscribers/action_cable_spec.rb | 13
-rw-r--r--  spec/lib/gitlab/metrics/subscribers/active_record_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/metrics/subscribers/load_balancing_spec.rb | 115
-rw-r--r--  spec/lib/gitlab/object_hierarchy_spec.rb | 303
-rw-r--r--  spec/lib/gitlab/pagination/keyset/iterator_spec.rb | 141
-rw-r--r--  spec/lib/gitlab/pagination/keyset/order_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/pagination/offset_pagination_spec.rb | 74
-rw-r--r--  spec/lib/gitlab/project_search_results_spec.rb | 7
-rw-r--r--  spec/lib/gitlab/prometheus/adapter_spec.rb | 20
-rw-r--r--  spec/lib/gitlab/prometheus/query_variables_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/rate_limit_helpers_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/reactive_cache_set_cache_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/reference_extractor_spec.rb | 3
-rw-r--r--  spec/lib/gitlab/repo_path_spec.rb | 36
-rw-r--r--  spec/lib/gitlab/repository_set_cache_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/search_results_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/shell_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/sidekiq_config_spec.rb | 39
-rw-r--r--  spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb | 13
-rw-r--r--  spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb | 36
-rw-r--r--  spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb | 209
-rw-r--r--  spec/lib/gitlab/sidekiq_middleware/size_limiter/validator_spec.rb | 21
-rw-r--r--  spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb | 36
-rw-r--r--  spec/lib/gitlab/sidekiq_queue_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/sidekiq_status_spec.rb | 16
-rw-r--r--  spec/lib/gitlab/spamcheck/client_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/template_parser/ast_spec.rb (renamed from spec/lib/gitlab/changelog/ast_spec.rb) | 60
-rw-r--r--  spec/lib/gitlab/template_parser/parser_spec.rb (renamed from spec/lib/gitlab/changelog/parser_spec.rb) | 6
-rw-r--r--  spec/lib/gitlab/tracking/destinations/snowplow_spec.rb | 56
-rw-r--r--  spec/lib/gitlab/usage/docs/helper_spec.rb | 79
-rw-r--r--  spec/lib/gitlab/usage/metric_definition_spec.rb | 7
-rw-r--r--  spec/lib/gitlab/usage/metrics/instrumentations/collected_data_categories_metric_spec.rb | 15
-rw-r--r--  spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb | 75
-rw-r--r--  spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb | 3
-rw-r--r--  spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/usage_data_spec.rb | 16
-rw-r--r--  spec/lib/gitlab/utils/usage_data_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/utils_spec.rb | 16
-rw-r--r--  spec/lib/gitlab/wiki_file_finder_spec.rb | 11
-rw-r--r--  spec/lib/marginalia_spec.rb | 16
-rw-r--r--  spec/lib/object_storage/direct_upload_spec.rb | 10
-rw-r--r--  spec/lib/security/ci_configuration/sast_build_action_spec.rb | 9
-rw-r--r--  spec/lib/security/ci_configuration/secret_detection_build_action_spec.rb | 4
-rw-r--r--  spec/lib/serializers/symbolized_json_spec.rb | 41
-rw-r--r--  spec/lib/sidebars/projects/menus/deployments_menu_spec.rb | 11
-rw-r--r--  spec/lib/sidebars/projects/menus/infrastructure_menu_spec.rb | 93
-rw-r--r--  spec/lib/sidebars/projects/menus/issues_menu_spec.rb | 18
-rw-r--r--  spec/lib/sidebars/projects/menus/labels_menu_spec.rb | 61
-rw-r--r--  spec/lib/sidebars/projects/menus/members_menu_spec.rb | 35
-rw-r--r--  spec/lib/sidebars/projects/menus/monitor_menu_spec.rb | 107
-rw-r--r--  spec/lib/sidebars/projects/menus/project_information_menu_spec.rb | 59
-rw-r--r--  spec/lib/sidebars/projects/menus/scope_menu_spec.rb | 8
-rw-r--r--  spec/lib/sidebars/projects/menus/settings_menu_spec.rb | 16
212 files changed, 6142 insertions, 2392 deletions
diff --git a/spec/lib/api/entities/basic_project_details_spec.rb b/spec/lib/api/entities/basic_project_details_spec.rb
new file mode 100644
index 00000000000..dc7c4fdce4e
--- /dev/null
+++ b/spec/lib/api/entities/basic_project_details_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::BasicProjectDetails do
+ let_it_be(:project) { create(:project) }
+
+ let(:current_user) { project.owner }
+
+ subject(:output) { described_class.new(project, current_user: current_user).as_json }
+
+ describe '#default_branch' do
+ it 'delegates to Project#default_branch_or_main' do
+ expect(project).to receive(:default_branch_or_main).twice.and_call_original
+
+ expect(output).to include(default_branch: project.default_branch_or_main)
+ end
+
+ context 'anonymous user' do
+ let(:current_user) { nil }
+
+ it 'is not included' do
+ expect(output.keys).not_to include(:default_branch)
+ end
+ end
+ end
+end
diff --git a/spec/lib/api/entities/bulk_import_spec.rb b/spec/lib/api/entities/bulk_import_spec.rb
new file mode 100644
index 00000000000..2db6862b079
--- /dev/null
+++ b/spec/lib/api/entities/bulk_import_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::BulkImport do
+ let_it_be(:import) { create(:bulk_import) }
+
+ subject { described_class.new(import).as_json }
+
+ it 'has the correct attributes' do
+ expect(subject).to include(
+ :id,
+ :status,
+ :source_type,
+ :created_at,
+ :updated_at
+ )
+ end
+end
diff --git a/spec/lib/api/entities/bulk_imports/entity_failure_spec.rb b/spec/lib/api/entities/bulk_imports/entity_failure_spec.rb
new file mode 100644
index 00000000000..adc8fdcdd9c
--- /dev/null
+++ b/spec/lib/api/entities/bulk_imports/entity_failure_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::BulkImports::EntityFailure do
+ let_it_be(:failure) { create(:bulk_import_failure) }
+
+ subject { described_class.new(failure).as_json }
+
+ it 'has the correct attributes' do
+ expect(subject).to include(
+ :pipeline_class,
+ :pipeline_step,
+ :exception_class,
+ :correlation_id_value,
+ :created_at
+ )
+ end
+end
diff --git a/spec/lib/api/entities/bulk_imports/entity_spec.rb b/spec/lib/api/entities/bulk_imports/entity_spec.rb
new file mode 100644
index 00000000000..f91ae1fc5a1
--- /dev/null
+++ b/spec/lib/api/entities/bulk_imports/entity_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::BulkImports::Entity do
+ let_it_be(:entity) { create(:bulk_import_entity) }
+
+ subject { described_class.new(entity).as_json }
+
+ it 'has the correct attributes' do
+ expect(subject).to include(
+ :id,
+ :bulk_import_id,
+ :status,
+ :source_full_path,
+ :destination_name,
+ :destination_namespace,
+ :parent_id,
+ :namespace_id,
+ :project_id,
+ :created_at,
+ :updated_at,
+ :failures
+ )
+ end
+end
diff --git a/spec/lib/api/entities/job_request/image_spec.rb b/spec/lib/api/entities/ci/job_request/image_spec.rb
index f13eab6a752..55aade03129 100644
--- a/spec/lib/api/entities/job_request/image_spec.rb
+++ b/spec/lib/api/entities/ci/job_request/image_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Entities::JobRequest::Image do
+RSpec.describe API::Entities::Ci::JobRequest::Image do
let(:ports) { [{ number: 80, protocol: 'http', name: 'name' }]}
let(:image) { double(name: 'image_name', entrypoint: ['foo'], ports: ports)}
let(:entity) { described_class.new(image) }
diff --git a/spec/lib/api/entities/job_request/port_spec.rb b/spec/lib/api/entities/ci/job_request/port_spec.rb
index 4820c4a691b..8e0d2cabcfc 100644
--- a/spec/lib/api/entities/job_request/port_spec.rb
+++ b/spec/lib/api/entities/ci/job_request/port_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::API::Entities::JobRequest::Port do
+RSpec.describe ::API::Entities::Ci::JobRequest::Port do
let(:port) { double(number: 80, protocol: 'http', name: 'name')}
let(:entity) { described_class.new(port) }
diff --git a/spec/lib/api/entities/group_detail_spec.rb b/spec/lib/api/entities/group_detail_spec.rb
new file mode 100644
index 00000000000..8fcb120c809
--- /dev/null
+++ b/spec/lib/api/entities/group_detail_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::GroupDetail do
+ describe '#as_json' do
+ it 'includes prevent_sharing_groups_outside_hierarchy for a root group' do
+ group = create(:group)
+
+ expect(described_class.new(group).as_json).to include(prevent_sharing_groups_outside_hierarchy: false)
+ end
+
+ it 'excludes prevent_sharing_groups_outside_hierarchy for a subgroup' do
+ subgroup = build(:group, :nested)
+
+ expect(described_class.new(subgroup).as_json.keys).not_to include(:prevent_sharing_groups_outside_hierarchy)
+ end
+ end
+end
diff --git a/spec/lib/api/entities/plan_limit_spec.rb b/spec/lib/api/entities/plan_limit_spec.rb
index ee42c67f9b6..75e39e4f074 100644
--- a/spec/lib/api/entities/plan_limit_spec.rb
+++ b/spec/lib/api/entities/plan_limit_spec.rb
@@ -14,7 +14,8 @@ RSpec.describe API::Entities::PlanLimit do
:maven_max_file_size,
:npm_max_file_size,
:nuget_max_file_size,
- :pypi_max_file_size
+ :pypi_max_file_size,
+ :terraform_module_max_file_size
)
end
diff --git a/spec/lib/api/entities/user_spec.rb b/spec/lib/api/entities/user_spec.rb
index e35deeb6263..860f007f284 100644
--- a/spec/lib/api/entities/user_spec.rb
+++ b/spec/lib/api/entities/user_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe API::Entities::User do
subject { described_class.new(user, current_user: current_user).as_json }
it 'exposes correct attributes' do
- expect(subject).to include(:bio, :location, :public_email, :skype, :linkedin, :twitter, :website_url, :organization, :job_title, :work_information)
+ expect(subject).to include(:bio, :location, :public_email, :skype, :linkedin, :twitter, :website_url, :organization, :job_title, :work_information, :pronouns)
end
it 'exposes created_at if the current user can read the user profile' do
diff --git a/spec/lib/api/helpers/caching_spec.rb b/spec/lib/api/helpers/caching_spec.rb
index f94c44c7382..38b7b386d5c 100644
--- a/spec/lib/api/helpers/caching_spec.rb
+++ b/spec/lib/api/helpers/caching_spec.rb
@@ -3,7 +3,7 @@
require "spec_helper"
RSpec.describe API::Helpers::Caching, :use_clean_rails_redis_caching do
- subject(:instance) { Class.new.include(described_class).new }
+ subject(:instance) { Class.new.include(described_class, Grape::DSL::Headers).new }
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
@@ -44,108 +44,16 @@ RSpec.describe API::Helpers::Caching, :use_clean_rails_redis_caching do
}
end
- context "single object" do
+ context 'single object' do
let_it_be(:presentable) { create(:todo, project: project) }
- it { is_expected.to be_a(Gitlab::Json::PrecompiledJson) }
-
- it "uses the presenter" do
- expect(presenter).to receive(:represent).with(presentable, project: project)
-
- subject
- end
-
- it "is valid JSON" do
- parsed = Gitlab::Json.parse(subject.to_s)
-
- expect(parsed).to be_a(Hash)
- expect(parsed["id"]).to eq(presentable.id)
- end
-
- it "fetches from the cache" do
- expect(instance.cache).to receive(:fetch).with("#{presentable.cache_key}:#{user.cache_key}", expires_in: described_class::DEFAULT_EXPIRY).once
-
- subject
- end
-
- context "when a cache context is supplied" do
- before do
- kwargs[:cache_context] = -> (todo) { todo.project.cache_key }
- end
-
- it "uses the context to augment the cache key" do
- expect(instance.cache).to receive(:fetch).with("#{presentable.cache_key}:#{project.cache_key}", expires_in: described_class::DEFAULT_EXPIRY).once
-
- subject
- end
- end
-
- context "when expires_in is supplied" do
- it "sets the expiry when accessing the cache" do
- kwargs[:expires_in] = 7.days
-
- expect(instance.cache).to receive(:fetch).with("#{presentable.cache_key}:#{user.cache_key}", expires_in: 7.days).once
-
- subject
- end
- end
+ it_behaves_like 'object cache helper'
end
- context "for a collection of objects" do
+ context 'collection of objects' do
let_it_be(:presentable) { Array.new(5).map { create(:todo, project: project) } }
- it { is_expected.to be_an(Gitlab::Json::PrecompiledJson) }
-
- it "uses the presenter" do
- presentable.each do |todo|
- expect(presenter).to receive(:represent).with(todo, project: project)
- end
-
- subject
- end
-
- it "is valid JSON" do
- parsed = Gitlab::Json.parse(subject.to_s)
-
- expect(parsed).to be_an(Array)
-
- presentable.each_with_index do |todo, i|
- expect(parsed[i]["id"]).to eq(todo.id)
- end
- end
-
- it "fetches from the cache" do
- keys = presentable.map { |todo| "#{todo.cache_key}:#{user.cache_key}" }
-
- expect(instance.cache).to receive(:fetch_multi).with(*keys, expires_in: described_class::DEFAULT_EXPIRY).once.and_call_original
-
- subject
- end
-
- context "when a cache context is supplied" do
- before do
- kwargs[:cache_context] = -> (todo) { todo.project.cache_key }
- end
-
- it "uses the context to augment the cache key" do
- keys = presentable.map { |todo| "#{todo.cache_key}:#{project.cache_key}" }
-
- expect(instance.cache).to receive(:fetch_multi).with(*keys, expires_in: described_class::DEFAULT_EXPIRY).once.and_call_original
-
- subject
- end
- end
-
- context "expires_in is supplied" do
- it "sets the expiry when accessing the cache" do
- keys = presentable.map { |todo| "#{todo.cache_key}:#{user.cache_key}" }
- kwargs[:expires_in] = 7.days
-
- expect(instance.cache).to receive(:fetch_multi).with(*keys, expires_in: 7.days).once.and_call_original
-
- subject
- end
- end
+ it_behaves_like 'collection cache helper'
end
end
@@ -187,6 +95,42 @@ RSpec.describe API::Helpers::Caching, :use_clean_rails_redis_caching do
expect(nested_call.to_s).to eq(subject.to_s)
end
+
+ context 'Cache versioning' do
+ it 'returns cache based on version parameter' do
+ result_1 = instance.cache_action(cache_key, **kwargs.merge(version: 1)) { 'Cache 1' }
+ result_2 = instance.cache_action(cache_key, **kwargs.merge(version: 2)) { 'Cache 2' }
+
+ expect(result_1.to_s).to eq('Cache 1'.to_json)
+ expect(result_2.to_s).to eq('Cache 2'.to_json)
+ end
+ end
+
+ context 'Cache for pagination headers' do
+ described_class::PAGINATION_HEADERS.each do |pagination_header|
+ context pagination_header do
+ before do
+ instance.header(pagination_header, 100)
+ end
+
+ it 'stores and recovers pagination headers from cache' do
+ expect { perform }.not_to change { instance.header[pagination_header] }
+
+ instance.header.delete(pagination_header)
+
+ expect { perform }.to change { instance.header[pagination_header] }.from(nil).to(100)
+ end
+
+ it 'prefers headers from request than from cache' do
+ expect { perform }.not_to change { instance.header[pagination_header] }
+
+ instance.header(pagination_header, 50)
+
+ expect { perform }.not_to change { instance.header[pagination_header] }.from(50)
+ end
+ end
+ end
+ end
end
describe "#cache_action_if" do
diff --git a/spec/lib/backup/database_spec.rb b/spec/lib/backup/database_spec.rb
index 2bce4cab679..f57037d5652 100644
--- a/spec/lib/backup/database_spec.rb
+++ b/spec/lib/backup/database_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe Backup::Database do
context 'when the restore command prints errors' do
let(:visible_error) { "This is a test error\n" }
- let(:noise) { "Table projects does not exist\nmust be owner of extension pg_trgm\n" }
+ let(:noise) { "Table projects does not exist\nmust be owner of extension pg_trgm\nWARNING: no privileges could be revoked for public\n" }
let(:cmd) { %W[#{Gem.ruby} -e $stderr.write("#{noise}#{visible_error}")] }
it 'filters out noise from errors' do
diff --git a/spec/lib/backup/gitaly_backup_spec.rb b/spec/lib/backup/gitaly_backup_spec.rb
index 13567ead842..cdb35c0ce01 100644
--- a/spec/lib/backup/gitaly_backup_spec.rb
+++ b/spec/lib/backup/gitaly_backup_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Backup::GitalyBackup do
+ let(:parallel) { nil }
+ let(:parallel_storage) { nil }
let(:progress) do
Tempfile.new('progress').tap do |progress|
progress.unlink
@@ -13,7 +15,7 @@ RSpec.describe Backup::GitalyBackup do
progress.close
end
- subject { described_class.new(progress) }
+ subject { described_class.new(progress, parallel: parallel, parallel_storage: parallel_storage) }
context 'unknown' do
it 'fails to start unknown' do
@@ -30,6 +32,8 @@ RSpec.describe Backup::GitalyBackup do
project_snippet = create(:project_snippet, :repository, project: project)
personal_snippet = create(:personal_snippet, :repository, author: project.owner)
+ expect(Process).to receive(:spawn).with(anything, 'create', '-path', anything, { in: anything, out: progress }).and_call_original
+
subject.start(:create)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.enqueue(project, Gitlab::GlRepository::WIKI)
@@ -45,6 +49,28 @@ RSpec.describe Backup::GitalyBackup do
expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project_snippet.disk_path + '.bundle'))
end
+ context 'parallel option set' do
+ let(:parallel) { 3 }
+
+ it 'passes parallel option through' do
+ expect(Process).to receive(:spawn).with(anything, 'create', '-path', anything, '-parallel', '3', { in: anything, out: progress }).and_call_original
+
+ subject.start(:create)
+ subject.wait
+ end
+ end
+
+ context 'parallel_storage option set' do
+ let(:parallel_storage) { 3 }
+
+ it 'passes parallel option through' do
+ expect(Process).to receive(:spawn).with(anything, 'create', '-path', anything, '-parallel-storage', '3', { in: anything, out: progress }).and_call_original
+
+ subject.start(:create)
+ subject.wait
+ end
+ end
+
it 'raises when the exit code not zero' do
expect(subject).to receive(:bin_path).and_return(Gitlab::Utils.which('false'))
@@ -83,6 +109,8 @@ RSpec.describe Backup::GitalyBackup do
copy_bundle_to_backup_path('personal_snippet_repo.bundle', personal_snippet.disk_path + '.bundle')
copy_bundle_to_backup_path('project_snippet_repo.bundle', project_snippet.disk_path + '.bundle')
+ expect(Process).to receive(:spawn).with(anything, 'restore', '-path', anything, { in: anything, out: progress }).and_call_original
+
subject.start(:restore)
subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.enqueue(project, Gitlab::GlRepository::WIKI)
@@ -100,6 +128,17 @@ RSpec.describe Backup::GitalyBackup do
expect(collect_commit_shas.call(project_snippet.repository)).to eq(['6e44ba56a4748be361a841e759c20e421a1651a1'])
end
+ context 'parallel option set' do
+ let(:parallel) { 3 }
+
+ it 'does not pass parallel option through' do
+ expect(Process).to receive(:spawn).with(anything, 'restore', '-path', anything, { in: anything, out: progress }).and_call_original
+
+ subject.start(:restore)
+ subject.wait
+ end
+ end
+
it 'raises when the exit code not zero' do
expect(subject).to receive(:bin_path).and_return(Gitlab::Utils.which('false'))
diff --git a/spec/lib/backup/repositories_spec.rb b/spec/lib/backup/repositories_spec.rb
index d77b1e0f276..85818038c9d 100644
--- a/spec/lib/backup/repositories_spec.rb
+++ b/spec/lib/backup/repositories_spec.rb
@@ -4,7 +4,8 @@ require 'spec_helper'
RSpec.describe Backup::Repositories do
let(:progress) { spy(:stdout) }
- let(:strategy) { spy(:strategy) }
+ let(:parallel_enqueue) { true }
+ let(:strategy) { spy(:strategy, parallel_enqueue?: parallel_enqueue) }
subject { described_class.new(progress, strategy: strategy) }
@@ -80,6 +81,22 @@ RSpec.describe Backup::Repositories do
end
end
+ context 'concurrency with a strategy without parallel enqueueing support' do
+ let(:parallel_enqueue) { false }
+
+ it 'enqueues all projects sequentially' do
+ expect(Thread).not_to receive(:new)
+
+ expect(strategy).to receive(:start).with(:create)
+ projects.each do |project|
+ expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
+ end
+ expect(strategy).to receive(:wait)
+
+ subject.dump(max_concurrency: 2, max_storage_concurrency: 2)
+ end
+ end
+
[4, 10].each do |max_storage_concurrency|
context "max_storage_concurrency #{max_storage_concurrency}", quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/241701' do
let(:storage_keys) { %w[default test_second_storage] }
diff --git a/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb b/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb
index 7557b9a118d..d7bcebbbe34 100644
--- a/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb
@@ -118,7 +118,7 @@ RSpec.describe Banzai::Filter::References::ExternalIssueReferenceFilter do
end
context "redmine project" do
- let_it_be(:service) { create(:redmine_service, project: project) }
+ let_it_be(:integration) { create(:redmine_integration, project: project) }
before do
project.update!(issues_enabled: false)
@@ -140,7 +140,9 @@ RSpec.describe Banzai::Filter::References::ExternalIssueReferenceFilter do
end
context "youtrack project" do
- let_it_be(:service) { create(:youtrack_service, project: project) }
+ before_all do
+ create(:youtrack_integration, project: project)
+ end
before do
project.update!(issues_enabled: false)
@@ -183,7 +185,7 @@ RSpec.describe Banzai::Filter::References::ExternalIssueReferenceFilter do
end
context "jira project" do
- let_it_be(:service) { create(:jira_service, project: project) }
+ let_it_be(:service) { create(:jira_integration, project: project) }
let(:reference) { issue.to_reference }
@@ -215,8 +217,6 @@ RSpec.describe Banzai::Filter::References::ExternalIssueReferenceFilter do
context "ewm project" do
let_it_be(:integration) { create(:ewm_integration, project: project) }
- let(:service) { integration } # TODO: remove when https://gitlab.com/gitlab-org/gitlab/-/issues/330300 is complete
-
before do
project.update!(issues_enabled: false)
end
diff --git a/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb b/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb
index dafdc71ce64..f8a00716680 100644
--- a/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb
@@ -327,6 +327,7 @@ RSpec.describe Banzai::Filter::References::MilestoneReferenceFilter do
it_behaves_like 'String-based single-word references'
it_behaves_like 'String-based multi-word references in quotes'
it_behaves_like 'referencing a milestone in a link href'
+ it_behaves_like 'linking to a milestone as the entire link'
it_behaves_like 'cross-project / cross-namespace complete reference'
it_behaves_like 'cross-project / same-namespace complete reference'
it_behaves_like 'cross project shorthand reference'
@@ -460,4 +461,76 @@ RSpec.describe Banzai::Filter::References::MilestoneReferenceFilter do
include_context 'group milestones'
end
end
+
+ context 'checking N+1' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:group2) { create(:group) }
+ let_it_be(:project) { create(:project, :public, namespace: group) }
+ let_it_be(:project2) { create(:project, :public, namespace: group2) }
+ let_it_be(:project3) { create(:project, :public) }
+ let_it_be(:project_milestone) { create(:milestone, project: project) }
+ let_it_be(:project_milestone2) { create(:milestone, project: project) }
+ let_it_be(:project2_milestone) { create(:milestone, project: project2) }
+ let_it_be(:group2_milestone) { create(:milestone, group: group2) }
+ let_it_be(:project_reference) { "#{project_milestone.to_reference}" }
+ let_it_be(:project_reference2) { "#{project_milestone2.to_reference}" }
+ let_it_be(:project2_reference) { "#{project2_milestone.to_reference(full: true)}" }
+ let_it_be(:group2_reference) { "#{project2.full_path}%\"#{group2_milestone.name}\"" }
+
+ it 'does not have N+1 per multiple references per project', :use_sql_query_cache do
+ markdown = "#{project_reference}"
+ control_count = 4
+
+ expect do
+ reference_filter(markdown)
+ end.not_to exceed_all_query_limit(control_count)
+
+ markdown = "#{project_reference} %qwert %werty %ertyu %rtyui #{project_reference2}"
+
+ expect do
+ reference_filter(markdown)
+ end.not_to exceed_all_query_limit(control_count)
+ end
+
+ it 'has N+1 for multiple unique project/group references', :use_sql_query_cache do
+ markdown = "#{project_reference}"
+ control_count = 4
+
+ expect do
+ reference_filter(markdown, project: project)
+ end.not_to exceed_all_query_limit(control_count)
+
+ # Since we're not batching milestone queries across projects/groups,
+ # queries increase when a new project/group is added.
+ # TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/330359
+ markdown = "#{project_reference} #{group2_reference}"
+ control_count += 5
+
+ expect do
+ reference_filter(markdown)
+ end.not_to exceed_all_query_limit(control_count)
+
+ # third reference to already queried project/namespace, nothing extra (no N+1 here)
+ markdown = "#{project_reference} #{group2_reference} #{project_reference2}"
+
+ expect do
+ reference_filter(markdown)
+ end.not_to exceed_all_query_limit(control_count)
+
+ # last reference needs additional queries
+ markdown = "#{project_reference} #{group2_reference} #{project2_reference} #{project3.full_path}%test_milestone"
+ control_count += 6
+
+ expect do
+ reference_filter(markdown)
+ end.not_to exceed_all_query_limit(control_count)
+
+ # Use an iid instead of title reference
+ markdown = "#{project_reference} #{group2_reference} #{project2.full_path}%#{project2_milestone.iid} #{project3.full_path}%test_milestone"
+
+ expect do
+ reference_filter(markdown)
+ end.not_to exceed_all_query_limit(control_count)
+ end
+ end
end
diff --git a/spec/lib/banzai/filter/upload_link_filter_spec.rb b/spec/lib/banzai/filter/upload_link_filter_spec.rb
index 9ca499be665..eb45a8149c3 100644
--- a/spec/lib/banzai/filter/upload_link_filter_spec.rb
+++ b/spec/lib/banzai/filter/upload_link_filter_spec.rb
@@ -42,6 +42,12 @@ RSpec.describe Banzai::Filter::UploadLinkFilter do
let(:upload_path) { '/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg' }
let(:relative_path) { "/#{project.full_path}#{upload_path}" }
+ it 'preserves original url in data-canonical-src attribute' do
+ doc = filter(link(upload_path))
+
+ expect(doc.at_css('a')['data-canonical-src']).to eq(upload_path)
+ end
+
context 'to a project upload' do
context 'with an absolute URL' do
let(:absolute_path) { Gitlab.config.gitlab.url + relative_path }
diff --git a/spec/lib/banzai/filter/wiki_link_filter_spec.rb b/spec/lib/banzai/filter/wiki_link_filter_spec.rb
index b5b5349946b..70c7c3c74fb 100644
--- a/spec/lib/banzai/filter/wiki_link_filter_spec.rb
+++ b/spec/lib/banzai/filter/wiki_link_filter_spec.rb
@@ -22,6 +22,24 @@ RSpec.describe Banzai::Filter::WikiLinkFilter do
expect(filtered_link.attribute('href').value).to eq('/uploads/a.test')
end
+ describe 'when links are rewritable' do
+ it "stores original url in the data-canonical-src attribute" do
+ original_path = "#{repository_upload_folder}/a.jpg"
+ filtered_elements = filter("<a href='#{original_path}'><img src='#{original_path}'>example</img></a>", wiki: wiki)
+
+ expect(filtered_elements.search('img').first.attribute('data-canonical-src').value).to eq(original_path)
+ expect(filtered_elements.search('a').first.attribute('data-canonical-src').value).to eq(original_path)
+ end
+ end
+
+ describe 'when links are not rewritable' do
+ it "does not store original url in the data-canonical-src attribute" do
+ filtered_link = filter("<a href='/uploads/a.test'>Link</a>", wiki: wiki).children[0]
+
+ expect(filtered_link.value?('data-canonical-src')).to eq(false)
+ end
+ end
+
describe 'when links point to the relative wiki path' do
it 'does not rewrite links' do
path = "#{wiki.wiki_base_path}/#{repository_upload_folder}/a.jpg"
diff --git a/spec/lib/banzai/reference_parser/base_parser_spec.rb b/spec/lib/banzai/reference_parser/base_parser_spec.rb
index 18d8418ca23..095500cdc53 100644
--- a/spec/lib/banzai/reference_parser/base_parser_spec.rb
+++ b/spec/lib/banzai/reference_parser/base_parser_spec.rb
@@ -78,12 +78,31 @@ RSpec.describe Banzai::ReferenceParser::BaseParser do
describe '#referenced_by' do
context 'when references_relation is implemented' do
- it 'returns a collection of objects' do
- links = Nokogiri::HTML.fragment("<a data-foo='#{user.id}'></a>")
- .children
+ context 'and ids_only is set to false' do
+ it 'returns a collection of objects' do
+ links = Nokogiri::HTML.fragment("<a data-foo='#{user.id}'></a>")
+ .children
- expect(subject).to receive(:references_relation).and_return(User)
- expect(subject.referenced_by(links)).to eq([user])
+ expect(subject).to receive(:references_relation).and_return(User)
+ expect(subject.referenced_by(links)).to eq([user])
+ end
+ end
+
+ context 'and ids_only is set to true' do
+ it 'returns a collection of id values without performing a db query' do
+ links = Nokogiri::HTML.fragment("<a data-foo='1'></a><a data-foo='2'></a>").children
+
+ expect(subject).not_to receive(:references_relation)
+ expect(subject.referenced_by(links, ids_only: true)).to eq(%w(1 2))
+ end
+
+ context 'and the html fragment does not contain any attributes' do
+ it 'returns an empty array' do
+ links = Nokogiri::HTML.fragment("no links").children
+
+ expect(subject.referenced_by(links, ids_only: true)).to eq([])
+ end
+ end
end
end
@@ -188,7 +207,7 @@ RSpec.describe Banzai::ReferenceParser::BaseParser do
dummy = Class.new(described_class) do
self.reference_type = :test
- def gather_references(nodes)
+ def gather_references(nodes, ids_only: false)
nodes
end
end
@@ -222,7 +241,7 @@ RSpec.describe Banzai::ReferenceParser::BaseParser do
nodes.select { |n| n.id > 5 }
end
- def referenced_by(nodes)
+ def referenced_by(nodes, ids_only: false)
nodes.map(&:id)
end
end
diff --git a/spec/lib/bulk_imports/clients/graphql_spec.rb b/spec/lib/bulk_imports/clients/graphql_spec.rb
new file mode 100644
index 00000000000..2f212458c4a
--- /dev/null
+++ b/spec/lib/bulk_imports/clients/graphql_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Clients::Graphql do
+ let_it_be(:config) { create(:bulk_import_configuration) }
+
+ subject { described_class.new(url: config.url, token: config.access_token) }
+
+ describe '#execute' do
+ let(:query) { '{ metadata { version } }' }
+ let(:graphql_client_double) { double }
+ let(:response_double) { double }
+
+ before do
+ stub_const('BulkImports::MINIMUM_COMPATIBLE_MAJOR_VERSION', version)
+ allow(graphql_client_double).to receive(:execute)
+ allow(subject).to receive(:client).and_return(graphql_client_double)
+ allow(graphql_client_double).to receive(:execute).with(query).and_return(response_double)
+ allow(response_double).to receive_message_chain(:data, :metadata, :version).and_return(version)
+ end
+
+ context 'when source instance is compatible' do
+ let(:version) { '14.0.0' }
+
+ it 'marks source instance as compatible' do
+ subject.execute('test')
+
+ expect(subject.instance_variable_get(:@compatible_instance_version)).to eq(true)
+ end
+ end
+
+ context 'when source instance is incompatible' do
+ let(:version) { '13.0.0' }
+
+ it 'raises an error' do
+ expect { subject.execute('test') }.to raise_error(::BulkImports::Error, "Unsupported GitLab Version. Minimum Supported Gitlab Version #{BulkImport::MINIMUM_GITLAB_MAJOR_VERSION}.")
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/clients/http_spec.rb b/spec/lib/bulk_imports/clients/http_spec.rb
index ac42f12a3d4..c36cb80851a 100644
--- a/spec/lib/bulk_imports/clients/http_spec.rb
+++ b/spec/lib/bulk_imports/clients/http_spec.rb
@@ -5,12 +5,20 @@ require 'spec_helper'
RSpec.describe BulkImports::Clients::HTTP do
include ImportSpecHelper
- let(:uri) { 'http://gitlab.example' }
+ let(:url) { 'http://gitlab.example' }
let(:token) { 'token' }
let(:resource) { 'resource' }
+ let(:version) { "#{BulkImport::MINIMUM_GITLAB_MAJOR_VERSION}.0.0" }
let(:response_double) { double(code: 200, success?: true, parsed_response: {}) }
+ let(:version_response) { double(code: 200, success?: true, parsed_response: { 'version' => version }) }
- subject { described_class.new(uri: uri, token: token) }
+ before do
+ allow(Gitlab::HTTP).to receive(:get)
+ .with('http://gitlab.example/api/v4/version', anything)
+ .and_return(version_response)
+ end
+
+ subject { described_class.new(url: url, token: token) }
shared_examples 'performs network request' do
it 'performs network request' do
@@ -21,20 +29,20 @@ RSpec.describe BulkImports::Clients::HTTP do
context 'error handling' do
context 'when error occurred' do
- it 'raises ConnectionError' do
+ it 'raises BulkImports::Error' do
allow(Gitlab::HTTP).to receive(method).and_raise(Errno::ECONNREFUSED)
- expect { subject.public_send(method, resource) }.to raise_exception(described_class::ConnectionError)
+ expect { subject.public_send(method, resource) }.to raise_exception(BulkImports::Error)
end
end
context 'when response is not success' do
- it 'raises ConnectionError' do
+ it 'raises BulkImports::Error' do
response_double = double(code: 503, success?: false)
allow(Gitlab::HTTP).to receive(method).and_return(response_double)
- expect { subject.public_send(method, resource) }.to raise_exception(described_class::ConnectionError)
+ expect { subject.public_send(method, resource) }.to raise_exception(BulkImports::Error)
end
end
end
@@ -46,7 +54,7 @@ RSpec.describe BulkImports::Clients::HTTP do
include_examples 'performs network request' do
let(:expected_args) do
[
- 'http://gitlab.example:80/api/v4/resource',
+ 'http://gitlab.example/api/v4/resource',
hash_including(
follow_redirects: false,
query: {
@@ -96,7 +104,7 @@ RSpec.describe BulkImports::Clients::HTTP do
private
def stub_http_get(path, query, response)
- uri = "http://gitlab.example:80/api/v4/#{path}"
+ uri = "http://gitlab.example/api/v4/#{path}"
params = {
follow_redirects: false,
headers: {
@@ -116,7 +124,7 @@ RSpec.describe BulkImports::Clients::HTTP do
include_examples 'performs network request' do
let(:expected_args) do
[
- 'http://gitlab.example:80/api/v4/resource',
+ 'http://gitlab.example/api/v4/resource',
hash_including(
body: {},
follow_redirects: false,
@@ -136,7 +144,7 @@ RSpec.describe BulkImports::Clients::HTTP do
include_examples 'performs network request' do
let(:expected_args) do
[
- 'http://gitlab.example:80/api/v4/resource',
+ 'http://gitlab.example/api/v4/resource',
hash_including(
follow_redirects: false,
headers: {
@@ -152,7 +160,7 @@ RSpec.describe BulkImports::Clients::HTTP do
describe '#stream' do
it 'performs network request with stream_body option' do
expected_args = [
- 'http://gitlab.example:80/api/v4/resource',
+ 'http://gitlab.example/api/v4/resource',
hash_including(
stream_body: true,
headers: {
@@ -167,4 +175,28 @@ RSpec.describe BulkImports::Clients::HTTP do
subject.stream(resource)
end
end
+
+ context 'when source instance is incompatible' do
+ let(:version) { '13.0.0' }
+
+ it 'raises an error' do
+ expect { subject.get(resource) }.to raise_error(::BulkImports::Error, "Unsupported GitLab Version. Minimum Supported Gitlab Version #{BulkImport::MINIMUM_GITLAB_MAJOR_VERSION}.")
+ end
+ end
+
+ context 'when url is relative' do
+ let(:url) { 'http://website.example/gitlab' }
+
+ before do
+ allow(Gitlab::HTTP).to receive(:get)
+ .with('http://website.example/gitlab/api/v4/version', anything)
+ .and_return(version_response)
+ end
+
+ it 'performs network request to a relative gitlab url' do
+ expect(Gitlab::HTTP).to receive(:get).with('http://website.example/gitlab/api/v4/resource', anything).and_return(response_double)
+
+ subject.get(resource)
+ end
+ end
end
diff --git a/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb b/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb
index 533955b057c..de0b56045b3 100644
--- a/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb
+++ b/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe BulkImports::Groups::Loaders::GroupLoader do
let_it_be(:entity) { create(:bulk_import_entity, bulk_import: bulk_import) }
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
let(:service_double) { instance_double(::Groups::CreateService) }
let(:data) { { foo: :bar } }
diff --git a/spec/lib/bulk_imports/groups/pipelines/group_avatar_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/group_avatar_pipeline_spec.rb
new file mode 100644
index 00000000000..c68284aa580
--- /dev/null
+++ b/spec/lib/bulk_imports/groups/pipelines/group_avatar_pipeline_spec.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Groups::Pipelines::GroupAvatarPipeline do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+
+ let_it_be(:entity) do
+ create(
+ :bulk_import_entity,
+ group: group,
+ bulk_import: bulk_import,
+ source_full_path: 'source/full/path',
+ destination_name: 'My Destination Group',
+ destination_namespace: group.full_path
+ )
+ end
+
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ subject { described_class.new(context) }
+
+ describe '#run' do
+ it 'updates the group avatar' do
+ avatar_path = 'spec/fixtures/dk.png'
+ stub_file_download(
+ avatar_path,
+ configuration: context.configuration,
+ relative_url: "/groups/source%2Ffull%2Fpath/avatar",
+ dir: an_instance_of(String),
+ file_size_limit: Avatarable::MAXIMUM_FILE_SIZE,
+ allowed_content_types: described_class::ALLOWED_AVATAR_DOWNLOAD_TYPES
+ )
+
+ expect { subject.run }.to change(context.group, :avatar)
+
+ expect(context.group.avatar.filename).to eq(File.basename(avatar_path))
+ end
+
+ it 'raises an error when the avatar upload fails' do
+ avatar_path = 'spec/fixtures/aosp_manifest.xml'
+ stub_file_download(
+ avatar_path,
+ configuration: context.configuration,
+ relative_url: "/groups/source%2Ffull%2Fpath/avatar",
+ dir: an_instance_of(String),
+ file_size_limit: Avatarable::MAXIMUM_FILE_SIZE,
+ allowed_content_types: described_class::ALLOWED_AVATAR_DOWNLOAD_TYPES
+ )
+
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger).to receive(:error)
+ .with(
+ bulk_import_id: context.bulk_import.id,
+ bulk_import_entity_id: context.entity.id,
+ bulk_import_entity_type: context.entity.source_type,
+ context_extra: context.extra,
+ exception_class: "BulkImports::Groups::Pipelines::GroupAvatarPipeline::GroupAvatarLoadingError",
+ exception_message: "Avatar file format is not supported. Please try one of the following supported formats: image/png, image/jpeg, image/gif, image/bmp, image/tiff, image/vnd.microsoft.icon",
+ pipeline_class: "BulkImports::Groups::Pipelines::GroupAvatarPipeline",
+ pipeline_step: :loader
+ )
+ end
+
+ expect { subject.run }.to change(BulkImports::Failure, :count)
+ end
+ end
+
+ def stub_file_download(filepath = 'file/path.png', **params)
+ expect_next_instance_of(BulkImports::FileDownloadService, params.presence) do |downloader|
+ expect(downloader).to receive(:execute).and_return(filepath)
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/groups/pipelines/members_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/members_pipeline_spec.rb
index d8a667ec92a..0126acb320b 100644
--- a/spec/lib/bulk_imports/groups/pipelines/members_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/groups/pipelines/members_pipeline_spec.rb
@@ -63,6 +63,14 @@ RSpec.describe BulkImports::Groups::Pipelines::MembersPipeline do
expect(member.updated_at).to eq('2020-01-01T00:00:00Z')
expect(member.expires_at).to eq(nil)
end
+
+ context 'when user_id is current user id' do
+ it 'does not create new member' do
+ data = { 'user_id' => user.id }
+
+ expect { subject.load(context, data) }.not_to change(GroupMember, :count)
+ end
+ end
end
describe 'pipeline parts' do
diff --git a/spec/lib/bulk_imports/groups/transformers/member_attributes_transformer_spec.rb b/spec/lib/bulk_imports/groups/transformers/member_attributes_transformer_spec.rb
index f3905a4b6e4..af99428e0c1 100644
--- a/spec/lib/bulk_imports/groups/transformers/member_attributes_transformer_spec.rb
+++ b/spec/lib/bulk_imports/groups/transformers/member_attributes_transformer_spec.rb
@@ -84,9 +84,34 @@ RSpec.describe BulkImports::Groups::Transformers::MemberAttributesTransformer do
expect(subject.transform(context, data)).to be_nil
end
end
+
+ context 'source user id caching' do
+ context 'when user gid is present' do
+ it 'caches source user id' do
+ gid = 'gid://gitlab/User/7'
+ data = member_data(email: user.email, gid: gid)
+
+ expect_next_instance_of(BulkImports::UsersMapper) do |mapper|
+ expect(mapper).to receive(:cache_source_user_id).with('7', user.id)
+ end
+
+ subject.transform(context, data)
+ end
+ end
+
+ context 'when user gid is missing' do
+ it 'does not use caching' do
+ data = member_data(email: user.email)
+
+ expect(BulkImports::UsersMapper).not_to receive(:new)
+
+ subject.transform(context, data)
+ end
+ end
+ end
end
- def member_data(email: '', access_level: 30)
+ def member_data(email: '', gid: nil, access_level: 30)
{
'created_at' => '2020-01-01T00:00:00Z',
'updated_at' => '2020-01-01T00:00:00Z',
@@ -95,6 +120,7 @@ RSpec.describe BulkImports::Groups::Transformers::MemberAttributesTransformer do
'integer_value' => access_level
},
'user' => {
+ 'user_gid' => gid,
'public_email' => email
}
}
diff --git a/spec/lib/bulk_imports/ndjson_pipeline_spec.rb b/spec/lib/bulk_imports/ndjson_pipeline_spec.rb
index a5d1a5f7fbb..57a258b0d9f 100644
--- a/spec/lib/bulk_imports/ndjson_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/ndjson_pipeline_spec.rb
@@ -106,8 +106,11 @@ RSpec.describe BulkImports::NdjsonPipeline do
data = [hash, 1]
user = double
config = double(relation_excluded_keys: nil, top_relation_tree: [])
- context = double(portable: group, current_user: user, import_export_config: config)
+ import_double = instance_double(BulkImport, id: 1)
+ entity_double = instance_double(BulkImports::Entity, id: 2)
+ context = double(portable: group, current_user: user, import_export_config: config, bulk_import: import_double, entity: entity_double)
allow(subject).to receive(:import_export_config).and_return(config)
+ allow(subject).to receive(:context).and_return(context)
expect(Gitlab::ImportExport::Group::RelationFactory)
.to receive(:create)
@@ -116,7 +119,7 @@ RSpec.describe BulkImports::NdjsonPipeline do
relation_sym: :test,
relation_hash: hash,
importable: group,
- members_mapper: instance_of(Gitlab::ImportExport::MembersMapper),
+ members_mapper: instance_of(BulkImports::UsersMapper),
object_builder: Gitlab::ImportExport::Group::ObjectBuilder,
user: user,
excluded_keys: nil
diff --git a/spec/lib/bulk_imports/stage_spec.rb b/spec/lib/bulk_imports/stage_spec.rb
index d082faa90bc..4398b00e7e9 100644
--- a/spec/lib/bulk_imports/stage_spec.rb
+++ b/spec/lib/bulk_imports/stage_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe BulkImports::Stage do
let(:pipelines) do
[
[0, BulkImports::Groups::Pipelines::GroupPipeline],
+ [1, BulkImports::Groups::Pipelines::GroupAvatarPipeline],
[1, BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline],
[1, BulkImports::Groups::Pipelines::MembersPipeline],
[1, BulkImports::Groups::Pipelines::LabelsPipeline],
diff --git a/spec/lib/bulk_imports/users_mapper_spec.rb b/spec/lib/bulk_imports/users_mapper_spec.rb
new file mode 100644
index 00000000000..e6357319d05
--- /dev/null
+++ b/spec/lib/bulk_imports/users_mapper_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::UsersMapper do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:import) { create(:bulk_import, user: user) }
+ let_it_be(:entity) { create(:bulk_import_entity, bulk_import: import) }
+
+ let(:context) do
+ instance_double(
+ BulkImports::Pipeline::Context,
+ bulk_import: import,
+ entity: entity,
+ current_user: user
+ )
+ end
+
+ subject { described_class.new(context: context) }
+
+ describe '#map' do
+ context 'when value for specified key exists' do
+ it 'returns a map of source & destination user ids from redis' do
+ allow(Gitlab::Cache::Import::Caching).to receive(:values_from_hash).and_return({ "1" => "2" })
+
+ expect(subject.map).to eq({ 1 => 2 })
+ end
+ end
+
+ context 'when value for specified key does not exist' do
+ it 'returns default value' do
+ expect(subject.map[:non_existent_key]).to eq(user.id)
+ end
+ end
+ end
+
+ describe '#default_user_id' do
+ it 'returns current user id' do
+ expect(subject.default_user_id).to eq(user.id)
+ end
+ end
+
+ describe '#include?' do
+ context 'when source user id is present in the map' do
+ it 'returns true' do
+ allow(subject).to receive(:map).and_return({ 1 => 2 })
+
+ expect(subject.include?(1)).to eq(true)
+ end
+ end
+
+ context 'when source user id is missing in the map' do
+ it 'returns false' do
+ allow(subject).to receive(:map).and_return({})
+
+ expect(subject.include?(1)).to eq(false)
+ end
+ end
+ end
+
+ describe '#cache_source_user_id' do
+ it 'caches provided source & destination user ids in redis' do
+ expect(Gitlab::Cache::Import::Caching).to receive(:hash_add).with("bulk_imports/#{import.id}/#{entity.id}/source_user_ids", 1, 2)
+
+ subject.cache_source_user_id(1, 2)
+ end
+ end
+end
diff --git a/spec/lib/error_tracking/collector/sentry_request_parser_spec.rb b/spec/lib/error_tracking/collector/sentry_request_parser_spec.rb
new file mode 100644
index 00000000000..6f12c6d25e0
--- /dev/null
+++ b/spec/lib/error_tracking/collector/sentry_request_parser_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ErrorTracking::Collector::SentryRequestParser do
+ describe '.parse' do
+ let_it_be(:raw_event) { fixture_file('error_tracking/event.txt') }
+ let_it_be(:parsed_event) { Gitlab::Json.parse(fixture_file('error_tracking/parsed_event.json')) }
+
+ let(:body) { raw_event }
+ let(:headers) { { 'Content-Encoding' => '' } }
+ let(:request) { double('request', headers: headers, body: StringIO.new(body)) }
+
+ subject { described_class.parse(request) }
+
+ RSpec.shared_examples 'valid parser' do
+ it 'returns a valid hash' do
+ parsed_request = subject
+
+ expect(parsed_request[:request_type]).to eq('event')
+ expect(parsed_request[:event]).to eq(parsed_event)
+ end
+ end
+
+ context 'empty body content' do
+ let(:body) { '' }
+
+ it 'fails with an exception' do
+ expect { subject }.to raise_error(StandardError)
+ end
+ end
+
+ context 'plain text sentry request' do
+ it_behaves_like 'valid parser'
+ end
+
+ context 'gzip encoded sentry request' do
+ let(:headers) { { 'Content-Encoding' => 'gzip' } }
+ let(:body) { Zlib.gzip(raw_event) }
+
+ it_behaves_like 'valid parser'
+ end
+ end
+end
diff --git a/spec/lib/extracts_path_spec.rb b/spec/lib/extracts_path_spec.rb
index b69cbbf0ec0..05f3bb2f71a 100644
--- a/spec/lib/extracts_path_spec.rb
+++ b/spec/lib/extracts_path_spec.rb
@@ -7,9 +7,17 @@ RSpec.describe ExtractsPath do
include RepoHelpers
include Gitlab::Routing
+ # Make url_for work
+ def default_url_options
+ { controller: 'projects/blob', action: 'show', namespace_id: @project.namespace.path, project_id: @project.path }
+ end
+
let_it_be(:owner) { create(:user) }
let_it_be(:container) { create(:project, :repository, creator: owner) }
+
let(:request) { double('request') }
+ let(:flash) { {} }
+ let(:redirect_renamed_default_branch?) { true }
before do
@project = container
@@ -17,11 +25,14 @@ RSpec.describe ExtractsPath do
allow(container.repository).to receive(:ref_names).and_return(ref_names)
allow(request).to receive(:format=)
+ allow(request).to receive(:get?)
+ allow(request).to receive(:head?)
end
describe '#assign_ref_vars' do
let(:ref) { sample_commit[:id] }
- let(:params) { { path: sample_commit[:line_code_path], ref: ref } }
+ let(:path) { sample_commit[:line_code_path] }
+ let(:params) { { path: path, ref: ref } }
it_behaves_like 'assigns ref vars'
@@ -125,6 +136,66 @@ RSpec.describe ExtractsPath do
expect(@commit).to be_nil
end
end
+
+ context 'ref points to a previous default branch' do
+ let(:ref) { 'develop' }
+
+ before do
+ @project.update!(previous_default_branch: ref)
+
+ allow(@project).to receive(:default_branch).and_return('foo')
+ end
+
+ it 'redirects to the new default branch for a GET request' do
+ allow(request).to receive(:get?).and_return(true)
+
+ expect(self).to receive(:redirect_to).with("http://localhost/#{@project.full_path}/-/blob/foo/#{path}")
+ expect(self).not_to receive(:render_404)
+
+ assign_ref_vars
+
+ expect(@commit).to be_nil
+ expect(flash[:notice]).to match(/default branch/)
+ end
+
+ it 'redirects to the new default branch for a HEAD request' do
+ allow(request).to receive(:head?).and_return(true)
+
+ expect(self).to receive(:redirect_to).with("http://localhost/#{@project.full_path}/-/blob/foo/#{path}")
+ expect(self).not_to receive(:render_404)
+
+ assign_ref_vars
+
+ expect(@commit).to be_nil
+ expect(flash[:notice]).to match(/default branch/)
+ end
+
+ it 'returns 404 for any other request type' do
+ expect(self).not_to receive(:redirect_to)
+ expect(self).to receive(:render_404)
+
+ assign_ref_vars
+
+ expect(@commit).to be_nil
+ expect(flash).to be_empty
+ end
+
+ context 'redirect behaviour is disabled' do
+ let(:redirect_renamed_default_branch?) { false }
+
+ it 'returns 404 for a GET request' do
+ allow(request).to receive(:get?).and_return(true)
+
+ expect(self).not_to receive(:redirect_to)
+ expect(self).to receive(:render_404)
+
+ assign_ref_vars
+
+ expect(@commit).to be_nil
+ expect(flash).to be_empty
+ end
+ end
+ end
end
it_behaves_like 'extracts refs'
diff --git a/spec/lib/extracts_ref_spec.rb b/spec/lib/extracts_ref_spec.rb
index 5433a512981..3cdce150de9 100644
--- a/spec/lib/extracts_ref_spec.rb
+++ b/spec/lib/extracts_ref_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe ExtractsRef do
let_it_be(:owner) { create(:user) }
let_it_be(:container) { create(:snippet, :repository, author: owner) }
+
let(:ref) { sample_commit[:id] }
let(:params) { { path: sample_commit[:line_code_path], ref: ref } }
diff --git a/spec/lib/gitlab/analytics/unique_visits_spec.rb b/spec/lib/gitlab/analytics/unique_visits_spec.rb
deleted file mode 100644
index f4d5c0b1eca..00000000000
--- a/spec/lib/gitlab/analytics/unique_visits_spec.rb
+++ /dev/null
@@ -1,81 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Analytics::UniqueVisits, :clean_gitlab_redis_shared_state do
- let(:unique_visits) { Gitlab::Analytics::UniqueVisits.new }
- let(:target1_id) { 'g_analytics_contribution' }
- let(:target2_id) { 'g_analytics_insights' }
- let(:target3_id) { 'g_analytics_issues' }
- let(:target4_id) { 'g_compliance_dashboard' }
- let(:target5_id) { 'i_compliance_credential_inventory' }
- let(:visitor1_id) { 'dfb9d2d2-f56c-4c77-8aeb-6cddc4a1f857' }
- let(:visitor2_id) { '1dd9afb2-a3ee-4de1-8ae3-a405579c8584' }
- let(:visitor3_id) { '34rfjuuy-ce56-sa35-ds34-dfer567dfrf2' }
-
- around do |example|
- # We need to freeze to a reference time
- # because visits are grouped by the week number in the year
- # Without freezing the time, the test may behave inconsistently
- # depending on which day of the week test is run.
- reference_time = Time.utc(2020, 6, 1)
- travel_to(reference_time) { example.run }
- end
-
- describe '#track_visit' do
- it 'tracks the unique weekly visits for targets' do
- unique_visits.track_visit(target1_id, values: visitor1_id, time: 7.days.ago)
- unique_visits.track_visit(target1_id, values: visitor1_id, time: 7.days.ago)
- unique_visits.track_visit(target1_id, values: visitor2_id, time: 7.days.ago)
-
- unique_visits.track_visit(target2_id, values: visitor2_id, time: 7.days.ago)
- unique_visits.track_visit(target2_id, values: visitor1_id, time: 8.days.ago)
- unique_visits.track_visit(target2_id, values: visitor1_id, time: 15.days.ago)
-
- unique_visits.track_visit(target4_id, values: visitor3_id, time: 7.days.ago)
-
- unique_visits.track_visit(target5_id, values: visitor3_id, time: 15.days.ago)
- unique_visits.track_visit(target5_id, values: visitor2_id, time: 15.days.ago)
-
- expect(unique_visits.unique_visits_for(targets: target1_id)).to eq(2)
- expect(unique_visits.unique_visits_for(targets: target2_id)).to eq(1)
- expect(unique_visits.unique_visits_for(targets: target4_id)).to eq(1)
-
- expect(unique_visits.unique_visits_for(targets: target2_id, start_date: 15.days.ago)).to eq(1)
-
- expect(unique_visits.unique_visits_for(targets: target3_id)).to eq(0)
-
- expect(unique_visits.unique_visits_for(targets: target5_id, start_date: 15.days.ago)).to eq(2)
-
- expect(unique_visits.unique_visits_for(targets: :analytics)).to eq(2)
- expect(unique_visits.unique_visits_for(targets: :analytics, start_date: 15.days.ago)).to eq(1)
- expect(unique_visits.unique_visits_for(targets: :analytics, start_date: 30.days.ago)).to eq(0)
-
- expect(unique_visits.unique_visits_for(targets: :analytics, start_date: 4.weeks.ago, end_date: Date.current)).to eq(2)
-
- expect(unique_visits.unique_visits_for(targets: :compliance)).to eq(1)
- expect(unique_visits.unique_visits_for(targets: :compliance, start_date: 15.days.ago)).to eq(2)
- expect(unique_visits.unique_visits_for(targets: :compliance, start_date: 30.days.ago)).to eq(0)
-
- expect(unique_visits.unique_visits_for(targets: :compliance, start_date: 4.weeks.ago, end_date: Date.current)).to eq(2)
- end
-
- it 'sets the keys in Redis to expire automatically after 12 weeks' do
- unique_visits.track_visit(target1_id, values: visitor1_id)
-
- Gitlab::Redis::SharedState.with do |redis|
- redis.scan_each(match: "{#{target1_id}}-*").each do |key|
- expect(redis.ttl(key)).to be_within(5.seconds).of(12.weeks)
- end
- end
- end
-
- it 'raises an error if an invalid target id is given' do
- invalid_target_id = "x_invalid"
-
- expect do
- unique_visits.track_visit(invalid_target_id, values: visitor1_id)
- end.to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownEvent)
- end
- end
-end
diff --git a/spec/lib/gitlab/auth/auth_finders_spec.rb b/spec/lib/gitlab/auth/auth_finders_spec.rb
index 7475ed2796f..14200733c19 100644
--- a/spec/lib/gitlab/auth/auth_finders_spec.rb
+++ b/spec/lib/gitlab/auth/auth_finders_spec.rb
@@ -460,7 +460,7 @@ RSpec.describe Gitlab::Auth::AuthFinders do
expect { find_user_from_access_token }.to raise_error(Gitlab::Auth::UnauthorizedError)
end
- context 'no feed or API requests' do
+ context 'no feed, API or archive requests' do
it 'returns nil if the request is not RSS' do
expect(find_user_from_web_access_token(:rss)).to be_nil
end
@@ -472,6 +472,10 @@ RSpec.describe Gitlab::Auth::AuthFinders do
it 'returns nil if the request is not API' do
expect(find_user_from_web_access_token(:api)).to be_nil
end
+
+ it 'returns nil if the request is not ARCHIVE' do
+ expect(find_user_from_web_access_token(:archive)).to be_nil
+ end
end
it 'returns the user for RSS requests' do
@@ -486,6 +490,24 @@ RSpec.describe Gitlab::Auth::AuthFinders do
expect(find_user_from_web_access_token(:ics)).to eq(user)
end
+ it 'returns the user for ARCHIVE requests' do
+ set_header('SCRIPT_NAME', '/-/archive/main.zip')
+
+ expect(find_user_from_web_access_token(:archive)).to eq(user)
+ end
+
+ context 'when allow_archive_as_web_access_format feature flag is disabled' do
+ before do
+ stub_feature_flags(allow_archive_as_web_access_format: false)
+ end
+
+ it 'returns nil for ARCHIVE requests' do
+ set_header('SCRIPT_NAME', '/-/archive/main.zip')
+
+ expect(find_user_from_web_access_token(:archive)).to be_nil
+ end
+ end
+
context 'for API requests' do
it 'returns the user' do
set_header('SCRIPT_NAME', '/api/endpoint')
diff --git a/spec/lib/gitlab/auth/ldap/adapter_spec.rb b/spec/lib/gitlab/auth/ldap/adapter_spec.rb
index 8546d63cf77..b7b12e49a8e 100644
--- a/spec/lib/gitlab/auth/ldap/adapter_spec.rb
+++ b/spec/lib/gitlab/auth/ldap/adapter_spec.rb
@@ -95,6 +95,40 @@ RSpec.describe Gitlab::Auth::Ldap::Adapter do
describe '#ldap_search' do
subject { adapter.ldap_search(base: :dn, filter: :filter) }
+ shared_examples 'connection retry' do
+ before do
+ allow(adapter).to receive(:renew_connection_adapter).and_return(ldap)
+ allow(Gitlab::AppLogger).to receive(:warn)
+ end
+
+ context 'retries the operation' do
+ before do
+ stub_const("#{described_class}::MAX_SEARCH_RETRIES", 3)
+ end
+
+ it 'as many times as MAX_SEARCH_RETRIES' do
+ expect(ldap).to receive(:search).exactly(3).times
+ expect { subject }.to raise_error(Gitlab::Auth::Ldap::LdapConnectionError)
+ end
+
+ context 'when no more retries' do
+ before do
+ stub_const("#{described_class}::MAX_SEARCH_RETRIES", 1)
+ end
+
+ it 'raises the exception' do
+ expect { subject }.to raise_error(Gitlab::Auth::Ldap::LdapConnectionError)
+ end
+
+ it 'logs the error' do
+ expect { subject }.to raise_error(Gitlab::Auth::Ldap::LdapConnectionError)
+ expect(Gitlab::AppLogger).to have_received(:warn).with(
+ "LDAP search raised exception Net::LDAP::Error: #{err_message}")
+ end
+ end
+ end
+ end
+
context "when the search is successful" do
context "and the result is non-empty" do
before do
@@ -110,6 +144,22 @@ RSpec.describe Gitlab::Auth::Ldap::Adapter do
end
it { is_expected.to eq [] }
+
+ context 'when an empty result is returned with a retryable code' do
+ let(:response_code) { 80 }
+ let(:response_message) { 'Other' }
+ let(:err_message) { "Got empty results with response code: #{response_code}, message: #{response_message}" }
+
+ before do
+ stub_ldap_config(retry_empty_result_with_codes: [response_code])
+ allow(ldap).to receive_messages(
+ search: nil,
+ get_operation_result: double(code: response_code, message: response_message)
+ )
+ end
+
+ it_behaves_like 'connection retry'
+ end
end
end
@@ -132,30 +182,13 @@ RSpec.describe Gitlab::Auth::Ldap::Adapter do
end
context 'retries the operation' do
- before do
- stub_const("#{described_class}::MAX_SEARCH_RETRIES", 3)
- end
+ let(:err_message) { 'some error' }
- it 'as many times as MAX_SEARCH_RETRIES' do
- expect(ldap).to receive(:search).exactly(3).times
- expect { subject }.to raise_error(Gitlab::Auth::Ldap::LdapConnectionError)
+ before do
+ allow(ldap).to receive(:search) { raise Net::LDAP::Error, err_message }
end
- context 'when no more retries' do
- before do
- stub_const("#{described_class}::MAX_SEARCH_RETRIES", 1)
- end
-
- it 'raises the exception' do
- expect { subject }.to raise_error(Gitlab::Auth::Ldap::LdapConnectionError)
- end
-
- it 'logs the error' do
- expect { subject }.to raise_error(Gitlab::Auth::Ldap::LdapConnectionError)
- expect(Gitlab::AppLogger).to have_received(:warn).with(
- "LDAP search raised exception Net::LDAP::Error: some error")
- end
- end
+ it_behaves_like 'connection retry'
end
end
end
diff --git a/spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_spec.rb b/spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_spec.rb
new file mode 100644
index 00000000000..f56cf899410
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillDraftStatusOnMergeRequests do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:merge_requests) { table(:merge_requests) }
+
+ let(:group) { namespaces.create!(name: 'gitlab', path: 'gitlab') }
+ let(:project) { projects.create!(namespace_id: group.id) }
+
+ let(:draft_prefixes) { ["[Draft]", "(Draft)", "Draft:", "Draft", "[WIP]", "WIP:", "WIP"] }
+
+ def create_merge_request(params)
+ common_params = {
+ target_project_id: project.id,
+ target_branch: 'feature1',
+ source_branch: 'master'
+ }
+
+ merge_requests.create!(common_params.merge(params))
+ end
+
+ context "for MRs with #draft? == true titles but draft attribute false" do
+ before do
+ draft_prefixes.each do |prefix|
+ (1..4).each do |n|
+ create_merge_request(
+ title: "#{prefix} This is a title",
+ draft: false,
+ state_id: n
+ )
+ end
+ end
+ end
+
+ it "updates all open draft merge request's draft field to true" do
+ mr_count = merge_requests.all.count
+ mr_ids = merge_requests.all.collect(&:id)
+
+ expect { subject.perform(mr_ids.first, mr_ids.last) }
+ .to change { MergeRequest.where(draft: false).count }
+ .from(mr_count).to(mr_count - draft_prefixes.length)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb b/spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb
index 7fe82420364..58864aac084 100644
--- a/spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb
@@ -3,18 +3,18 @@
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillJiraTrackerDeploymentType2, :migration, schema: 20201028182809 do
- let_it_be(:jira_service_temp) { described_class::JiraServiceTemp }
+ let_it_be(:jira_integration_temp) { described_class::JiraServiceTemp }
let_it_be(:jira_tracker_data_temp) { described_class::JiraTrackerDataTemp }
let_it_be(:atlassian_host) { 'https://api.atlassian.net' }
let_it_be(:mixedcase_host) { 'https://api.AtlassiaN.nEt' }
let_it_be(:server_host) { 'https://my.server.net' }
- let(:jira_service) { jira_service_temp.create!(type: 'JiraService', active: true, category: 'issue_tracker') }
+ let(:jira_integration) { jira_integration_temp.create!(type: 'JiraService', active: true, category: 'issue_tracker') }
subject { described_class.new }
def create_tracker_data(options = {})
- jira_tracker_data_temp.create!({ service_id: jira_service.id }.merge(options))
+ jira_tracker_data_temp.create!({ service_id: jira_integration.id }.merge(options))
end
describe '#perform' do
diff --git a/spec/lib/gitlab/background_migration/backfill_upvotes_count_on_issues_spec.rb b/spec/lib/gitlab/background_migration/backfill_upvotes_count_on_issues_spec.rb
new file mode 100644
index 00000000000..b084e3fe885
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_upvotes_count_on_issues_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillUpvotesCountOnIssues, schema: 20210701111909 do
+ let(:award_emoji) { table(:award_emoji) }
+
+ let!(:namespace) { table(:namespaces).create!(name: 'namespace', path: 'namespace') }
+ let!(:project1) { table(:projects).create!(namespace_id: namespace.id) }
+ let!(:project2) { table(:projects).create!(namespace_id: namespace.id) }
+ let!(:issue1) { table(:issues).create!(project_id: project1.id) }
+ let!(:issue2) { table(:issues).create!(project_id: project2.id) }
+ let!(:issue3) { table(:issues).create!(project_id: project2.id) }
+ let!(:issue4) { table(:issues).create!(project_id: project2.id) }
+
+ describe '#perform' do
+ before do
+ add_upvotes(issue1, :thumbsdown, 1)
+ add_upvotes(issue2, :thumbsup, 2)
+ add_upvotes(issue2, :thumbsdown, 1)
+ add_upvotes(issue3, :thumbsup, 3)
+ add_upvotes(issue4, :thumbsup, 4)
+ end
+
+ it 'updates upvotes_count' do
+ subject.perform(issue1.id, issue4.id)
+
+ expect(issue1.reload.upvotes_count).to eq(0)
+ expect(issue2.reload.upvotes_count).to eq(2)
+ expect(issue3.reload.upvotes_count).to eq(3)
+ expect(issue4.reload.upvotes_count).to eq(4)
+ end
+ end
+
+ private
+
+ def add_upvotes(issue, name, count)
+ count.times do
+ award_emoji.create!(
+ name: name.to_s,
+ awardable_type: 'Issue',
+ awardable_id: issue.id
+ )
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/delete_orphaned_deployments_spec.rb b/spec/lib/gitlab/background_migration/delete_orphaned_deployments_spec.rb
new file mode 100644
index 00000000000..c4039b85459
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/delete_orphaned_deployments_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::DeleteOrphanedDeployments, :migration, schema: 20210617161348 do
+ let!(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
+ let!(:project) { table(:projects).create!(namespace_id: namespace.id) }
+ let!(:environment) { table(:environments).create!(name: 'production', slug: 'production', project_id: project.id) }
+ let(:background_migration_jobs) { table(:background_migration_jobs) }
+
+ before do
+ create_deployment!(environment.id, project.id)
+ create_deployment!(non_existing_record_id, project.id)
+ end
+
+ it 'deletes only orphaned deployments' do
+ expect(valid_deployments.pluck(:id)).not_to be_empty
+ expect(orphaned_deployments.pluck(:id)).not_to be_empty
+
+ subject.perform(table(:deployments).minimum(:id), table(:deployments).maximum(:id))
+
+ expect(valid_deployments.pluck(:id)).not_to be_empty
+ expect(orphaned_deployments.pluck(:id)).to be_empty
+ end
+
+ it 'marks jobs as done' do
+ first_job = background_migration_jobs.create!(
+ class_name: 'DeleteOrphanedDeployments',
+ arguments: [table(:deployments).minimum(:id), table(:deployments).minimum(:id)]
+ )
+
+ second_job = background_migration_jobs.create!(
+ class_name: 'DeleteOrphanedDeployments',
+ arguments: [table(:deployments).maximum(:id), table(:deployments).maximum(:id)]
+ )
+
+ subject.perform(table(:deployments).minimum(:id), table(:deployments).minimum(:id))
+
+ expect(first_job.reload.status).to eq(Gitlab::Database::BackgroundMigrationJob.statuses[:succeeded])
+ expect(second_job.reload.status).to eq(Gitlab::Database::BackgroundMigrationJob.statuses[:pending])
+ end
+
+ private
+
+ def valid_deployments
+ table(:deployments).where('EXISTS (SELECT 1 FROM environments WHERE deployments.environment_id = environments.id)')
+ end
+
+ def orphaned_deployments
+ table(:deployments).where('NOT EXISTS (SELECT 1 FROM environments WHERE deployments.environment_id = environments.id)')
+ end
+
+ def create_deployment!(environment_id, project_id)
+ table(:deployments).create!(
+ environment_id: environment_id,
+ project_id: project_id,
+ ref: 'master',
+ tag: false,
+ sha: 'x',
+ status: 1,
+ iid: table(:deployments).count + 1)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb b/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb
index 80879c8c6d9..f2cd2acd4f3 100644
--- a/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb
+++ b/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb
@@ -283,11 +283,11 @@ RSpec.describe Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData, s
end
context 'with Jira service with invalid properties, valid Jira service and valid bugzilla service' do
- let!(:jira_service_invalid) do
+ let!(:jira_integration_invalid) do
services.create!(id: 19, title: 'invalid - title', description: 'invalid - description', type: 'JiraService', properties: 'invalid data', category: 'issue_tracker')
end
- let!(:jira_service_valid) do
+ let!(:jira_integration_valid) do
services.create!(id: 20, type: 'JiraService', properties: jira_properties.to_json, category: 'issue_tracker')
end
@@ -298,21 +298,21 @@ RSpec.describe Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData, s
it 'migrates data for the valid service' do
subject
- jira_service_invalid.reload
- expect(JiraTrackerData.find_by(service_id: jira_service_invalid.id)).to be_nil
- expect(jira_service_invalid.title).to eq('invalid - title')
- expect(jira_service_invalid.description).to eq('invalid - description')
- expect(jira_service_invalid.properties).to eq('invalid data')
+ jira_integration_invalid.reload
+ expect(JiraTrackerData.find_by(service_id: jira_integration_invalid.id)).to be_nil
+ expect(jira_integration_invalid.title).to eq('invalid - title')
+ expect(jira_integration_invalid.description).to eq('invalid - description')
+ expect(jira_integration_invalid.properties).to eq('invalid data')
- jira_service_valid.reload
- data = JiraTrackerData.find_by(service_id: jira_service_valid.id)
+ jira_integration_valid.reload
+ data = JiraTrackerData.find_by(service_id: jira_integration_valid.id)
expect(data.url).to eq(url)
expect(data.api_url).to eq(api_url)
expect(data.username).to eq(username)
expect(data.password).to eq(password)
- expect(jira_service_valid.title).to eq(title)
- expect(jira_service_valid.description).to eq(description)
+ expect(jira_integration_valid.title).to eq(title)
+ expect(jira_integration_valid.description).to eq(description)
bugzilla_integration_valid.reload
data = IssueTrackerData.find_by(service_id: bugzilla_integration_valid.id)
diff --git a/spec/lib/gitlab/background_migration/migrate_merge_request_diff_commit_users_spec.rb b/spec/lib/gitlab/background_migration/migrate_merge_request_diff_commit_users_spec.rb
new file mode 100644
index 00000000000..496ce151032
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/migrate_merge_request_diff_commit_users_spec.rb
@@ -0,0 +1,400 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::MigrateMergeRequestDiffCommitUsers do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:users) { table(:users) }
+ let(:merge_requests) { table(:merge_requests) }
+ let(:diffs) { table(:merge_request_diffs) }
+ let(:commits) do
+ table(:merge_request_diff_commits).tap do |t|
+ t.extend(SuppressCompositePrimaryKeyWarning)
+ end
+ end
+
+ let(:commit_users) { described_class::MergeRequestDiffCommitUser }
+
+ let(:namespace) { namespaces.create!(name: 'foo', path: 'foo') }
+ let(:project) { projects.create!(namespace_id: namespace.id) }
+ let(:merge_request) do
+ merge_requests.create!(
+ source_branch: 'x',
+ target_branch: 'master',
+ target_project_id: project.id
+ )
+ end
+
+ let(:diff) { diffs.create!(merge_request_id: merge_request.id) }
+ let(:migration) { described_class.new }
+
+ describe 'MergeRequestDiffCommit' do
+ describe '.each_row_to_migrate' do
+ it 'yields the rows to migrate for a given range' do
+ commit1 = commits.create!(
+ merge_request_diff_id: diff.id,
+ relative_order: 0,
+ sha: Gitlab::Database::ShaAttribute.serialize('123abc'),
+ author_name: 'bob',
+ author_email: 'bob@example.com',
+ committer_name: 'bob',
+ committer_email: 'bob@example.com'
+ )
+
+ commit2 = commits.create!(
+ merge_request_diff_id: diff.id,
+ relative_order: 1,
+ sha: Gitlab::Database::ShaAttribute.serialize('123abc'),
+ author_name: 'Alice',
+ author_email: 'alice@example.com',
+ committer_name: 'Alice',
+ committer_email: 'alice@example.com'
+ )
+
+ # We stub this constant to make sure we run at least two pagination
+ # queries for getting the data. This way we can test if the pagination
+ # is actually working properly.
+ stub_const(
+ 'Gitlab::BackgroundMigration::MigrateMergeRequestDiffCommitUsers::COMMIT_ROWS_PER_QUERY',
+ 1
+ )
+
+ rows = []
+
+ described_class::MergeRequestDiffCommit.each_row_to_migrate(diff.id, diff.id + 1) do |row|
+ rows << row
+ end
+
+ expect(rows.length).to eq(2)
+
+ expect(rows[0].author_name).to eq(commit1.author_name)
+ expect(rows[1].author_name).to eq(commit2.author_name)
+ end
+ end
+ end
+
+ describe 'MergeRequestDiffCommitUser' do
+ describe '.union' do
+ it 'produces a union of the given queries' do
+ alice = commit_users.create!(name: 'Alice', email: 'alice@example.com')
+ bob = commit_users.create!(name: 'Bob', email: 'bob@example.com')
+ users = commit_users.union([
+ commit_users.where(name: 'Alice').to_sql,
+ commit_users.where(name: 'Bob').to_sql
+ ])
+
+ expect(users).to include(alice)
+ expect(users).to include(bob)
+ end
+ end
+ end
+
+ describe '#perform' do
+ it 'migrates the data in the range' do
+ commits.create!(
+ merge_request_diff_id: diff.id,
+ relative_order: 0,
+ sha: Gitlab::Database::ShaAttribute.serialize('123abc'),
+ author_name: 'bob',
+ author_email: 'bob@example.com',
+ committer_name: 'bob',
+ committer_email: 'bob@example.com'
+ )
+
+ migration.perform(diff.id, diff.id + 1)
+
+ bob = commit_users.find_by(name: 'bob')
+ commit = commits.first
+
+ expect(commit.commit_author_id).to eq(bob.id)
+ expect(commit.committer_id).to eq(bob.id)
+ end
+
+ it 'treats empty names and emails the same as NULL values' do
+ commits.create!(
+ merge_request_diff_id: diff.id,
+ relative_order: 0,
+ sha: Gitlab::Database::ShaAttribute.serialize('123abc'),
+ author_name: 'bob',
+ author_email: 'bob@example.com',
+ committer_name: '',
+ committer_email: ''
+ )
+
+ migration.perform(diff.id, diff.id + 1)
+
+ bob = commit_users.find_by(name: 'bob')
+ commit = commits.first
+
+ expect(commit.commit_author_id).to eq(bob.id)
+ expect(commit.committer_id).to be_nil
+ end
+
+ it 'does not update rows without a committer and author' do
+ commits.create!(
+ merge_request_diff_id: diff.id,
+ relative_order: 0,
+ sha: Gitlab::Database::ShaAttribute.serialize('123abc')
+ )
+
+ migration.perform(diff.id, diff.id + 1)
+
+ commit = commits.first
+
+ expect(commit_users.count).to eq(0)
+ expect(commit.commit_author_id).to be_nil
+ expect(commit.committer_id).to be_nil
+ end
+
+ it 'marks the background job as done' do
+ Gitlab::Database::BackgroundMigrationJob.create!(
+ class_name: 'MigrateMergeRequestDiffCommitUsers',
+ arguments: [diff.id, diff.id + 1]
+ )
+
+ migration.perform(diff.id, diff.id + 1)
+
+ job = Gitlab::Database::BackgroundMigrationJob.first
+
+ expect(job.status).to eq('succeeded')
+ end
+ end
+
+ describe '#get_data_to_update' do
+ it 'returns the users and commit rows to update' do
+ commits.create!(
+ merge_request_diff_id: diff.id,
+ relative_order: 0,
+ sha: Gitlab::Database::ShaAttribute.serialize('123abc'),
+ author_name: 'bob' + ('a' * 510),
+ author_email: 'bob@example.com',
+ committer_name: 'bob' + ('a' * 510),
+ committer_email: 'bob@example.com'
+ )
+
+ commits.create!(
+ merge_request_diff_id: diff.id,
+ relative_order: 1,
+ sha: Gitlab::Database::ShaAttribute.serialize('456abc'),
+ author_name: 'alice',
+ author_email: 'alice@example.com',
+ committer_name: 'alice',
+ committer_email: 'alice@example.com'
+ )
+
+ users, to_update = migration.get_data_to_update(diff.id, diff.id + 1)
+
+ bob_name = 'bob' + ('a' * 509)
+
+ expect(users).to include(%w[alice alice@example.com])
+ expect(users).to include([bob_name, 'bob@example.com'])
+
+ expect(to_update[[diff.id, 0]])
+ .to eq([[bob_name, 'bob@example.com'], [bob_name, 'bob@example.com']])
+
+ expect(to_update[[diff.id, 1]])
+ .to eq([%w[alice alice@example.com], %w[alice alice@example.com]])
+ end
+
+ it 'does not include a user if both the name and email are missing' do
+ commits.create!(
+ merge_request_diff_id: diff.id,
+ relative_order: 0,
+ sha: Gitlab::Database::ShaAttribute.serialize('123abc'),
+ author_name: nil,
+ author_email: nil,
+ committer_name: 'bob',
+ committer_email: 'bob@example.com'
+ )
+
+ users, _ = migration.get_data_to_update(diff.id, diff.id + 1)
+
+ expect(users).to eq([%w[bob bob@example.com]].to_set)
+ end
+ end
+
+ describe '#get_user_rows_in_batches' do
+ it 'retrieves all existing users' do
+ alice = commit_users.create!(name: 'alice', email: 'alice@example.com')
+ bob = commit_users.create!(name: 'bob', email: 'bob@example.com')
+
+ users = [[alice.name, alice.email], [bob.name, bob.email]]
+ mapping = {}
+
+ migration.get_user_rows_in_batches(users, mapping)
+
+ expect(mapping[%w[alice alice@example.com]]).to eq(alice)
+ expect(mapping[%w[bob bob@example.com]]).to eq(bob)
+ end
+ end
+
+ describe '#create_missing_users' do
+ it 'creates merge request diff commit users that are missing' do
+ alice = commit_users.create!(name: 'alice', email: 'alice@example.com')
+ users = [%w[alice alice@example.com], %w[bob bob@example.com]]
+ mapping = { %w[alice alice@example.com] => alice }
+
+ migration.create_missing_users(users, mapping)
+
+ expect(mapping[%w[alice alice@example.com]]).to eq(alice)
+ expect(mapping[%w[bob bob@example.com]].name).to eq('bob')
+ expect(mapping[%w[bob bob@example.com]].email).to eq('bob@example.com')
+ end
+ end
+
+ describe '#update_commit_rows' do
+ it 'updates the merge request diff commit rows' do
+ to_update = { [42, 0] => [%w[alice alice@example.com], []] }
+ user_mapping = { %w[alice alice@example.com] => double(:user, id: 1) }
+
+ expect(migration)
+ .to receive(:bulk_update_commit_rows)
+ .with({ [42, 0] => [1, nil] })
+
+ migration.update_commit_rows(to_update, user_mapping)
+ end
+ end
+
+ describe '#bulk_update_commit_rows' do
+ context 'when there are no authors and committers' do
+ it 'does not update any rows' do
+ migration.bulk_update_commit_rows({ [1, 0] => [] })
+
+ expect(described_class::MergeRequestDiffCommit.connection)
+ .not_to receive(:execute)
+ end
+ end
+
+ context 'when there are only authors' do
+ it 'only updates the author IDs' do
+ author = commit_users.create!(name: 'Alice', email: 'alice@example.com')
+ commit = commits.create!(
+ merge_request_diff_id: diff.id,
+ relative_order: 0,
+ sha: Gitlab::Database::ShaAttribute.serialize('123abc')
+ )
+
+ mapping = {
+ [commit.merge_request_diff_id, commit.relative_order] =>
+ [author.id, nil]
+ }
+
+ migration.bulk_update_commit_rows(mapping)
+
+ commit = commits.first
+
+ expect(commit.commit_author_id).to eq(author.id)
+ expect(commit.committer_id).to be_nil
+ end
+ end
+
+ context 'when there are only committers' do
+ it 'only updates the committer IDs' do
+ committer =
+ commit_users.create!(name: 'Alice', email: 'alice@example.com')
+
+ commit = commits.create!(
+ merge_request_diff_id: diff.id,
+ relative_order: 0,
+ sha: Gitlab::Database::ShaAttribute.serialize('123abc')
+ )
+
+ mapping = {
+ [commit.merge_request_diff_id, commit.relative_order] =>
+ [nil, committer.id]
+ }
+
+ migration.bulk_update_commit_rows(mapping)
+
+ commit = commits.first
+
+ expect(commit.committer_id).to eq(committer.id)
+ expect(commit.commit_author_id).to be_nil
+ end
+ end
+
+ context 'when there are both authors and committers' do
+ it 'updates both the author and committer IDs' do
+ author = commit_users.create!(name: 'Bob', email: 'bob@example.com')
+ committer =
+ commit_users.create!(name: 'Alice', email: 'alice@example.com')
+
+ commit = commits.create!(
+ merge_request_diff_id: diff.id,
+ relative_order: 0,
+ sha: Gitlab::Database::ShaAttribute.serialize('123abc')
+ )
+
+ mapping = {
+ [commit.merge_request_diff_id, commit.relative_order] =>
+ [author.id, committer.id]
+ }
+
+ migration.bulk_update_commit_rows(mapping)
+
+ commit = commits.first
+
+ expect(commit.commit_author_id).to eq(author.id)
+ expect(commit.committer_id).to eq(committer.id)
+ end
+ end
+
+ context 'when there are multiple commit rows to update' do
+ it 'updates all the rows' do
+ author = commit_users.create!(name: 'Bob', email: 'bob@example.com')
+ committer =
+ commit_users.create!(name: 'Alice', email: 'alice@example.com')
+
+ commit1 = commits.create!(
+ merge_request_diff_id: diff.id,
+ relative_order: 0,
+ sha: Gitlab::Database::ShaAttribute.serialize('123abc')
+ )
+
+ commit2 = commits.create!(
+ merge_request_diff_id: diff.id,
+ relative_order: 1,
+ sha: Gitlab::Database::ShaAttribute.serialize('456abc')
+ )
+
+ mapping = {
+ [commit1.merge_request_diff_id, commit1.relative_order] =>
+ [author.id, committer.id],
+
+ [commit2.merge_request_diff_id, commit2.relative_order] =>
+ [author.id, nil]
+ }
+
+ migration.bulk_update_commit_rows(mapping)
+
+ commit1 = commits.find_by(relative_order: 0)
+ commit2 = commits.find_by(relative_order: 1)
+
+ expect(commit1.commit_author_id).to eq(author.id)
+ expect(commit1.committer_id).to eq(committer.id)
+
+ expect(commit2.commit_author_id).to eq(author.id)
+ expect(commit2.committer_id).to be_nil
+ end
+ end
+ end
+
+ describe '#primary_key' do
+ it 'returns the primary key for the commits table' do
+ key = migration.primary_key
+
+ expect(key.to_sql).to eq('("merge_request_diff_commits"."merge_request_diff_id", "merge_request_diff_commits"."relative_order")')
+ end
+ end
+
+ describe '#prepare' do
+ it 'trims a value to at most 512 characters' do
+ expect(migration.prepare('€' * 1_000)).to eq('€' * 512)
+ end
+
+ it 'returns nil if the value is an empty string' do
+ expect(migration.prepare('')).to be_nil
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb b/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb
index 33498ffa748..9eda51f6ec4 100644
--- a/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb
+++ b/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb
@@ -2,6 +2,8 @@
require 'spec_helper'
+require 'webauthn/u2f_migrator'
+
RSpec.describe Gitlab::BackgroundMigration::MigrateU2fWebauthn, :migration, schema: 20200925125321 do
let(:users) { table(:users) }
diff --git a/spec/lib/gitlab/background_migration/update_jira_tracker_data_deployment_type_based_on_url_spec.rb b/spec/lib/gitlab/background_migration/update_jira_tracker_data_deployment_type_based_on_url_spec.rb
index f7466a2ddfd..b96d3f7f0b5 100644
--- a/spec/lib/gitlab/background_migration/update_jira_tracker_data_deployment_type_based_on_url_spec.rb
+++ b/spec/lib/gitlab/background_migration/update_jira_tracker_data_deployment_type_based_on_url_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::UpdateJiraTrackerDataDeploymentTypeBasedOnUrl do
+RSpec.describe Gitlab::BackgroundMigration::UpdateJiraTrackerDataDeploymentTypeBasedOnUrl, schema: 20210421163509 do
let(:services_table) { table(:services) }
let(:service_jira_cloud) { services_table.create!(id: 1, type: 'JiraService') }
let(:service_jira_server) { services_table.create!(id: 2, type: 'JiraService') }
diff --git a/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb b/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb
index 8d625cab1d8..c0e4d1b5355 100644
--- a/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb
+++ b/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Cache::Ci::ProjectPipelineStatus, :clean_gitlab_redis_cache do
let_it_be(:project) { create(:project, :repository) }
+
let(:pipeline_status) { described_class.new(project) }
let(:cache_key) { pipeline_status.cache_key }
@@ -83,24 +84,8 @@ RSpec.describe Gitlab::Cache::Ci::ProjectPipelineStatus, :clean_gitlab_redis_cac
expect(pipeline_status).not_to be_has_cache
end
- context 'ci_pipeline_status_omit_commit_sha_in_cache_key is enabled' do
- before do
- stub_feature_flags(ci_pipeline_status_omit_commit_sha_in_cache_key: project)
- end
-
- it 'makes a Gitaly call' do
- expect { pipeline_status.load_status }.to change { Gitlab::GitalyClient.get_request_count }.by(1)
- end
- end
-
- context 'ci_pipeline_status_omit_commit_sha_in_cache_key is disabled' do
- before do
- stub_feature_flags(ci_pipeline_status_omit_commit_sha_in_cache_key: false)
- end
-
- it 'makes a Gitaly calls' do
- expect { pipeline_status.load_status }.to change { Gitlab::GitalyClient.get_request_count }.by(1)
- end
+ it 'makes a Gitaly call' do
+ expect { pipeline_status.load_status }.to change { Gitlab::GitalyClient.get_request_count }.by(1)
end
end
@@ -111,24 +96,8 @@ RSpec.describe Gitlab::Cache::Ci::ProjectPipelineStatus, :clean_gitlab_redis_cac
expect(pipeline_status).to be_has_cache
end
- context 'ci_pipeline_status_omit_commit_sha_in_cache_key is enabled' do
- before do
- stub_feature_flags(ci_pipeline_status_omit_commit_sha_in_cache_key: project)
- end
-
- it 'makes no Gitaly calls' do
- expect { pipeline_status.load_status }.to change { Gitlab::GitalyClient.get_request_count }.by(0)
- end
- end
-
- context 'ci_pipeline_status_omit_commit_sha_in_cache_key is disabled' do
- before do
- stub_feature_flags(ci_pipeline_status_omit_commit_sha_in_cache_key: false)
- end
-
- it 'makes a Gitaly calls' do
- expect { pipeline_status.load_status }.to change { Gitlab::GitalyClient.get_request_count }.by(1)
- end
+ it 'makes no Gitaly calls' do
+ expect { pipeline_status.load_status }.to change { Gitlab::GitalyClient.get_request_count }.by(0)
end
end
end
diff --git a/spec/lib/gitlab/cache/helpers_spec.rb b/spec/lib/gitlab/cache/helpers_spec.rb
new file mode 100644
index 00000000000..08e0d7729bd
--- /dev/null
+++ b/spec/lib/gitlab/cache/helpers_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Cache::Helpers, :use_clean_rails_redis_caching do
+ subject(:instance) { Class.new.include(described_class).new }
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+
+ let(:presenter) { MergeRequestSerializer.new(current_user: user, project: project) }
+
+ before do
+ # We have to stub #render because it's a Rails controller method that isn't
+ # available in this module on its own
+ allow(instance).to receive(:render) { |data| data }
+ allow(instance).to receive(:current_user) { user }
+ end
+
+ describe "#render_cached" do
+ subject do
+ instance.render_cached(presentable, **kwargs)
+ end
+
+ let(:kwargs) do
+ {
+ with: presenter,
+ project: project
+ }
+ end
+
+ context 'single object' do
+ let_it_be(:presentable) { create(:merge_request, source_project: project, source_branch: 'wip') }
+
+ it_behaves_like 'object cache helper'
+ end
+
+ context 'collection of objects' do
+ let_it_be(:presentable) do
+ [
+ create(:merge_request, source_project: project, source_branch: 'fix'),
+ create(:merge_request, source_project: project, source_branch: 'master')
+ ]
+ end
+
+ it_behaves_like 'collection cache helper'
+ end
+ end
+end
diff --git a/spec/lib/gitlab/cache/import/caching_spec.rb b/spec/lib/gitlab/cache/import/caching_spec.rb
index 8ce12f5d32e..f770960e27a 100644
--- a/spec/lib/gitlab/cache/import/caching_spec.rb
+++ b/spec/lib/gitlab/cache/import/caching_spec.rb
@@ -100,6 +100,30 @@ RSpec.describe Gitlab::Cache::Import::Caching, :clean_gitlab_redis_cache do
end
end
+ describe '.hash_add' do
+ it 'adds a value to a hash' do
+ described_class.hash_add('foo', 1, 1)
+ described_class.hash_add('foo', 2, 2)
+
+ key = described_class.cache_key_for('foo')
+ values = Gitlab::Redis::Cache.with { |r| r.hgetall(key) }
+
+ expect(values).to eq({ '1' => '1', '2' => '2' })
+ end
+ end
+
+ describe '.values_from_hash' do
+ it 'returns empty hash when the hash is empty' do
+ expect(described_class.values_from_hash('foo')).to eq({})
+ end
+
+ it 'returns the set list of values' do
+ described_class.hash_add('foo', 1, 1)
+
+ expect(described_class.values_from_hash('foo')).to eq({ '1' => '1' })
+ end
+ end
+
describe '.write_multiple' do
it 'sets multiple keys when key_prefix not set' do
mapping = { 'foo' => 10, 'bar' => 20 }
diff --git a/spec/lib/gitlab/changelog/config_spec.rb b/spec/lib/gitlab/changelog/config_spec.rb
index 2809843b832..a464c1e57e5 100644
--- a/spec/lib/gitlab/changelog/config_spec.rb
+++ b/spec/lib/gitlab/changelog/config_spec.rb
@@ -43,7 +43,7 @@ RSpec.describe Gitlab::Changelog::Config do
expect(config.date_format).to eq('foo')
expect(config.template)
- .to be_instance_of(Gitlab::Changelog::AST::Expressions)
+ .to be_instance_of(Gitlab::TemplateParser::AST::Expressions)
expect(config.categories).to eq({ 'foo' => 'bar' })
expect(config.tag_regex).to eq('foo')
@@ -53,6 +53,16 @@ RSpec.describe Gitlab::Changelog::Config do
expect { described_class.from_hash(project, 'categories' => 10) }
.to raise_error(Gitlab::Changelog::Error)
end
+
+ it 'raises a Gitlab::Changelog::Error when the template is invalid' do
+ invalid_template = <<~TPL
+ {% each {{foo}} %}
+ {% end %}
+ TPL
+
+ expect { described_class.from_hash(project, 'template' => invalid_template) }
+ .to raise_error(Gitlab::Changelog::Error)
+ end
end
describe '#contributor?' do
diff --git a/spec/lib/gitlab/checks/project_moved_spec.rb b/spec/lib/gitlab/checks/container_moved_spec.rb
index 469aea8d093..00ef5604e1d 100644
--- a/spec/lib/gitlab/checks/project_moved_spec.rb
+++ b/spec/lib/gitlab/checks/container_moved_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Checks::ProjectMoved, :clean_gitlab_redis_shared_state do
+RSpec.describe Gitlab::Checks::ContainerMoved, :clean_gitlab_redis_shared_state do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository, :wiki_repo, namespace: user.namespace) }
@@ -14,27 +14,48 @@ RSpec.describe Gitlab::Checks::ProjectMoved, :clean_gitlab_redis_shared_state do
subject { described_class.new(repository, git_user, protocol, redirect_path) }
describe '.fetch_message' do
+ let(:key) { "redirect_namespace:#{user.id}:#{project.repository.gl_repository}" }
+ let(:legacy_key) { "redirect_namespace:#{user.id}:#{project.id}" }
+
context 'with a redirect message queue' do
before do
subject.add_message
end
it 'returns the redirect message' do
- expect(described_class.fetch_message(user.id, project.id)).to eq(subject.message)
+ expect(described_class.fetch_message(user, project.repository)).to eq(subject.message)
end
it 'deletes the redirect message from redis' do
- expect(Gitlab::Redis::SharedState.with { |redis| redis.get("redirect_namespace:#{user.id}:#{project.id}") }).not_to be_nil
+ expect(Gitlab::Redis::SharedState.with { |redis| redis.get(key) }).not_to be_nil
- described_class.fetch_message(user.id, project.id)
+ described_class.fetch_message(user, project.repository)
- expect(Gitlab::Redis::SharedState.with { |redis| redis.get("redirect_namespace:#{user.id}:#{project.id}") }).to be_nil
+ expect(Gitlab::Redis::SharedState.with { |redis| redis.get(key) }).to be_nil
+ end
+
+ context 'with a message in the legacy key' do
+ before do
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.set(legacy_key, 'legacy message')
+ end
+ end
+
+ it 'returns and deletes the legacy message' do
+ expect(Gitlab::Redis::SharedState.with { |redis| redis.get(key) }).not_to be_nil
+ expect(Gitlab::Redis::SharedState.with { |redis| redis.get(legacy_key) }).not_to be_nil
+
+ expect(described_class.fetch_message(user, project.repository)).to eq('legacy message')
+
+ expect(Gitlab::Redis::SharedState.with { |redis| redis.get(key) }).to be_nil
+ expect(Gitlab::Redis::SharedState.with { |redis| redis.get(legacy_key) }).to be_nil
+ end
end
end
context 'with no redirect message queue' do
it 'returns nil' do
- expect(described_class.fetch_message(1, 2)).to be_nil
+ expect(described_class.fetch_message(user, project.repository)).to be_nil
end
end
end
@@ -58,7 +79,7 @@ RSpec.describe Gitlab::Checks::ProjectMoved, :clean_gitlab_redis_shared_state do
shared_examples 'returns redirect message' do
it do
message = <<~MSG
- Project '#{redirect_path}' was moved to '#{project.full_path}'.
+ #{container_label} '#{redirect_path}' was moved to '#{repository.container.full_path}'.
Please update your Git remote:
@@ -86,6 +107,7 @@ RSpec.describe Gitlab::Checks::ProjectMoved, :clean_gitlab_redis_shared_state do
context 'with project' do
it_behaves_like 'errors per protocol' do
+ let(:container_label) { 'Project' }
let(:http_url_to_repo) { project.http_url_to_repo }
let(:ssh_url_to_repo) { project.ssh_url_to_repo }
end
@@ -95,6 +117,7 @@ RSpec.describe Gitlab::Checks::ProjectMoved, :clean_gitlab_redis_shared_state do
let(:repository) { project.wiki.repository }
it_behaves_like 'errors per protocol' do
+ let(:container_label) { 'Project wiki' }
let(:http_url_to_repo) { project.wiki.http_url_to_repo }
let(:ssh_url_to_repo) { project.wiki.ssh_url_to_repo }
end
@@ -106,6 +129,7 @@ RSpec.describe Gitlab::Checks::ProjectMoved, :clean_gitlab_redis_shared_state do
let(:repository) { snippet.repository }
it_behaves_like 'errors per protocol' do
+ let(:container_label) { 'Project snippet' }
let(:http_url_to_repo) { snippet.http_url_to_repo }
let(:ssh_url_to_repo) { snippet.ssh_url_to_repo }
end
@@ -116,8 +140,10 @@ RSpec.describe Gitlab::Checks::ProjectMoved, :clean_gitlab_redis_shared_state do
let(:repository) { snippet.repository }
- it 'returns nil' do
- expect(subject.add_message).to be_nil
+ it_behaves_like 'errors per protocol' do
+ let(:container_label) { 'Personal snippet' }
+ let(:http_url_to_repo) { snippet.http_url_to_repo }
+ let(:ssh_url_to_repo) { snippet.ssh_url_to_repo }
end
end
end
diff --git a/spec/lib/gitlab/checks/project_created_spec.rb b/spec/lib/gitlab/checks/project_created_spec.rb
index 74e43b04b6b..6a2e4201030 100644
--- a/spec/lib/gitlab/checks/project_created_spec.rb
+++ b/spec/lib/gitlab/checks/project_created_spec.rb
@@ -13,27 +13,48 @@ RSpec.describe Gitlab::Checks::ProjectCreated, :clean_gitlab_redis_shared_state
subject { described_class.new(repository, git_user, 'http') }
describe '.fetch_message' do
+ let(:key) { "project_created:#{user.id}:#{project.repository.gl_repository}" }
+ let(:legacy_key) { "project_created:#{user.id}:#{project.id}" }
+
context 'with a project created message queue' do
before do
subject.add_message
end
it 'returns project created message' do
- expect(described_class.fetch_message(user.id, project.id)).to eq(subject.message)
+ expect(described_class.fetch_message(user, project.repository)).to eq(subject.message)
end
it 'deletes the project created message from redis' do
- expect(Gitlab::Redis::SharedState.with { |redis| redis.get("project_created:#{user.id}:#{project.id}") }).not_to be_nil
+ expect(Gitlab::Redis::SharedState.with { |redis| redis.get(key) }).not_to be_nil
+
+ described_class.fetch_message(user, project.repository)
+
+ expect(Gitlab::Redis::SharedState.with { |redis| redis.get(key) }).to be_nil
+ end
+
+ context 'with a message in the legacy key' do
+ before do
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.set(legacy_key, 'legacy message')
+ end
+ end
+
+ it 'returns and deletes the legacy message' do
+ expect(Gitlab::Redis::SharedState.with { |redis| redis.get(key) }).not_to be_nil
+ expect(Gitlab::Redis::SharedState.with { |redis| redis.get(legacy_key) }).not_to be_nil
- described_class.fetch_message(user.id, project.id)
+ expect(described_class.fetch_message(user, project.repository)).to eq('legacy message')
- expect(Gitlab::Redis::SharedState.with { |redis| redis.get("project_created:#{user.id}:#{project.id}") }).to be_nil
+ expect(Gitlab::Redis::SharedState.with { |redis| redis.get(key) }).to be_nil
+ expect(Gitlab::Redis::SharedState.with { |redis| redis.get(legacy_key) }).to be_nil
+ end
end
end
context 'with no project created message queue' do
it 'returns nil' do
- expect(described_class.fetch_message(1, 2)).to be_nil
+ expect(described_class.fetch_message(user, project.repository)).to be_nil
end
end
end
diff --git a/spec/lib/gitlab/ci/ansi2json/line_spec.rb b/spec/lib/gitlab/ci/ansi2json/line_spec.rb
index 909c0f1b3ea..d16750d19f1 100644
--- a/spec/lib/gitlab/ci/ansi2json/line_spec.rb
+++ b/spec/lib/gitlab/ci/ansi2json/line_spec.rb
@@ -76,30 +76,25 @@ RSpec.describe Gitlab::Ci::Ansi2json::Line do
end
describe '#set_section_duration' do
- shared_examples 'set_section_duration' do
- it 'sets and formats the section_duration' do
- subject.set_section_duration(75)
+ using RSpec::Parameterized::TableSyntax
- expect(subject.section_duration).to eq('01:15')
- end
+ where(:duration, :result) do
+ nil | '00:00'
+ 'string' | '00:00'
+ 0.seconds | '00:00'
+ 7.seconds | '00:07'
+ 75 | '01:15'
+ 1.minute + 15.seconds | '01:15'
+ 13.hours + 14.minutes + 15.seconds | '13:14:15'
+ 1.day + 13.hours + 14.minutes + 15.seconds | '37:14:15'
end
- context 'with default timezone' do
- it_behaves_like 'set_section_duration'
- end
+ with_them do
+ it do
+ subject.set_section_duration(duration)
- context 'with a timezone carrying minutes offset' do
- before do
- # The actual call by does use Time.at(...).utc that the following
- # rubocop rule (Rails/TimeZone) suggests, but for this specific
- # test's purposes we needed to mock at the Time.at call point.
-
- # rubocop:disable Rails/TimeZone
- allow(Time).to receive(:at).with(75).and_return(Time.at(75, in: '+05:30'))
- # rubocop:enable Rails/TimeZone
+ expect(subject.section_duration).to eq(result)
end
-
- it_behaves_like 'set_section_duration'
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/artifacts_spec.rb b/spec/lib/gitlab/ci/config/entry/artifacts_spec.rb
index 0e6d5b6c311..7476fc6c25f 100644
--- a/spec/lib/gitlab/ci/config/entry/artifacts_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/artifacts_spec.rb
@@ -143,51 +143,22 @@ RSpec.describe Gitlab::Ci::Config::Entry::Artifacts do
end
describe 'excluded artifacts' do
- context 'when configuration is valid and the feature is enabled' do
- before do
- stub_feature_flags(ci_artifacts_exclude: true)
- end
-
- context 'when configuration is valid' do
- let(:config) { { untracked: true, exclude: ['some/directory/'] } }
-
- it 'correctly parses the configuration' do
- expect(entry).to be_valid
- expect(entry.value).to eq config
- end
- end
+ context 'when configuration is valid' do
+ let(:config) { { untracked: true, exclude: ['some/directory/'] } }
- context 'when configuration is not valid' do
- let(:config) { { untracked: true, exclude: 1234 } }
-
- it 'returns an error' do
- expect(entry).not_to be_valid
- expect(entry.errors)
- .to include 'artifacts exclude should be an array of strings'
- end
+ it 'correctly parses the configuration' do
+ expect(entry).to be_valid
+ expect(entry.value).to eq config
end
end
- context 'when artifacts/exclude feature is disabled' do
- before do
- stub_feature_flags(ci_artifacts_exclude: false)
- end
-
- context 'when configuration has been provided' do
- let(:config) { { untracked: true, exclude: ['some/directory/'] } }
-
- it 'returns an error' do
- expect(entry).not_to be_valid
- expect(entry.errors).to include 'artifacts exclude feature is disabled'
- end
- end
+ context 'when configuration is not valid' do
+ let(:config) { { untracked: true, exclude: 1234 } }
- context 'when configuration is not present' do
- let(:config) { { untracked: true } }
-
- it 'is a valid configuration' do
- expect(entry).to be_valid
- end
+ it 'returns an error' do
+ expect(entry).not_to be_valid
+ expect(entry.errors)
+ .to include 'artifacts exclude should be an array of strings'
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/reports_spec.rb b/spec/lib/gitlab/ci/config/entry/reports_spec.rb
index d8907f7015b..12b8960eb32 100644
--- a/spec/lib/gitlab/ci/config/entry/reports_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/reports_spec.rb
@@ -40,6 +40,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Reports do
:secret_detection | 'gl-secret-detection-report.json'
:dependency_scanning | 'gl-dependency-scanning-report.json'
:container_scanning | 'gl-container-scanning-report.json'
+ :cluster_image_scanning | 'gl-cluster-image-scanning-report.json'
:dast | 'gl-dast-report.json'
:license_scanning | 'gl-license-scanning-report.json'
:performance | 'performance.json'
diff --git a/spec/lib/gitlab/ci/lint_spec.rb b/spec/lib/gitlab/ci/lint_spec.rb
index aaa3a7a8b9d..77f6608eb85 100644
--- a/spec/lib/gitlab/ci/lint_spec.rb
+++ b/spec/lib/gitlab/ci/lint_spec.rb
@@ -247,7 +247,7 @@ RSpec.describe Gitlab::Ci::Lint do
include_context 'advanced validations' do
it 'runs advanced logical validations' do
expect(subject).not_to be_valid
- expect(subject.errors).to eq(["'test' job needs 'build' job, but it was not added to the pipeline"])
+ expect(subject.errors).to eq(["'test' job needs 'build' job, but 'build' is not in any previous stage"])
end
end
diff --git a/spec/lib/gitlab/ci/matching/runner_matcher_spec.rb b/spec/lib/gitlab/ci/matching/runner_matcher_spec.rb
index d6492caa31a..6b3fef33182 100644
--- a/spec/lib/gitlab/ci/matching/runner_matcher_spec.rb
+++ b/spec/lib/gitlab/ci/matching/runner_matcher_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Matching::RunnerMatcher do
let(:dummy_attributes) do
{
+ runner_ids: [1],
runner_type: 'instance_type',
public_projects_minutes_cost_factor: 0,
private_projects_minutes_cost_factor: 1,
@@ -26,6 +27,8 @@ RSpec.describe Gitlab::Ci::Matching::RunnerMatcher do
context 'with attributes' do
let(:attributes) { dummy_attributes }
+ it { expect(matcher.runner_ids).to eq([1]) }
+
it { expect(matcher.runner_type).to eq('instance_type') }
it { expect(matcher.public_projects_minutes_cost_factor).to eq(0) }
diff --git a/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb
index 2e537f40692..687bb82a8ef 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb
@@ -203,18 +203,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
expect(rspec_variables['VAR1']).to eq('overridden var 1')
end
-
- context 'when the FF ci_workflow_rules_variables is disabled' do
- before do
- stub_feature_flags(ci_workflow_rules_variables: false)
- end
-
- it 'sends root variable' do
- run_chain
-
- expect(rspec_variables['VAR1']).to eq('var 1')
- end
- end
end
context 'N+1 queries' do
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
index 020f957cf70..58938251ca1 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
@@ -11,8 +11,9 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
let(:seed_context) { double(pipeline: pipeline, root_variables: root_variables) }
let(:attributes) { { name: 'rspec', ref: 'master', scheduling_type: :stage } }
let(:previous_stages) { [] }
+ let(:current_stage) { double(seeds_names: [attributes[:name]]) }
- let(:seed_build) { described_class.new(seed_context, attributes, previous_stages) }
+ let(:seed_build) { described_class.new(seed_context, attributes, previous_stages, current_stage) }
describe '#attributes' do
subject { seed_build.attributes }
@@ -90,6 +91,20 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
end
end
+ context 'with job:tags' do
+ let(:attributes) do
+ {
+ name: 'rspec',
+ ref: 'master',
+ job_variables: [{ key: 'VARIABLE', value: 'value', public: true }],
+ tag_list: ['static-tag', '$VARIABLE', '$NO_VARIABLE']
+ }
+ end
+
+ it { is_expected.to include(tag_list: ['static-tag', 'value', '$NO_VARIABLE']) }
+ it { is_expected.to include(yaml_variables: [{ key: 'VARIABLE', value: 'value', public: true }]) }
+ end
+
context 'with cache:key' do
let(:attributes) do
{
@@ -250,19 +265,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
{ key: 'VAR4', value: 'new var pipeline 4', public: true }]
)
end
-
- context 'when FF ci_workflow_rules_variables is disabled' do
- before do
- stub_feature_flags(ci_workflow_rules_variables: false)
- end
-
- it 'returns existing yaml variables' do
- expect(subject[:yaml_variables]).to match_array(
- [{ key: 'VAR2', value: 'var 2', public: true },
- { key: 'VAR3', value: 'var 3', public: true }]
- )
- end
- end
end
context 'when root_variables_inheritance is false' do
@@ -1092,7 +1094,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
it "returns an error" do
expect(subject.errors).to contain_exactly(
- "'rspec' job needs 'build' job, but it was not added to the pipeline")
+ "'rspec' job needs 'build' job, but 'build' is not in any previous stage")
end
context 'when the needed job is optional' do
@@ -1128,6 +1130,28 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
end
end
+ context 'when build job is part of the same stage' do
+ let(:current_stage) { double(seeds_names: [attributes[:name], 'build']) }
+
+ it 'is included' do
+ is_expected.to be_included
+ end
+
+ it 'does not have errors' do
+ expect(subject.errors).to be_empty
+ end
+
+ context 'when ci_same_stage_job_needs FF is disabled' do
+ before do
+ stub_feature_flags(ci_same_stage_job_needs: false)
+ end
+
+ it 'has errors' do
+ expect(subject.errors).to contain_exactly("'rspec' job needs 'build' job, but 'build' is not in any previous stage")
+ end
+ end
+ end
+
context 'when using 101 needs' do
let(:needs_count) { 101 }
diff --git a/spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb
index 21be8660def..3424e7d03a3 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb
@@ -34,6 +34,10 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Pipeline do
described_class.new(seed_context, stages_attributes)
end
+ before do
+ stub_feature_flags(ci_same_stage_job_needs: false)
+ end
+
describe '#stages' do
it 'returns the stage resources' do
stages = seed.stages
@@ -65,7 +69,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Pipeline do
}
expect(seed.errors).to contain_exactly(
- "'invalid_job' job needs 'non-existent' job, but it was not added to the pipeline")
+ "'invalid_job' job needs 'non-existent' job, but 'non-existent' is not in any previous stage")
end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline_object_hierarchy_spec.rb b/spec/lib/gitlab/ci/pipeline_object_hierarchy_spec.rb
index 89602fe79d1..62ff7fcafea 100644
--- a/spec/lib/gitlab/ci/pipeline_object_hierarchy_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline_object_hierarchy_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Gitlab::Ci::PipelineObjectHierarchy do
let_it_be(:cousin_parent) { create(:ci_pipeline, project: project) }
let_it_be(:cousin) { create(:ci_pipeline, project: project) }
let_it_be(:triggered_pipeline) { create(:ci_pipeline) }
+ let_it_be(:triggered_child_pipeline) { create(:ci_pipeline) }
before_all do
create_source_pipeline(ancestor, parent)
@@ -19,19 +20,20 @@ RSpec.describe Gitlab::Ci::PipelineObjectHierarchy do
create_source_pipeline(parent, child)
create_source_pipeline(cousin_parent, cousin)
create_source_pipeline(child, triggered_pipeline)
+ create_source_pipeline(triggered_pipeline, triggered_child_pipeline)
end
describe '#base_and_ancestors' do
it 'includes the base and its ancestors' do
relation = described_class.new(::Ci::Pipeline.where(id: parent.id),
- options: { same_project: true }).base_and_ancestors
+ options: { project_condition: :same }).base_and_ancestors
expect(relation).to contain_exactly(ancestor, parent)
end
it 'can find ancestors up to a certain level' do
relation = described_class.new(::Ci::Pipeline.where(id: child.id),
- options: { same_project: true }).base_and_ancestors(upto: ancestor.id)
+ options: { project_condition: :same }).base_and_ancestors(upto: ancestor.id)
expect(relation).to contain_exactly(parent, child)
end
@@ -39,7 +41,7 @@ RSpec.describe Gitlab::Ci::PipelineObjectHierarchy do
describe 'hierarchy_order option' do
let(:relation) do
described_class.new(::Ci::Pipeline.where(id: child.id),
- options: { same_project: true }).base_and_ancestors(hierarchy_order: hierarchy_order)
+ options: { project_condition: :same }).base_and_ancestors(hierarchy_order: hierarchy_order)
end
context ':asc' do
@@ -63,15 +65,32 @@ RSpec.describe Gitlab::Ci::PipelineObjectHierarchy do
describe '#base_and_descendants' do
it 'includes the base and its descendants' do
relation = described_class.new(::Ci::Pipeline.where(id: parent.id),
- options: { same_project: true }).base_and_descendants
+ options: { project_condition: :same }).base_and_descendants
expect(relation).to contain_exactly(parent, child)
end
+ context 'when project_condition: :different' do
+ it "includes the base and other project pipelines" do
+ relation = described_class.new(::Ci::Pipeline.where(id: child.id),
+ options: { project_condition: :different }).base_and_descendants
+
+ expect(relation).to contain_exactly(child, triggered_pipeline, triggered_child_pipeline)
+ end
+ end
+
+ context 'when project_condition: nil' do
+ it "includes the base and its descendants with other project pipeline" do
+ relation = described_class.new(::Ci::Pipeline.where(id: parent.id)).base_and_descendants
+
+ expect(relation).to contain_exactly(parent, child, triggered_pipeline, triggered_child_pipeline)
+ end
+ end
+
context 'when with_depth is true' do
let(:relation) do
described_class.new(::Ci::Pipeline.where(id: ancestor.id),
- options: { same_project: true }).base_and_descendants(with_depth: true)
+ options: { project_condition: :same }).base_and_descendants(with_depth: true)
end
it 'includes depth in the results' do
@@ -91,21 +110,51 @@ RSpec.describe Gitlab::Ci::PipelineObjectHierarchy do
end
describe '#all_objects' do
- it 'includes its ancestors and descendants' do
- relation = described_class.new(::Ci::Pipeline.where(id: parent.id),
- options: { same_project: true }).all_objects
+ context 'when passing ancestors_base' do
+ let(:options) { { project_condition: project_condition } }
+ let(:ancestors_base) { ::Ci::Pipeline.where(id: child.id) }
+
+ subject(:relation) { described_class.new(ancestors_base, options: options).all_objects }
- expect(relation).to contain_exactly(ancestor, parent, child)
+ context 'when project_condition: :same' do
+ let(:project_condition) { :same }
+
+ it "includes its ancestors and descendants" do
+ expect(relation).to contain_exactly(ancestor, parent, child)
+ end
+ end
+
+ context 'when project_condition: :different' do
+ let(:project_condition) { :different }
+
+ it "includes the base and other project pipelines" do
+ expect(relation).to contain_exactly(child, triggered_pipeline, triggered_child_pipeline)
+ end
+ end
end
- it 'returns all family tree' do
- relation = described_class.new(
- ::Ci::Pipeline.where(id: child.id),
- described_class.new(::Ci::Pipeline.where(id: child.id), options: { same_project: true }).base_and_ancestors,
- options: { same_project: true }
- ).all_objects
+ context 'when passing ancestors_base and descendants_base' do
+ let(:options) { { project_condition: project_condition } }
+ let(:ancestors_base) { ::Ci::Pipeline.where(id: child.id) }
+ let(:descendants_base) { described_class.new(::Ci::Pipeline.where(id: child.id), options: options).base_and_ancestors }
+
+ subject(:relation) { described_class.new(ancestors_base, descendants_base, options: options).all_objects }
+
+ context 'when project_condition: :same' do
+ let(:project_condition) { :same }
- expect(relation).to contain_exactly(ancestor, parent, cousin_parent, child, cousin)
+ it 'returns the full family tree' do
+ expect(relation).to contain_exactly(ancestor, parent, cousin_parent, child, cousin)
+ end
+ end
+
+ context 'when project_condition: :different' do
+ let(:project_condition) { :different }
+
+ it "includes the base and other project pipelines" do
+ expect(relation).to contain_exactly(child, triggered_pipeline, triggered_child_pipeline)
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/reports/security/identifier_spec.rb b/spec/lib/gitlab/ci/reports/security/identifier_spec.rb
new file mode 100644
index 00000000000..123730b6ee6
--- /dev/null
+++ b/spec/lib/gitlab/ci/reports/security/identifier_spec.rb
@@ -0,0 +1,125 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Reports::Security::Identifier do
+ using RSpec::Parameterized::TableSyntax
+
+ describe '#initialize' do
+ subject { described_class.new(**params) }
+
+ let(:params) do
+ {
+ external_type: 'brakeman_warning_code',
+ external_id: '107',
+ name: 'Brakeman Warning Code 107',
+ url: 'https://brakemanscanner.org/docs/warning_types/cross_site_scripting/'
+ }
+ end
+
+ context 'when all params are given' do
+ it 'initializes an instance' do
+ expect { subject }.not_to raise_error
+
+ expect(subject).to have_attributes(
+ external_type: 'brakeman_warning_code',
+ external_id: '107',
+ fingerprint: 'aa2254904a69148ad14b6ac5db25b355da9c987f',
+ name: 'Brakeman Warning Code 107',
+ url: 'https://brakemanscanner.org/docs/warning_types/cross_site_scripting/'
+ )
+ end
+ end
+
+ %i[external_type external_id name].each do |attribute|
+ context "when attribute #{attribute} is missing" do
+ before do
+ params.delete(attribute)
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(ArgumentError)
+ end
+ end
+ end
+ end
+
+ describe '#key' do
+ let(:identifier) { create(:ci_reports_security_identifier) }
+
+ subject { identifier.key }
+
+ it 'returns fingerprint' do
+ is_expected.to eq(identifier.fingerprint)
+ end
+ end
+
+ describe '#type_identifier?' do
+ where(:external_type, :expected_result) do
+ 'cve' | false
+ 'foo' | false
+ 'cwe' | true
+ 'wasc' | true
+ end
+
+ with_them do
+ let(:identifier) { create(:ci_reports_security_identifier, external_type: external_type) }
+
+ subject { identifier.type_identifier? }
+
+ it { is_expected.to be(expected_result) }
+ end
+ end
+
+ describe 'external type check methods' do
+ where(:external_type, :is_cve?, :is_cwe?, :is_wasc?) do
+ 'Foo' | false | false | false
+ 'Cve' | true | false | false
+ 'Cwe' | false | true | false
+ 'Wasc' | false | false | true
+ end
+
+ with_them do
+ let(:identifier) { create(:ci_reports_security_identifier, external_type: external_type) }
+
+ it 'returns the correct result for each type check method' do
+ expect(identifier.cve?).to be(is_cve?)
+ expect(identifier.cwe?).to be(is_cwe?)
+ expect(identifier.wasc?).to be(is_wasc?)
+ end
+ end
+ end
+
+ describe '#to_hash' do
+ let(:identifier) { create(:ci_reports_security_identifier) }
+
+ subject { identifier.to_hash }
+
+ it 'returns expected hash' do
+ is_expected.to eq({
+ external_type: identifier.external_type,
+ external_id: identifier.external_id,
+ fingerprint: identifier.fingerprint,
+ name: identifier.name,
+ url: identifier.url
+ })
+ end
+ end
+
+ describe '#==' do
+ where(:type_1, :id_1, :type_2, :id_2, :equal, :case_name) do
+ 'CVE' | '2018-1234' | 'CVE' | '2018-1234' | true | 'when external_type and external_id are equal'
+ 'CVE' | '2018-1234' | 'brakeman_code' | '2018-1234' | false | 'when external_type is different'
+ 'CVE' | '2018-1234' | 'CVE' | '2019-6789' | false | 'when external_id is different'
+ end
+
+ with_them do
+ let(:identifier_1) { create(:ci_reports_security_identifier, external_type: type_1, external_id: id_1) }
+ let(:identifier_2) { create(:ci_reports_security_identifier, external_type: type_2, external_id: id_2) }
+
+ it "returns #{params[:equal]}" do
+ expect(identifier_1 == identifier_2).to eq(equal)
+ end
+ end
+ end
+end
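The identifier spec above pins the public surface of a small value object: keyword construction, a digest-style fingerprint, hash serialization, and equality on external_type/external_id. A rough standalone sketch of that shape, for orientation only — the class name and the fingerprint recipe (SHA1 over "type:id") are assumptions, not taken from the diff:

# Minimal sketch of an identifier-style value object (not GitLab's implementation).
require 'digest'

class IdentifierSketch
  attr_reader :external_type, :external_id, :name, :url

  def initialize(external_type:, external_id:, name:, url: nil)
    @external_type = external_type
    @external_id = external_id
    @name = name
    @url = url
  end

  # Assumed recipe: a stable digest of the type/id pair.
  def fingerprint
    Digest::SHA1.hexdigest("#{external_type}:#{external_id}")
  end

  def to_hash
    { external_type: external_type, external_id: external_id,
      fingerprint: fingerprint, name: name, url: url }
  end

  def ==(other)
    external_type == other.external_type && external_id == other.external_id
  end
end

IdentifierSketch.new(external_type: 'cve', external_id: '2018-1234', name: 'CVE-2018-1234') ==
  IdentifierSketch.new(external_type: 'cve', external_id: '2018-1234', name: 'renamed') # => true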
diff --git a/spec/lib/gitlab/ci/reports/security/link_spec.rb b/spec/lib/gitlab/ci/reports/security/link_spec.rb
new file mode 100644
index 00000000000..7b55af27f4d
--- /dev/null
+++ b/spec/lib/gitlab/ci/reports/security/link_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Reports::Security::Link do
+ subject(:security_link) { described_class.new(name: 'CVE-2020-0202', url: 'https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-0202') }
+
+ describe '#initialize' do
+ context 'when all params are given' do
+ it 'initializes an instance' do
+ expect { subject }.not_to raise_error
+
+ expect(subject).to have_attributes(
+ name: 'CVE-2020-0202',
+ url: 'https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-0202'
+ )
+ end
+ end
+
+ describe '#to_hash' do
+ it 'returns expected hash' do
+ expect(security_link.to_hash).to eq(
+ {
+ name: 'CVE-2020-0202',
+ url: 'https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-0202'
+ }
+ )
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/reports/security/scan_spec.rb b/spec/lib/gitlab/ci/reports/security/scan_spec.rb
new file mode 100644
index 00000000000..b4968ff3a6e
--- /dev/null
+++ b/spec/lib/gitlab/ci/reports/security/scan_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Reports::Security::Scan do
+ describe '#initialize' do
+ subject { described_class.new(params.with_indifferent_access) }
+
+ let(:params) do
+ {
+ status: 'success',
+ type: 'dependency-scanning',
+ start_time: 'placeholder',
+ end_time: 'placeholder'
+ }
+ end
+
+ context 'when all params are given' do
+ it 'initializes an instance' do
+ expect { subject }.not_to raise_error
+
+ expect(subject).to have_attributes(
+ status: 'success',
+ type: 'dependency-scanning',
+ start_time: 'placeholder',
+ end_time: 'placeholder'
+ )
+ end
+ end
+
+ describe '#to_hash' do
+ subject { described_class.new(params.with_indifferent_access).to_hash }
+
+ it 'returns expected hash' do
+ is_expected.to eq(
+ {
+ status: 'success',
+ type: 'dependency-scanning',
+ start_time: 'placeholder',
+ end_time: 'placeholder'
+ }
+ )
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/reports/security/scanned_resource_spec.rb b/spec/lib/gitlab/ci/reports/security/scanned_resource_spec.rb
new file mode 100644
index 00000000000..e9daa05e8b9
--- /dev/null
+++ b/spec/lib/gitlab/ci/reports/security/scanned_resource_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Reports::Security::ScannedResource do
+ let(:url) { 'http://example.com:3001/1?foo=bar' }
+ let(:request_method) { 'GET' }
+
+ context 'when the URL is not a URI' do
+ subject { ::Gitlab::Ci::Reports::Security::ScannedResource.new(url, request_method) }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(ArgumentError)
+ end
+ end
+
+ context 'when the URL is valid' do
+ subject { ::Gitlab::Ci::Reports::Security::ScannedResource.new(URI.parse(url), request_method) }
+
+ it 'sets the URL attributes' do
+ expect(subject.request_method).to eq(request_method)
+ expect(subject.request_uri.to_s).to eq(url)
+ expect(subject.url_scheme).to eq('http')
+ expect(subject.url_host).to eq('example.com')
+ expect(subject.url_port).to eq(3001)
+ expect(subject.url_path).to eq('/1')
+ expect(subject.url_query).to eq('foo=bar')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/reports/security/scanner_spec.rb b/spec/lib/gitlab/ci/reports/security/scanner_spec.rb
new file mode 100644
index 00000000000..99f5d4723d3
--- /dev/null
+++ b/spec/lib/gitlab/ci/reports/security/scanner_spec.rb
@@ -0,0 +1,146 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Reports::Security::Scanner do
+ describe '#initialize' do
+ subject { described_class.new(**params) }
+
+ let(:params) do
+ {
+ external_id: 'brakeman',
+ name: 'Brakeman',
+ vendor: 'GitLab',
+ version: '1.0.1'
+ }
+ end
+
+ context 'when all params are given' do
+ it 'initializes an instance' do
+ expect { subject }.not_to raise_error
+
+ expect(subject).to have_attributes(
+ external_id: 'brakeman',
+ name: 'Brakeman',
+ vendor: 'GitLab'
+ )
+ end
+ end
+
+ %i[external_id name].each do |attribute|
+ context "when attribute #{attribute} is missing" do
+ before do
+ params.delete(attribute)
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(ArgumentError)
+ end
+ end
+ end
+ end
+
+ describe '#key' do
+ let(:scanner) { create(:ci_reports_security_scanner) }
+
+ subject { scanner.key }
+
+ it 'returns external_id' do
+ is_expected.to eq(scanner.external_id)
+ end
+ end
+
+ describe '#to_hash' do
+ let(:scanner) { create(:ci_reports_security_scanner) }
+
+ subject { scanner.to_hash }
+
+ it 'returns expected hash' do
+ is_expected.to eq({
+ external_id: scanner.external_id,
+ name: scanner.name,
+ vendor: scanner.vendor
+ })
+ end
+
+ context 'when vendor is not defined' do
+ let(:scanner) { create(:ci_reports_security_scanner, vendor: nil) }
+
+ it 'returns expected hash' do
+ is_expected.to eq({
+ external_id: scanner.external_id,
+ name: scanner.name
+ })
+ end
+ end
+ end
+
+ describe '#==' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:id_1, :id_2, :equal, :case_name) do
+ 'brakeman' | 'brakeman' | true | 'when external_id is equal'
+ 'brakeman' | 'bandit' | false | 'when external_id is different'
+ end
+
+ with_them do
+ let(:scanner_1) { create(:ci_reports_security_scanner, external_id: id_1) }
+ let(:scanner_2) { create(:ci_reports_security_scanner, external_id: id_2) }
+
+ it "returns #{params[:equal]}" do
+ expect(scanner_1 == scanner_2).to eq(equal)
+ end
+ end
+ end
+
+ describe '#<=>' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:scanner_1) { create(:ci_reports_security_scanner, **scanner_1_attributes) }
+ let(:scanner_2) { create(:ci_reports_security_scanner, **scanner_2_attributes) }
+
+ subject { scanner_1 <=> scanner_2 }
+
+ context 'when the `external_id` of the scanners are different' do
+ where(:scanner_1_attributes, :scanner_2_attributes, :expected_comparison_result) do
+ { external_id: 'bundler_audit', name: 'foo', vendor: 'bar' } | { external_id: 'retire.js', name: 'foo', vendor: 'bar' } | -1
+ { external_id: 'retire.js', name: 'foo', vendor: 'bar' } | { external_id: 'gemnasium', name: 'foo', vendor: 'bar' } | -1
+ { external_id: 'gemnasium', name: 'foo', vendor: 'bar' } | { external_id: 'gemnasium-maven', name: 'foo', vendor: 'bar' } | -1
+ { external_id: 'gemnasium-maven', name: 'foo', vendor: 'bar' } | { external_id: 'gemnasium-python', name: 'foo', vendor: 'bar' } | -1
+ { external_id: 'gemnasium-python', name: 'foo', vendor: 'bar' } | { external_id: 'bandit', name: 'foo', vendor: 'bar' } | 1
+ { external_id: 'bandit', name: 'foo', vendor: 'bar' } | { external_id: 'semgrep', name: 'foo', vendor: 'bar' } | -1
+ { external_id: 'semgrep', name: 'foo', vendor: 'bar' } | { external_id: 'unknown', name: 'foo', vendor: 'bar' } | -1
+ { external_id: 'gemnasium', name: 'foo', vendor: 'bar' } | { external_id: 'gemnasium', name: 'foo', vendor: nil } | 1
+ end
+
+ with_them do
+ it { is_expected.to eq(expected_comparison_result) }
+ end
+ end
+
+ context 'when the `external_id` of the scanners are equal' do
+ context 'when the `name` of the scanners are different' do
+ where(:scanner_1_attributes, :scanner_2_attributes, :expected_comparison_result) do
+ { external_id: 'gemnasium', name: 'a', vendor: 'bar' } | { external_id: 'gemnasium', name: 'b', vendor: 'bar' } | -1
+ { external_id: 'gemnasium', name: 'd', vendor: 'bar' } | { external_id: 'gemnasium', name: 'c', vendor: 'bar' } | 1
+ end
+
+ with_them do
+ it { is_expected.to eq(expected_comparison_result) }
+ end
+ end
+
+ context 'when the `name` of the scanners are equal' do
+ where(:scanner_1_attributes, :scanner_2_attributes, :expected_comparison_result) do
+ { external_id: 'gemnasium', name: 'foo', vendor: 'a' } | { external_id: 'gemnasium', name: 'foo', vendor: 'a' } | 0 # rubocop:disable Lint/BinaryOperatorWithIdenticalOperands
+ { external_id: 'gemnasium', name: 'foo', vendor: 'a' } | { external_id: 'gemnasium', name: 'foo', vendor: 'b' } | -1
+ { external_id: 'gemnasium', name: 'foo', vendor: 'b' } | { external_id: 'gemnasium', name: 'foo', vendor: 'a' } | 1
+ end
+
+ with_them do
+ it { is_expected.to eq(expected_comparison_result) }
+ end
+ end
+ end
+ end
+end
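The #<=> table above encodes a three-level comparison: a precedence of external_ids first, then name, then vendor, with a missing vendor sorting lowest. A standalone sketch of that pattern — the precedence list below is only inferred from the table rows, not GitLab's canonical ordering:

# Sketch of a precedence-based <=> (class name and list are illustrative assumptions).
ScannerSketch = Struct.new(:external_id, :name, :vendor, keyword_init: true) do
  include Comparable

  # Inferred from the rows above; unknown ids sort after every listed one.
  PRECEDENCE = %w[bundler_audit retire.js gemnasium gemnasium-maven bandit gemnasium-python semgrep].freeze

  def rank
    PRECEDENCE.index(external_id) || PRECEDENCE.size
  end

  def <=>(other)
    [rank, name, vendor.to_s] <=> [other.rank, other.name, other.vendor.to_s]
  end
end

a = ScannerSketch.new(external_id: 'gemnasium', name: 'foo', vendor: nil)
b = ScannerSketch.new(external_id: 'gemnasium', name: 'foo', vendor: 'bar')
a <=> b # => -1, the scanner without a vendor sorts first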
diff --git a/spec/lib/gitlab/ci/reports/test_case_spec.rb b/spec/lib/gitlab/ci/reports/test_case_spec.rb
index 668a475514e..d21359368b8 100644
--- a/spec/lib/gitlab/ci/reports/test_case_spec.rb
+++ b/spec/lib/gitlab/ci/reports/test_case_spec.rb
@@ -66,7 +66,7 @@ RSpec.describe Gitlab::Ci::Reports::TestCase, :aggregate_failures do
end
it '#attachment_url' do
- expect(attachment_test_case.attachment_url).to match(/file\/some\/path.png/)
+ expect(attachment_test_case.attachment_url).to match(%r{file/some/path.png})
end
end
diff --git a/spec/lib/gitlab/ci/status/composite_spec.rb b/spec/lib/gitlab/ci/status/composite_spec.rb
index 2b9523bd83d..cceabc35e85 100644
--- a/spec/lib/gitlab/ci/status/composite_spec.rb
+++ b/spec/lib/gitlab/ci/status/composite_spec.rb
@@ -82,25 +82,6 @@ RSpec.describe Gitlab::Ci::Status::Composite do
it_behaves_like 'compares status and warnings'
end
-
- context 'when FF ci_fix_pipeline_status_for_dag_needs_manual is disabled' do
- before do
- stub_feature_flags(ci_fix_pipeline_status_for_dag_needs_manual: false)
- end
-
- where(:build_statuses, :dag, :result, :has_warnings) do
- %i(success manual) | true | 'pending' | false
- %i(success manual) | false | 'success' | false
- end
-
- with_them do
- let(:all_statuses) do
- build_statuses.map { |status| @statuses_with_allow_failure[status] }
- end
-
- it_behaves_like 'compares status and warnings'
- end
- end
end
end
end
diff --git a/spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb
index 653b3be0b2a..e8aeb93a2ba 100644
--- a/spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe 'Deploy-ECS.gitlab-ci.yml' do
subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('AWS/Deploy-ECS') }
describe 'the created pipeline' do
- let(:default_branch) { 'master' }
+ let(:default_branch) { project.default_branch_or_main }
let(:pipeline_branch) { default_branch }
let(:project) { create(:project, :auto_devops, :custom_repo, files: { 'README.md' => '' }) }
let(:user) { project.owner }
@@ -38,7 +38,7 @@ RSpec.describe 'Deploy-ECS.gitlab-ci.yml' do
let(:pipeline_branch) { 'test_branch' }
before do
- project.repository.create_branch(pipeline_branch)
+ project.repository.create_branch(pipeline_branch, default_branch)
end
it_behaves_like 'no pipeline yaml error'
diff --git a/spec/lib/gitlab/ci/templates/managed_cluster_applications_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/managed_cluster_applications_gitlab_ci_yaml_spec.rb
index 0e458e01a2c..151880e27a3 100644
--- a/spec/lib/gitlab/ci/templates/managed_cluster_applications_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/managed_cluster_applications_gitlab_ci_yaml_spec.rb
@@ -12,7 +12,8 @@ RSpec.describe 'Managed-Cluster-Applications.gitlab-ci.yml' do
let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) }
let(:pipeline) { service.execute!(:push) }
let(:build_names) { pipeline.builds.pluck(:name) }
- let(:pipeline_branch) { 'master' }
+ let(:default_branch) { project.default_branch_or_main }
+ let(:pipeline_branch) { default_branch }
before do
stub_ci_pipeline_yaml_file(template.content)
@@ -28,7 +29,7 @@ RSpec.describe 'Managed-Cluster-Applications.gitlab-ci.yml' do
let(:pipeline_branch) { 'a_branch' }
before do
- project.repository.create_branch(pipeline_branch)
+ project.repository.create_branch(pipeline_branch, default_branch)
end
it 'has no jobs' do
diff --git a/spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb
index 4377f155d34..5ab3035486f 100644
--- a/spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe 'Terraform.latest.gitlab-ci.yml' do
subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('Terraform.latest') }
describe 'the created pipeline' do
- let(:default_branch) { 'master' }
+ let(:default_branch) { project.default_branch_or_main }
let(:pipeline_branch) { default_branch }
let(:project) { create(:project, :custom_repo, files: { 'README.md' => '' }) }
let(:user) { project.owner }
@@ -34,7 +34,7 @@ RSpec.describe 'Terraform.latest.gitlab-ci.yml' do
let(:pipeline_branch) { 'patch-1' }
before do
- project.repository.create_branch(pipeline_branch)
+ project.repository.create_branch(pipeline_branch, default_branch)
end
it 'does not create a deploy and a test job' do
diff --git a/spec/lib/gitlab/ci/yaml_processor/dag_spec.rb b/spec/lib/gitlab/ci/yaml_processor/dag_spec.rb
new file mode 100644
index 00000000000..af1b43f6b01
--- /dev/null
+++ b/spec/lib/gitlab/ci/yaml_processor/dag_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Ci::YamlProcessor::Dag do
+ let(:nodes) { {} }
+
+ subject(:result) { described_class.new(nodes).tsort }
+
+ context 'when it is a regular pipeline' do
+ let(:nodes) do
+ { 'job_c' => %w(job_b job_d), 'job_d' => %w(job_a), 'job_b' => %w(job_a), 'job_a' => %w() }
+ end
+
+ it 'returns ordered jobs' do
+ expect(result).to eq(%w(job_a job_b job_d job_c))
+ end
+ end
+
+ context 'when there is a circular dependency' do
+ let(:nodes) do
+ { 'job_a' => %w(job_c), 'job_b' => %w(job_a), 'job_c' => %w(job_b) }
+ end
+
+ it 'raises TSort::Cyclic' do
+ expect { result }.to raise_error(TSort::Cyclic, /topological sort failed/)
+ end
+ end
+
+ context 'when there is a missing job' do
+ let(:nodes) do
+ { 'job_a' => %w(job_d), 'job_b' => %w(job_a) }
+ end
+
+ it 'raises MissingNodeError' do
+ expect { result }.to raise_error(
+ Gitlab::Ci::YamlProcessor::Dag::MissingNodeError, 'node job_d is missing'
+ )
+ end
+ end
+end
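The new Dag spec relies on Ruby's stdlib TSort for both the ordering and the TSort::Cyclic error. A self-contained sketch of that pattern over a { job => needs } hash — the class name and the missing-node error are placeholders, not the code under test:

# Standalone TSort sketch mirroring the behaviours exercised above.
require 'tsort'

class DagSketch
  include TSort

  MissingNodeError = Class.new(StandardError)

  def initialize(nodes)
    @nodes = nodes # e.g. { 'job_b' => %w[job_a], 'job_a' => [] }
  end

  def tsort_each_node(&block)
    @nodes.each_key(&block)
  end

  def tsort_each_child(node, &block)
    raise MissingNodeError, "node #{node} is missing" unless @nodes.key?(node)

    @nodes[node].each(&block)
  end
end

DagSketch.new('job_c' => %w[job_b job_d], 'job_d' => %w[job_a], 'job_b' => %w[job_a], 'job_a' => []).tsort
# => ["job_a", "job_b", "job_d", "job_c"]  (needs come before the jobs that declare them)
# A cycle raises TSort::Cyclic, which the yaml_processor spec below surfaces as
# 'The pipeline has circular dependencies.'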
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index e8e44f884cf..19c2e34a0f0 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -595,7 +595,15 @@ module Gitlab
EOYML
end
- it_behaves_like 'has warnings and expected error', /build job: need test is not defined in prior stages/
+ it_behaves_like 'has warnings and expected error', /build job: need test is not defined in current or prior stages/
+
+ context 'with ci_same_stage_job_needs FF disabled' do
+ before do
+ stub_feature_flags(ci_same_stage_job_needs: false)
+ end
+
+ it_behaves_like 'has warnings and expected error', /build job: need test is not defined in prior stages/
+ end
end
end
end
@@ -1648,8 +1656,6 @@ module Gitlab
end
it 'populates a build options with complete artifacts configuration' do
- stub_feature_flags(ci_artifacts_exclude: true)
-
config = <<~YAML
test:
script: echo "Hello World"
@@ -1860,7 +1866,7 @@ module Gitlab
build2: { stage: 'build', script: 'test' },
test1: { stage: 'test', script: 'test', dependencies: dependencies },
test2: { stage: 'test', script: 'test' },
- deploy: { stage: 'test', script: 'test' }
+ deploy: { stage: 'deploy', script: 'test' }
}
end
@@ -1893,7 +1899,15 @@ module Gitlab
context 'dependencies to deploy' do
let(:dependencies) { ['deploy'] }
- it_behaves_like 'returns errors', 'test1 job: dependency deploy is not defined in prior stages'
+ it_behaves_like 'returns errors', 'test1 job: dependency deploy is not defined in current or prior stages'
+
+ context 'with ci_same_stage_job_needs FF disabled' do
+ before do
+ stub_feature_flags(ci_same_stage_job_needs: false)
+ end
+
+ it_behaves_like 'returns errors', 'test1 job: dependency deploy is not defined in prior stages'
+ end
end
context 'when a job depends on another job that references a not-yet defined stage' do
@@ -1918,7 +1932,7 @@ module Gitlab
}
end
- it_behaves_like 'returns errors', /is not defined in prior stages/
+ it_behaves_like 'returns errors', /is not defined in current or prior stages/
end
end
@@ -1933,7 +1947,7 @@ module Gitlab
parallel: { stage: 'build', script: 'test', parallel: 2 },
test1: { stage: 'test', script: 'test', needs: needs, dependencies: dependencies },
test2: { stage: 'test', script: 'test' },
- deploy: { stage: 'test', script: 'test' }
+ deploy: { stage: 'deploy', script: 'test' }
}
end
@@ -1943,6 +1957,45 @@ module Gitlab
it { is_expected.to be_valid }
end
+ context 'needs a job from the same stage' do
+ let(:needs) { %w(test2) }
+
+ it 'creates jobs with valid specifications' do
+ expect(subject.builds.size).to eq(7)
+ expect(subject.builds[0]).to eq(
+ stage: 'build',
+ stage_idx: 1,
+ name: 'build1',
+ only: { refs: %w[branches tags] },
+ options: {
+ script: ['test']
+ },
+ when: 'on_success',
+ allow_failure: false,
+ yaml_variables: [],
+ job_variables: [],
+ root_variables_inheritance: true,
+ scheduling_type: :stage
+ )
+ expect(subject.builds[4]).to eq(
+ stage: 'test',
+ stage_idx: 2,
+ name: 'test1',
+ only: { refs: %w[branches tags] },
+ options: { script: ['test'] },
+ needs_attributes: [
+ { name: 'test2', artifacts: true, optional: false }
+ ],
+ when: 'on_success',
+ allow_failure: false,
+ yaml_variables: [],
+ job_variables: [],
+ root_variables_inheritance: true,
+ scheduling_type: :dag
+ )
+ end
+ end
+
context 'needs two builds' do
let(:needs) { %w(build1 build2) }
@@ -2098,7 +2151,15 @@ module Gitlab
context 'needs to deploy' do
let(:needs) { ['deploy'] }
- it_behaves_like 'returns errors', 'test1 job: need deploy is not defined in prior stages'
+ it_behaves_like 'returns errors', 'test1 job: need deploy is not defined in current or prior stages'
+
+ context 'with ci_same_stage_job_needs FF disabled' do
+ before do
+ stub_feature_flags(ci_same_stage_job_needs: false)
+ end
+
+ it_behaves_like 'returns errors', 'test1 job: need deploy is not defined in prior stages'
+ end
end
context 'needs and dependencies that are mismatching' do
@@ -2769,6 +2830,29 @@ module Gitlab
it_behaves_like 'returns errors', 'jobs:rspec:parallel should be an integer or a hash'
end
+
+ context 'when the pipeline has a circular dependency' do
+ let(:config) do
+ <<~YAML
+ job_a:
+ stage: test
+ script: build
+ needs: [job_c]
+
+ job_b:
+ stage: test
+ script: test
+ needs: [job_a]
+
+ job_c:
+ stage: test
+ script: deploy
+ needs: [job_b]
+ YAML
+ end
+
+ it_behaves_like 'returns errors', 'The pipeline has circular dependencies.'
+ end
end
describe '#execute' do
diff --git a/spec/lib/gitlab/closing_issue_extractor_spec.rb b/spec/lib/gitlab/closing_issue_extractor_spec.rb
index 37349c30224..279486aa2a1 100644
--- a/spec/lib/gitlab/closing_issue_extractor_spec.rb
+++ b/spec/lib/gitlab/closing_issue_extractor_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Gitlab::ClosingIssueExtractor do
let_it_be(:project2) { create(:project) }
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:issue2) { create(:issue, project: project2) }
+
let(:reference) { issue.to_reference }
let(:cross_reference) { issue2.to_reference(project) }
@@ -351,6 +352,7 @@ RSpec.describe Gitlab::ClosingIssueExtractor do
context 'with multiple references' do
let_it_be(:other_issue) { create(:issue, project: project) }
let_it_be(:third_issue) { create(:issue, project: project) }
+
let(:reference2) { other_issue.to_reference }
let(:reference3) { third_issue.to_reference }
diff --git a/spec/lib/gitlab/composer/cache_spec.rb b/spec/lib/gitlab/composer/cache_spec.rb
index 00318ac14f9..071771960c6 100644
--- a/spec/lib/gitlab/composer/cache_spec.rb
+++ b/spec/lib/gitlab/composer/cache_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Gitlab::Composer::Cache do
let_it_be(:json) { { 'name' => package_name } }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :custom_repo, files: { 'composer.json' => json.to_json }, group: group) }
+
let(:branch) { project.repository.find_branch('master') }
let(:sha_regex) { /^[A-Fa-f0-9]{64}$/ }
diff --git a/spec/lib/gitlab/consul/internal_spec.rb b/spec/lib/gitlab/consul/internal_spec.rb
index 5889dd8b41d..28dcaac9ff2 100644
--- a/spec/lib/gitlab/consul/internal_spec.rb
+++ b/spec/lib/gitlab/consul/internal_spec.rb
@@ -134,6 +134,6 @@ RSpec.describe Gitlab::Consul::Internal do
end
def stub_consul_discover_prometheus
- stub_request(:get, /v1\/catalog\/service\/prometheus/)
+ stub_request(:get, %r{v1/catalog/service/prometheus})
end
end
diff --git a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
index d08057fb10a..8e63e771caa 100644
--- a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
+++ b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
@@ -61,6 +61,36 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
expect(directives['font_src']).to eq("'self' https://example.com")
end
end
+
+ context 'when CUSTOMER_PORTAL_URL is set' do
+ before do
+ stub_env('CUSTOMER_PORTAL_URL', 'https://customers.example.com')
+ end
+
+ context 'when in production' do
+ before do
+ allow(Rails).to receive(:env).and_return(ActiveSupport::StringInquirer.new('production'))
+ end
+
+ it 'does not add CUSTOMER_PORTAL_URL to CSP' do
+ directives = settings['directives']
+
+ expect(directives['frame_src']).to eq("'self' https://www.google.com/recaptcha/ https://www.recaptcha.net/ https://content.googleapis.com https://content-compute.googleapis.com https://content-cloudbilling.googleapis.com https://content-cloudresourcemanager.googleapis.com")
+ end
+ end
+
+ context 'when in development' do
+ before do
+ allow(Rails).to receive(:env).and_return(ActiveSupport::StringInquirer.new('development'))
+ end
+
+ it 'adds CUSTOMER_PORTAL_URL to CSP' do
+ directives = settings['directives']
+
+ expect(directives['frame_src']).to eq("'self' https://www.google.com/recaptcha/ https://www.recaptcha.net/ https://content.googleapis.com https://content-compute.googleapis.com https://content-cloudbilling.googleapis.com https://content-cloudresourcemanager.googleapis.com https://customers.example.com")
+ end
+ end
+ end
end
describe '#load' do
diff --git a/spec/lib/gitlab/database/background_migration/batched_job_spec.rb b/spec/lib/gitlab/database/background_migration/batched_job_spec.rb
index 2de784d3e16..0182e0f7651 100644
--- a/spec/lib/gitlab/database/background_migration/batched_job_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_job_spec.rb
@@ -124,4 +124,84 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d
end
end
end
+
+ describe '#split_and_retry!' do
+ let!(:job) { create(:batched_background_migration_job, batch_size: 10, min_value: 6, max_value: 15, status: :failed, attempts: 3) }
+
+ context 'when job can be split' do
+ before do
+ allow_next_instance_of(Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchingStrategy) do |batch_class|
+ allow(batch_class).to receive(:next_batch).with(anything, anything, batch_min_value: 6, batch_size: 5).and_return([6, 10])
+ end
+ end
+
+ it 'sets the correct attributes' do
+ expect { job.split_and_retry! }.to change { described_class.count }.by(1)
+
+ expect(job).to have_attributes(
+ min_value: 6,
+ max_value: 10,
+ batch_size: 5,
+ status: 'failed',
+ attempts: 0,
+ started_at: nil,
+ finished_at: nil,
+ metrics: {}
+ )
+
+ new_job = described_class.last
+
+ expect(new_job).to have_attributes(
+ batched_background_migration_id: job.batched_background_migration_id,
+ min_value: 11,
+ max_value: 15,
+ batch_size: 5,
+ status: 'failed',
+ attempts: 0,
+ started_at: nil,
+ finished_at: nil,
+ metrics: {}
+ )
+ expect(new_job.created_at).not_to eq(job.created_at)
+ end
+
+ it 'splits the job into retriable jobs' do
+ migration = job.batched_migration
+
+ expect { job.split_and_retry! }.to change { migration.batched_jobs.retriable.count }.from(0).to(2)
+ end
+ end
+
+ context 'when job is not failed' do
+ let!(:job) { create(:batched_background_migration_job, status: :succeeded) }
+
+ it 'raises an exception' do
+ expect { job.split_and_retry! }.to raise_error 'Only failed jobs can be split'
+ end
+ end
+
+ context 'when batch size is already 1' do
+ let!(:job) { create(:batched_background_migration_job, batch_size: 1, status: :failed) }
+
+ it 'raises an exception' do
+ expect { job.split_and_retry! }.to raise_error 'Job cannot be split further'
+ end
+ end
+
+ context 'when computed midpoint is larger than the max value of the batch' do
+ before do
+ allow_next_instance_of(Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchingStrategy) do |batch_class|
+ allow(batch_class).to receive(:next_batch).with(anything, anything, batch_min_value: 6, batch_size: 5).and_return([6, 16])
+ end
+ end
+
+ it 'lowers the batch size and resets the number of attempts' do
+ expect { job.split_and_retry! }.not_to change { described_class.count }
+
+ expect(job.batch_size).to eq(5)
+ expect(job.attempts).to eq(0)
+ expect(job.status).to eq('failed')
+ end
+ end
+ end
end
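The #split_and_retry! examples above describe the bookkeeping: a failed job keeps the lower half of its range with a halved batch size and reset attempts, a sibling record takes the upper half, and if the computed midpoint already covers the whole range only the batch size shrinks. A plain-Ruby sketch of that logic, detached from the real model (the midpoint is passed in here, whereas the model asks its batching strategy for it; persistence is omitted):

# Sketch of the split-and-retry bookkeeping; field names follow the spec above.
BatchedJobSketch = Struct.new(:min_value, :max_value, :batch_size, :status, :attempts, keyword_init: true) do
  def split_and_retry!(midpoint)
    raise 'Only failed jobs can be split' unless status == :failed
    raise 'Job cannot be split further' if batch_size == 1

    sibling = nil

    if midpoint < max_value
      # Carve the upper half of the range into a new, retriable job.
      sibling = self.class.new(min_value: midpoint + 1, max_value: max_value,
                               batch_size: batch_size / 2, status: :failed, attempts: 0)
      self.max_value = midpoint
    end

    self.batch_size /= 2
    self.attempts = 0
    sibling
  end
end

job = BatchedJobSketch.new(min_value: 6, max_value: 15, batch_size: 10, status: :failed, attempts: 3)
new_job = job.split_and_retry!(10)
# job:     6..10, batch_size 5, attempts 0
# new_job: 11..15, batch_size 5, attempts 0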
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
index 9f0493ab0d7..779e8e40c97 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
@@ -281,4 +281,152 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
end
end
end
+
+ describe '#finalize' do
+ let(:migration_wrapper) { Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper.new }
+
+ let(:migration_helpers) { ActiveRecord::Migration.new }
+ let(:table_name) { :_batched_migrations_test_table }
+ let(:column_name) { :some_id }
+ let(:job_arguments) { [:some_id, :some_id_convert_to_bigint] }
+
+ let(:migration_status) { :active }
+
+ let!(:batched_migration) do
+ create(
+ :batched_background_migration,
+ status: migration_status,
+ max_value: 8,
+ batch_size: 2,
+ sub_batch_size: 1,
+ interval: 0,
+ table_name: table_name,
+ column_name: column_name,
+ job_arguments: job_arguments,
+ pause_ms: 0
+ )
+ end
+
+ before do
+ migration_helpers.drop_table table_name, if_exists: true
+ migration_helpers.create_table table_name, id: false do |t|
+ t.integer :some_id, primary_key: true
+ t.integer :some_id_convert_to_bigint
+ end
+
+ migration_helpers.execute("INSERT INTO #{table_name} VALUES (1, 1), (2, 2), (3, NULL), (4, NULL), (5, NULL), (6, NULL), (7, NULL), (8, NULL)")
+ end
+
+ after do
+ migration_helpers.drop_table table_name, if_exists: true
+ end
+
+ context 'when the migration is not yet completed' do
+ before do
+ common_attributes = {
+ batched_migration: batched_migration,
+ batch_size: 2,
+ sub_batch_size: 1,
+ pause_ms: 0
+ }
+
+ create(:batched_background_migration_job, common_attributes.merge(status: :succeeded, min_value: 1, max_value: 2))
+ create(:batched_background_migration_job, common_attributes.merge(status: :pending, min_value: 3, max_value: 4))
+ create(:batched_background_migration_job, common_attributes.merge(status: :failed, min_value: 5, max_value: 6, attempts: 1))
+ end
+
+ it 'completes the migration' do
+ expect(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:find_for_configuration)
+ .with('CopyColumnUsingBackgroundMigrationJob', table_name, column_name, job_arguments)
+ .and_return(batched_migration)
+
+ expect(batched_migration).to receive(:finalizing!).and_call_original
+
+ expect do
+ runner.finalize(
+ batched_migration.job_class_name,
+ table_name,
+ column_name,
+ job_arguments
+ )
+ end.to change { batched_migration.reload.status }.from('active').to('finished')
+
+ expect(batched_migration.batched_jobs).to all(be_succeeded)
+
+ not_converted = migration_helpers.execute("SELECT * FROM #{table_name} WHERE some_id_convert_to_bigint IS NULL")
+ expect(not_converted.to_a).to be_empty
+ end
+
+ context 'when migration fails to complete' do
+ it 'raises an error' do
+ batched_migration.batched_jobs.failed.update_all(attempts: Gitlab::Database::BackgroundMigration::BatchedJob::MAX_ATTEMPTS)
+
+ expect do
+ runner.finalize(
+ batched_migration.job_class_name,
+ table_name,
+ column_name,
+ job_arguments
+ )
+ end.to raise_error described_class::FailedToFinalize
+ end
+ end
+ end
+
+ context 'when the migration is already finished' do
+ let(:migration_status) { :finished }
+
+ it 'is a no-op' do
+ expect(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:find_for_configuration)
+ .with('CopyColumnUsingBackgroundMigrationJob', table_name, column_name, job_arguments)
+ .and_return(batched_migration)
+
+ configuration = {
+ job_class_name: batched_migration.job_class_name,
+ table_name: table_name.to_sym,
+ column_name: column_name.to_sym,
+ job_arguments: job_arguments
+ }
+
+ expect(Gitlab::AppLogger).to receive(:warn)
+ .with("Batched background migration for the given configuration is already finished: #{configuration}")
+
+ expect(batched_migration).not_to receive(:finalizing!)
+
+ runner.finalize(
+ batched_migration.job_class_name,
+ table_name,
+ column_name,
+ job_arguments
+ )
+ end
+ end
+
+ context 'when the migration does not exist' do
+ it 'is a no-op' do
+ expect(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:find_for_configuration)
+ .with('CopyColumnUsingBackgroundMigrationJob', table_name, column_name, [:some, :other, :arguments])
+ .and_return(nil)
+
+ configuration = {
+ job_class_name: batched_migration.job_class_name,
+ table_name: table_name.to_sym,
+ column_name: column_name.to_sym,
+ job_arguments: [:some, :other, :arguments]
+ }
+
+ expect(Gitlab::AppLogger).to receive(:warn)
+ .with("Could not find batched background migration for the given configuration: #{configuration}")
+
+ expect(batched_migration).not_to receive(:finalizing!)
+
+ runner.finalize(
+ batched_migration.job_class_name,
+ table_name,
+ column_name,
+ [:some, :other, :arguments]
+ )
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
index d881390cd52..3207e97a639 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
@@ -10,11 +10,11 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
describe '#last_job' do
let!(:batched_migration) { create(:batched_background_migration) }
- let!(:batched_job1) { create(:batched_background_migration_job, batched_migration: batched_migration) }
- let!(:batched_job2) { create(:batched_background_migration_job, batched_migration: batched_migration) }
+ let!(:batched_job1) { create(:batched_background_migration_job, batched_migration: batched_migration, max_value: 1000) }
+ let!(:batched_job2) { create(:batched_background_migration_job, batched_migration: batched_migration, max_value: 500) }
- it 'returns the most recent (in order of id) batched job' do
- expect(batched_migration.last_job).to eq(batched_job2)
+ it 'returns the batched job with highest max_value' do
+ expect(batched_migration.last_job).to eq(batched_job1)
end
end
end
@@ -387,4 +387,22 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
expect(actual).to contain_exactly(migration)
end
end
+
+ describe '.find_for_configuration' do
+ it 'returns nil if no such migration exists' do
+ expect(described_class.find_for_configuration('MyJobClass', :projects, :id, [[:id], [:id_convert_to_bigint]])).to be_nil
+ end
+
+ it 'returns the migration when it exists' do
+ migration = create(
+ :batched_background_migration,
+ job_class_name: 'MyJobClass',
+ table_name: :projects,
+ column_name: :id,
+ job_arguments: [[:id], [:id_convert_to_bigint]]
+ )
+
+ expect(described_class.find_for_configuration('MyJobClass', :projects, :id, [[:id], [:id_convert_to_bigint]])).to eq(migration)
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/custom_structure_spec.rb b/spec/lib/gitlab/database/custom_structure_spec.rb
deleted file mode 100644
index 04ce1e4ad9a..00000000000
--- a/spec/lib/gitlab/database/custom_structure_spec.rb
+++ /dev/null
@@ -1,65 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Database::CustomStructure do
- let_it_be(:structure) { described_class.new }
- let_it_be(:filepath) { Rails.root.join(described_class::CUSTOM_DUMP_FILE) }
- let_it_be(:file_header) do
- <<~DATA
- -- this file tracks custom GitLab data, such as foreign keys referencing partitioned tables
- -- more details can be found in the issue: https://gitlab.com/gitlab-org/gitlab/-/issues/201872
- DATA
- end
-
- let(:io) { StringIO.new }
-
- before do
- allow(File).to receive(:open).with(filepath, anything).and_yield(io)
- end
-
- context 'when there are no partitioned_foreign_keys' do
- it 'dumps a valid structure file' do
- structure.dump
-
- expect(io.string).to eq("#{file_header}\n")
- end
- end
-
- context 'when there are partitioned_foreign_keys' do
- let!(:first_fk) do
- Gitlab::Database::PartitioningMigrationHelpers::PartitionedForeignKey.create(
- cascade_delete: true, from_table: 'issues', from_column: 'project_id', to_table: 'projects', to_column: 'id')
- end
-
- let!(:second_fk) do
- Gitlab::Database::PartitioningMigrationHelpers::PartitionedForeignKey.create(
- cascade_delete: false, from_table: 'issues', from_column: 'moved_to_id', to_table: 'issues', to_column: 'id')
- end
-
- it 'dumps a file with the command to restore the current keys' do
- structure.dump
-
- expect(io.string).to eq(<<~DATA)
- #{file_header}
- COPY partitioned_foreign_keys (id, cascade_delete, from_table, from_column, to_table, to_column) FROM STDIN;
- #{first_fk.id}\ttrue\tissues\tproject_id\tprojects\tid
- #{second_fk.id}\tfalse\tissues\tmoved_to_id\tissues\tid
- \\.
- DATA
-
- first_fk.destroy
- io.truncate(0)
- io.rewind
-
- structure.dump
-
- expect(io.string).to eq(<<~DATA)
- #{file_header}
- COPY partitioned_foreign_keys (id, cascade_delete, from_table, from_column, to_table, to_column) FROM STDIN;
- #{second_fk.id}\tfalse\tissues\tmoved_to_id\tissues\tid
- \\.
- DATA
- end
- end
-end
diff --git a/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
index 4705bb23885..b82b8d9a311 100644
--- a/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
@@ -306,26 +306,6 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
end
end
- describe '#all_caught_up?' do
- it 'returns true if all hosts caught up to the write location' do
- expect(lb.host_list.hosts).to all(receive(:caught_up?).with('foo').and_return(true))
-
- expect(lb.all_caught_up?('foo')).to eq(true)
- end
-
- it 'returns false if a host has not yet caught up' do
- expect(lb.host_list.hosts[0]).to receive(:caught_up?)
- .with('foo')
- .and_return(true)
-
- expect(lb.host_list.hosts[1]).to receive(:caught_up?)
- .with('foo')
- .and_return(false)
-
- expect(lb.all_caught_up?('foo')).to eq(false)
- end
- end
-
describe '#retry_with_backoff' do
it 'returns the value returned by the block' do
value = lb.retry_with_backoff { 10 }
@@ -488,7 +468,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
end
end
- describe '#select_caught_up_hosts' do
+ describe '#select_up_to_date_host' do
let(:location) { 'AB/12345'}
let(:hosts) { lb.host_list.hosts }
let(:set_host) { RequestStore[described_class::CACHE_KEY] }
diff --git a/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb
index 01367716518..9381ffa59fe 100644
--- a/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb
@@ -71,6 +71,11 @@ RSpec.describe Gitlab::Database::LoadBalancing::RackMiddleware, :redis do
expect(app).to receive(:call).with(env).and_return(10)
+ expect(ActiveSupport::Notifications)
+ .to receive(:instrument)
+ .with('web_transaction_completed.load_balancing')
+ .and_call_original
+
expect(middleware.call(env)).to eq(10)
end
end
diff --git a/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb
index 90051172fca..54050a87af0 100644
--- a/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb
@@ -5,12 +5,27 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
let(:middleware) { described_class.new }
+ let(:load_balancer) { double.as_null_object }
+ let(:worker_class) { 'TestDataConsistencyWorker' }
+ let(:job) { { "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e" } }
+
+ before do
+ skip_feature_flags_yaml_validation
+ skip_default_enabled_yaml_check
+ allow(::Gitlab::Database::LoadBalancing).to receive_message_chain(:proxy, :load_balancer).and_return(load_balancer)
+ end
+
after do
Gitlab::Database::LoadBalancing::Session.clear_session
end
+ def run_middleware
+ middleware.call(worker_class, job, nil, nil) {}
+ end
+
describe '#call' do
shared_context 'data consistency worker class' do |data_consistency, feature_flag|
+ let(:expected_consistency) { data_consistency }
let(:worker_class) do
Class.new do
def self.name
@@ -31,13 +46,23 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
end
end
+ shared_examples_for 'job data consistency' do
+ it "sets job data consistency" do
+ run_middleware
+
+ expect(job['worker_data_consistency']).to eq(expected_consistency)
+ end
+ end
+
shared_examples_for 'does not pass database locations' do
it 'does not pass database locations', :aggregate_failures do
- middleware.call(worker_class, job, double(:queue), redis_pool) { 10 }
+ run_middleware
expect(job['database_replica_location']).to be_nil
expect(job['database_write_location']).to be_nil
end
+
+ include_examples 'job data consistency'
end
shared_examples_for 'mark data consistency location' do |data_consistency|
@@ -45,7 +70,9 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
let(:location) { '0/D525E3A8' }
- context 'when feature flag load_balancing_for_sidekiq is disabled' do
+ context 'when feature flag is disabled' do
+ let(:expected_consistency) { :always }
+
before do
stub_feature_flags(load_balancing_for_test_data_consistency_worker: false)
end
@@ -59,12 +86,14 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
end
it 'passes database_replica_location' do
- expect(middleware).to receive_message_chain(:load_balancer, :host, "database_replica_location").and_return(location)
+ expect(load_balancer).to receive_message_chain(:host, "database_replica_location").and_return(location)
- middleware.call(worker_class, job, double(:queue), redis_pool) { 10 }
+ run_middleware
expect(job['database_replica_location']).to eq(location)
end
+
+ include_examples 'job data consistency'
end
context 'when write was performed' do
@@ -73,12 +102,14 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
end
it 'passes primary write location', :aggregate_failures do
- expect(middleware).to receive_message_chain(:load_balancer, :primary_write_location).and_return(location)
+ expect(load_balancer).to receive(:primary_write_location).and_return(location)
- middleware.call(worker_class, job, double(:queue), redis_pool) { 10 }
+ run_middleware
expect(job['database_write_location']).to eq(location)
end
+
+ include_examples 'job data consistency'
end
end
@@ -89,7 +120,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
end
it 'does not set database locations again' do
- middleware.call(worker_class, job, double(:queue), redis_pool) { 10 }
+ run_middleware
expect(job[provided_database_location]).to eq(old_location)
expect(job[other_location]).to be_nil
@@ -101,8 +132,8 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
let(:job) { { "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", provided_database_location => old_location } }
before do
- allow(middleware).to receive_message_chain(:load_balancer, :primary_write_location).and_return(new_location)
- allow(middleware).to receive_message_chain(:load_balancer, :database_replica_location).and_return(new_location)
+ allow(load_balancer).to receive(:primary_write_location).and_return(new_location)
+ allow(load_balancer).to receive(:database_replica_location).and_return(new_location)
end
context "when write was performed" do
@@ -114,24 +145,16 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
end
end
- let(:queue) { 'default' }
- let(:redis_pool) { Sidekiq.redis_pool }
- let(:worker_class) { 'TestDataConsistencyWorker' }
- let(:job) { { "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e" } }
-
- before do
- skip_feature_flags_yaml_validation
- skip_default_enabled_yaml_check
- end
-
context 'when worker cannot be constantized' do
let(:worker_class) { 'ActionMailer::MailDeliveryJob' }
+ let(:expected_consistency) { :always }
include_examples 'does not pass database locations'
end
context 'when worker class does not include ApplicationWorker' do
let(:worker_class) { ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper }
+ let(:expected_consistency) { :always }
include_examples 'does not pass database locations'
end
diff --git a/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb
index b7cd0caa922..14f240cd159 100644
--- a/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb
@@ -5,6 +5,19 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware do
let(:middleware) { described_class.new }
+ let(:load_balancer) { double.as_null_object }
+
+ let(:worker) { worker_class.new }
+ let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", 'database_replica_location' => '0/D525E3A8' } }
+
+ before do
+ skip_feature_flags_yaml_validation
+ skip_default_enabled_yaml_check
+ allow(::Gitlab::Database::LoadBalancing).to receive_message_chain(:proxy, :load_balancer).and_return(load_balancer)
+
+ replication_lag!(false)
+ end
+
after do
Gitlab::Database::LoadBalancing::Session.clear_session
end
@@ -31,30 +44,34 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware do
end
end
- shared_examples_for 'stick to the primary' do
+ shared_examples_for 'load balancing strategy' do |strategy|
+ it "sets load balancing strategy to #{strategy}" do
+ run_middleware do
+ expect(job['load_balancing_strategy']).to eq(strategy)
+ end
+ end
+ end
+
+ shared_examples_for 'stick to the primary' do |expected_strategy|
it 'sticks to the primary' do
- middleware.call(worker, job, double(:queue)) do
+ run_middleware do
expect(Gitlab::Database::LoadBalancing::Session.current.use_primary?).to be_truthy
end
end
+
+ include_examples 'load balancing strategy', expected_strategy
end
- shared_examples_for 'replica is up to date' do |location, data_consistency|
+ shared_examples_for 'replica is up to date' do |location, expected_strategy|
it 'does not stick to the primary', :aggregate_failures do
expect(middleware).to receive(:replica_caught_up?).with(location).and_return(true)
- middleware.call(worker, job, double(:queue)) do
+ run_middleware do
expect(Gitlab::Database::LoadBalancing::Session.current.use_primary?).not_to be_truthy
end
-
- expect(job[:database_chosen]).to eq('replica')
end
- it "updates job hash with data_consistency :#{data_consistency}" do
- middleware.call(worker, job, double(:queue)) do
- expect(job).to include(data_consistency: data_consistency.to_s)
- end
- end
+ include_examples 'load balancing strategy', expected_strategy
end
shared_examples_for 'sticks based on data consistency' do |data_consistency|
@@ -65,7 +82,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware do
stub_feature_flags(load_balancing_for_test_data_consistency_worker: false)
end
- include_examples 'stick to the primary'
+ include_examples 'stick to the primary', 'primary'
end
context 'when database replica location is set' do
@@ -75,7 +92,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware do
allow(middleware).to receive(:replica_caught_up?).and_return(true)
end
- it_behaves_like 'replica is up to date', '0/D525E3A8', data_consistency
+ it_behaves_like 'replica is up to date', '0/D525E3A8', 'replica'
end
context 'when database primary location is set' do
@@ -85,39 +102,26 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware do
allow(middleware).to receive(:replica_caught_up?).and_return(true)
end
- it_behaves_like 'replica is up to date', '0/D525E3A8', data_consistency
+ it_behaves_like 'replica is up to date', '0/D525E3A8', 'replica'
end
context 'when database location is not set' do
let(:job) { { 'job_id' => 'a180b47c-3fd6-41b8-81e9-34da61c3400e' } }
- it_behaves_like 'stick to the primary', nil
+ it_behaves_like 'stick to the primary', 'primary_no_wal'
end
end
- let(:queue) { 'default' }
- let(:redis_pool) { Sidekiq.redis_pool }
- let(:worker) { worker_class.new }
- let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", 'database_replica_location' => '0/D525E3A8' } }
- let(:block) { 10 }
-
- before do
- skip_feature_flags_yaml_validation
- skip_default_enabled_yaml_check
- allow(middleware).to receive(:clear)
- allow(Gitlab::Database::LoadBalancing::Session.current).to receive(:performed_write?).and_return(true)
- end
-
context 'when worker class does not include ApplicationWorker' do
let(:worker) { ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper.new }
- include_examples 'stick to the primary'
+ include_examples 'stick to the primary', 'primary'
end
context 'when worker data consistency is :always' do
include_context 'data consistency worker class', :always, :load_balancing_for_test_data_consistency_worker
- include_examples 'stick to the primary'
+ include_examples 'stick to the primary', 'primary'
end
context 'when worker data consistency is :delayed' do
@@ -125,8 +129,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware do
context 'when replica is not up to date' do
before do
- allow(::Gitlab::Database::LoadBalancing).to receive_message_chain(:proxy, :load_balancer, :release_host)
- allow(::Gitlab::Database::LoadBalancing).to receive_message_chain(:proxy, :load_balancer, :select_up_to_date_host).and_return(false)
+ replication_lag!(true)
end
around do |example|
@@ -137,38 +140,34 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware do
end
context 'when job is executed first' do
- it 'raise an error and retries', :aggregate_failures do
+ it 'raises an error and retries', :aggregate_failures do
expect do
process_job(job)
end.to raise_error(Sidekiq::JobRetry::Skip)
expect(job['error_class']).to eq('Gitlab::Database::LoadBalancing::SidekiqServerMiddleware::JobReplicaNotUpToDate')
- expect(job[:database_chosen]).to eq('retry')
end
+
+ include_examples 'load balancing strategy', 'retry'
end
context 'when job is retried' do
- it 'stick to the primary', :aggregate_failures do
+ before do
expect do
process_job(job)
end.to raise_error(Sidekiq::JobRetry::Skip)
-
- process_job(job)
- expect(job[:database_chosen]).to eq('primary')
end
- end
- context 'replica selection mechanism feature flag rollout' do
- before do
- stub_feature_flags(sidekiq_load_balancing_rotate_up_to_date_replica: false)
+ context 'and replica still lagging behind' do
+ include_examples 'stick to the primary', 'primary'
end
- it 'uses different implmentation' do
- expect(::Gitlab::Database::LoadBalancing).to receive_message_chain(:proxy, :load_balancer, :host, :caught_up?).and_return(false)
+ context 'and replica is now up-to-date' do
+ before do
+ replication_lag!(false)
+ end
- expect do
- process_job(job)
- end.to raise_error(Sidekiq::JobRetry::Skip)
+ it_behaves_like 'replica is up to date', '0/D525E3A8', 'replica_retried'
end
end
end
@@ -182,20 +181,24 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware do
allow(middleware).to receive(:replica_caught_up?).and_return(false)
end
- include_examples 'stick to the primary'
-
- it 'updates job hash with primary database chosen', :aggregate_failures do
- expect { |b| middleware.call(worker, job, double(:queue), &b) }.to yield_control
-
- expect(job[:database_chosen]).to eq('primary')
- end
+ include_examples 'stick to the primary', 'primary'
end
end
end
def process_job(job)
- Sidekiq::JobRetry.new.local(worker_class, job, queue) do
+ Sidekiq::JobRetry.new.local(worker_class, job, 'default') do
worker_class.process_job(job)
end
end
+
+ def run_middleware
+ middleware.call(worker, job, double(:queue)) { yield }
+ rescue described_class::JobReplicaNotUpToDate
+ # we silence errors here that cause the job to retry
+ end
+
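+ # Stubs the load balancer: select_up_to_date_host returns false while replication lag exists and true once it clears.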
+ def replication_lag!(exists)
+ allow(load_balancer).to receive(:select_up_to_date_host).and_return(!exists)
+ end
end
diff --git a/spec/lib/gitlab/database/load_balancing/sticking_spec.rb b/spec/lib/gitlab/database/load_balancing/sticking_spec.rb
index bf4e3756e0e..53445d73756 100644
--- a/spec/lib/gitlab/database/load_balancing/sticking_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/sticking_spec.rb
@@ -46,41 +46,68 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do
describe '.all_caught_up?' do
let(:lb) { double(:lb) }
+ let(:last_write_location) { 'foo' }
before do
allow(described_class).to receive(:load_balancer).and_return(lb)
- end
- it 'returns true if no write location could be found' do
allow(described_class).to receive(:last_write_location_for)
.with(:user, 42)
- .and_return(nil)
+ .and_return(last_write_location)
+ end
+
+ context 'when no write location could be found' do
+ let(:last_write_location) { nil }
- expect(lb).not_to receive(:all_caught_up?)
+ it 'returns true' do
+ allow(described_class).to receive(:last_write_location_for)
+ .with(:user, 42)
+ .and_return(nil)
+
+ expect(lb).not_to receive(:select_up_to_date_host)
- expect(described_class.all_caught_up?(:user, 42)).to eq(true)
+ expect(described_class.all_caught_up?(:user, 42)).to eq(true)
+ end
end
- it 'returns true, and unsticks if all secondaries have caught up' do
- allow(described_class).to receive(:last_write_location_for)
- .with(:user, 42)
- .and_return('foo')
+ context 'when all secondaries have caught up' do
+ before do
+ allow(lb).to receive(:select_up_to_date_host).with('foo').and_return(true)
+ end
- allow(lb).to receive(:all_caught_up?).with('foo').and_return(true)
+ it 'returns true, and unsticks' do
+ expect(described_class).to receive(:unstick).with(:user, 42)
- expect(described_class).to receive(:unstick).with(:user, 42)
+ expect(described_class.all_caught_up?(:user, 42)).to eq(true)
+ end
+
+ it 'notifies with the proper event payload' do
+ expect(ActiveSupport::Notifications)
+ .to receive(:instrument)
+ .with('caught_up_replica_pick.load_balancing', { result: true })
+ .and_call_original
- expect(described_class.all_caught_up?(:user, 42)).to eq(true)
+ described_class.all_caught_up?(:user, 42)
+ end
end
- it 'return false if the secondaries have not yet caught up' do
- allow(described_class).to receive(:last_write_location_for)
- .with(:user, 42)
- .and_return('foo')
+ context 'when the secondaries have not yet caught up' do
+ before do
+ allow(lb).to receive(:select_up_to_date_host).with('foo').and_return(false)
+ end
+
+ it 'returns false' do
+ expect(described_class.all_caught_up?(:user, 42)).to eq(false)
+ end
- allow(lb).to receive(:all_caught_up?).with('foo').and_return(false)
+ it 'notifies with the proper event payload' do
+ expect(ActiveSupport::Notifications)
+ .to receive(:instrument)
+ .with('caught_up_replica_pick.load_balancing', { result: false })
+ .and_call_original
- expect(described_class.all_caught_up?(:user, 42)).to eq(false)
+ described_class.all_caught_up?(:user, 42)
+ end
end
end
@@ -96,7 +123,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do
.with(:user, 42)
.and_return(nil)
- expect(lb).not_to receive(:all_caught_up?)
+ expect(lb).not_to receive(:select_up_to_date_host)
described_class.unstick_or_continue_sticking(:user, 42)
end
@@ -106,7 +133,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do
.with(:user, 42)
.and_return('foo')
- allow(lb).to receive(:all_caught_up?).with('foo').and_return(true)
+ allow(lb).to receive(:select_up_to_date_host).with('foo').and_return(true)
expect(described_class).to receive(:unstick).with(:user, 42)
@@ -118,7 +145,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do
.with(:user, 42)
.and_return('foo')
- allow(lb).to receive(:all_caught_up?).with('foo').and_return(false)
+ allow(lb).to receive(:select_up_to_date_host).with('foo').and_return(false)
expect(Gitlab::Database::LoadBalancing::Session.current)
.to receive(:use_primary!)
@@ -298,10 +325,22 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do
end
it 'returns true, selects hosts, and unsticks if any secondary has caught up' do
- expect(lb).to receive(:select_caught_up_hosts).and_return(true)
+ expect(lb).to receive(:select_up_to_date_host).and_return(true)
expect(described_class).to receive(:unstick).with(:project, 42)
expect(described_class.select_caught_up_replicas(:project, 42)).to be true
end
+
+ context 'when :load_balancing_refine_load_balancer_methods FF is disabled' do
+ before do
+ stub_feature_flags(load_balancing_refine_load_balancer_methods: false)
+ end
+
+ it 'returns true, selects hosts, and unsticks if any secondary has caught up' do
+ expect(lb).to receive(:select_caught_up_hosts).and_return(true)
+ expect(described_class).to receive(:unstick).with(:project, 42)
+ expect(described_class.select_caught_up_replicas(:project, 42)).to be true
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/database/load_balancing_spec.rb b/spec/lib/gitlab/database/load_balancing_spec.rb
index e7de7f2b43b..94717a10492 100644
--- a/spec/lib/gitlab/database/load_balancing_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing_spec.rb
@@ -142,10 +142,10 @@ RSpec.describe Gitlab::Database::LoadBalancing do
expect(described_class.enable?).to eq(false)
end
- it 'returns false when Sidekiq is being used' do
+ it 'returns true when Sidekiq is being used' do
allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true)
- expect(described_class.enable?).to eq(false)
+ expect(described_class.enable?).to eq(true)
end
it 'returns false when running inside a Rake task' do
@@ -170,18 +170,6 @@ RSpec.describe Gitlab::Database::LoadBalancing do
expect(described_class.enable?).to eq(true)
end
-
- context 'when ENABLE_LOAD_BALANCING_FOR_SIDEKIQ environment variable is set' do
- before do
- stub_env('ENABLE_LOAD_BALANCING_FOR_SIDEKIQ', 'true')
- end
-
- it 'returns true when Sidekiq is being used' do
- allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true)
-
- expect(described_class.enable?).to eq(true)
- end
- end
end
describe '.configured?' do
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index f0ea07646fb..8e25f9249fe 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -379,6 +379,37 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
allow(model).to receive(:transaction_open?).and_return(false)
end
+ context 'target column' do
+ it 'defaults to (id) when no custom target column is provided' do
+ expect(model).to receive(:with_lock_retries).and_call_original
+ expect(model).to receive(:disable_statement_timeout).and_call_original
+ expect(model).to receive(:statement_timeout_disabled?).and_return(false)
+ expect(model).to receive(:execute).with(/statement_timeout/)
+ expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
+ expect(model).to receive(:execute).ordered.with(/RESET ALL/)
+
+ expect(model).to receive(:execute).with(/REFERENCES users \(id\)/)
+
+ model.add_concurrent_foreign_key(:projects, :users,
+ column: :user_id)
+ end
+
+ it 'references the custom target column when provided' do
+ expect(model).to receive(:with_lock_retries).and_call_original
+ expect(model).to receive(:disable_statement_timeout).and_call_original
+ expect(model).to receive(:statement_timeout_disabled?).and_return(false)
+ expect(model).to receive(:execute).with(/statement_timeout/)
+ expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
+ expect(model).to receive(:execute).ordered.with(/RESET ALL/)
+
+ expect(model).to receive(:execute).with(/REFERENCES users \(id_convert_to_bigint\)/)
+
+ model.add_concurrent_foreign_key(:projects, :users,
+ column: :user_id,
+ target_column: :id_convert_to_bigint)
+ end
+ end
+
context 'ON DELETE statements' do
context 'on_delete: :nullify' do
it 'appends ON DELETE SET NULL statement' do
@@ -450,7 +481,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:foreign_key_exists?).with(:projects, :users,
column: :user_id,
on_delete: :cascade,
- name: name).and_return(true)
+ name: name,
+ primary_key: :id).and_return(true)
expect(model).not_to receive(:execute).with(/ADD CONSTRAINT/)
expect(model).to receive(:execute).with(/VALIDATE CONSTRAINT/)
@@ -479,6 +511,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
it 'does not create a new foreign key' do
expect(model).to receive(:foreign_key_exists?).with(:projects, :users,
name: :foo,
+ primary_key: :id,
on_delete: :cascade,
column: :user_id).and_return(true)
@@ -583,7 +616,15 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
describe '#foreign_key_exists?' do
before do
- key = ActiveRecord::ConnectionAdapters::ForeignKeyDefinition.new(:projects, :users, { column: :non_standard_id, name: :fk_projects_users_non_standard_id, on_delete: :cascade })
+ key = ActiveRecord::ConnectionAdapters::ForeignKeyDefinition.new(
+ :projects, :users,
+ {
+ column: :non_standard_id,
+ name: :fk_projects_users_non_standard_id,
+ on_delete: :cascade,
+ primary_key: :id
+ }
+ )
allow(model).to receive(:foreign_keys).with(:projects).and_return([key])
end
@@ -612,6 +653,11 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model.foreign_key_exists?(:projects, target_table, column: :user_id)).to be_falsey
end
+ it 'compares by target column name if given' do
+ expect(model.foreign_key_exists?(:projects, target_table, primary_key: :user_id)).to be_falsey
+ expect(model.foreign_key_exists?(:projects, target_table, primary_key: :id)).to be_truthy
+ end
+
it 'compares by foreign key name if given' do
expect(model.foreign_key_exists?(:projects, target_table, name: :non_existent_foreign_key_name)).to be_falsey
end
@@ -2007,7 +2053,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
table_name: :events,
column_name: :id,
- job_arguments: [[:id], [:id_convert_to_bigint]]
+ job_arguments: [["id"], ["id_convert_to_bigint"]]
}
end
@@ -2017,7 +2063,16 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
create(:batched_background_migration, configuration.merge(status: :active))
expect { ensure_batched_background_migration_is_finished }
- .to raise_error "Expected batched background migration for the given configuration to be marked as 'finished', but it is 'active': #{configuration}"
+ .to raise_error "Expected batched background migration for the given configuration to be marked as 'finished', but it is 'active':" \
+ "\t#{configuration}" \
+ "\n\n" \
+ "Finalize it manualy by running" \
+ "\n\n" \
+ "\tsudo gitlab-rake gitlab:background_migrations:finalize[CopyColumnUsingBackgroundMigrationJob,events,id,'[[\"id\"]\\, [\"id_convert_to_bigint\"]]']" \
+ "\n\n" \
+ "For more information, check the documentation" \
+ "\n\n" \
+ "\thttps://docs.gitlab.com/ee/user/admin_area/monitoring/background_migrations.html#database-migrations-failing-because-of-batched-background-migration-not-finished"
end
it 'does not raise error when migration exists and is marked as finished' do
@@ -2153,21 +2208,41 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
buffer.rewind
expect(buffer.read).to include("\"class\":\"#{model.class}\"")
end
+
+ using RSpec::Parameterized::TableSyntax
+
+ where(raise_on_exhaustion: [true, false])
+
+ with_them do
+ it 'sets raise_on_exhaustion as requested' do
+ with_lock_retries = double
+ expect(Gitlab::Database::WithLockRetries).to receive(:new).and_return(with_lock_retries)
+ expect(with_lock_retries).to receive(:run).with(raise_on_exhaustion: raise_on_exhaustion)
+
+ model.with_lock_retries(env: env, logger: in_memory_logger, raise_on_exhaustion: raise_on_exhaustion) { }
+ end
+ end
+
+ it 'does not raise on exhaustion by default' do
+ with_lock_retries = double
+ expect(Gitlab::Database::WithLockRetries).to receive(:new).and_return(with_lock_retries)
+ expect(with_lock_retries).to receive(:run).with(raise_on_exhaustion: false)
+
+ model.with_lock_retries(env: env, logger: in_memory_logger) { }
+ end
end
describe '#backfill_iids' do
include MigrationsHelpers
- before do
- stub_const('Issue', Class.new(ActiveRecord::Base))
-
- Issue.class_eval do
+ let(:issue_class) do
+ Class.new(ActiveRecord::Base) do
include AtomicInternalId
self.table_name = 'issues'
self.inheritance_column = :_type_disabled
- belongs_to :project, class_name: "::Project"
+ belongs_to :project, class_name: "::Project", inverse_of: nil
has_internal_id :iid,
scope: :project,
@@ -2190,7 +2265,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
model.backfill_iids('issues')
- issue = Issue.create!(project_id: project.id)
+ issue = issue_class.create!(project_id: project.id)
expect(issue.iid).to eq(1)
end
@@ -2201,7 +2276,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
model.backfill_iids('issues')
- issue_b = Issue.create!(project_id: project.id)
+ issue_b = issue_class.create!(project_id: project.id)
expect(issue_a.reload.iid).to eq(1)
expect(issue_b.iid).to eq(2)
@@ -2216,8 +2291,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
model.backfill_iids('issues')
- issue_a = Issue.create!(project_id: project_a.id)
- issue_b = Issue.create!(project_id: project_b.id)
+ issue_a = issue_class.create!(project_id: project_a.id)
+ issue_b = issue_class.create!(project_id: project_b.id)
expect(issue_a.iid).to eq(2)
expect(issue_b.iid).to eq(3)
@@ -2231,7 +2306,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
model.backfill_iids('issues')
- issue_b = Issue.create!(project_id: project_b.id)
+ issue_b = issue_class.create!(project_id: project_b.id)
expect(issue_a.reload.iid).to eq(1)
expect(issue_b.reload.iid).to eq(1)
@@ -2951,4 +3026,12 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
end
+
+ describe '#rename_constraint' do
+ it "executes the statement to rename constraint" do
+ expect(model).to receive(:execute).with /ALTER TABLE "test_table"\nRENAME CONSTRAINT "fk_old_name" TO "fk_new_name"/
+
+ model.rename_constraint(:test_table, :fk_old_name, :fk_new_name)
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb b/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb
index 885eef5723e..f9dca371398 100644
--- a/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb
@@ -71,6 +71,18 @@ RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy do
model.create!(created_at: Date.parse('2020-06-15'))
end
+ context 'when pruning partitions before June 2020' do
+ subject { described_class.new(model, partitioning_key, retain_for: 1.month).missing_partitions }
+
+ it 'does not include the missing partition from May 2020 because it would be dropped' do
+ expect(subject).not_to include(Gitlab::Database::Partitioning::TimePartition.new(model.table_name, '2020-05-01', '2020-06-01'))
+ end
+
+ it 'detects the missing partition for 1 month ago (July 2020)' do
+ expect(subject).to include(Gitlab::Database::Partitioning::TimePartition.new(model.table_name, '2020-07-01', '2020-08-01'))
+ end
+ end
+
it 'detects the gap and the missing partition in May 2020' do
expect(subject).to include(Gitlab::Database::Partitioning::TimePartition.new(model.table_name, '2020-05-01', '2020-06-01'))
end
@@ -108,6 +120,19 @@ RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy do
SQL
end
+ context 'when pruning partitions before June 2020' do
+ subject { described_class.new(model, partitioning_key, retain_for: 1.month).missing_partitions }
+
+ it 'detects exactly the set of partitions from June 2020 to March 2021' do
+ months = %w[2020-07-01 2020-08-01 2020-09-01 2020-10-01 2020-11-01 2020-12-01 2021-01-01 2021-02-01 2021-03-01]
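+ # Zip each month with its successor to build the expected consecutive [from, to) partition ranges.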
+ expected = months[..-2].zip(months.drop(1)).map do |(from, to)|
+ Gitlab::Database::Partitioning::TimePartition.new(model.table_name, from, to)
+ end
+
+ expect(subject).to match_array(expected)
+ end
+ end
+
it 'detects the missing catch-all partition at the beginning' do
expect(subject).to include(Gitlab::Database::Partitioning::TimePartition.new(model.table_name, nil, '2020-08-01'))
end
@@ -150,4 +175,100 @@ RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy do
end
end
end
+
+ describe '#extra_partitions' do
+ let(:model) do
+ Class.new(ActiveRecord::Base) do
+ self.table_name = 'partitioned_test'
+ self.primary_key = :id
+ end
+ end
+
+ let(:partitioning_key) { :created_at }
+ let(:table_name) { :partitioned_test }
+
+ around do |example|
+ travel_to(Date.parse('2020-08-22')) { example.run }
+ end
+
+ describe 'with existing partitions' do
+ before do
+ ActiveRecord::Base.connection.execute(<<~SQL)
+ CREATE TABLE #{table_name}
+ (id serial not null, created_at timestamptz not null, PRIMARY KEY (id, created_at))
+ PARTITION BY RANGE (created_at);
+
+ CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.partitioned_test_000000
+ PARTITION OF #{table_name}
+ FOR VALUES FROM (MINVALUE) TO ('2020-05-01');
+
+ CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.partitioned_test_202005
+ PARTITION OF #{table_name}
+ FOR VALUES FROM ('2020-05-01') TO ('2020-06-01');
+
+ CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.partitioned_test_202006
+ PARTITION OF #{table_name}
+ FOR VALUES FROM ('2020-06-01') TO ('2020-07-01')
+ SQL
+ end
+
+ context 'without a time retention policy' do
+ subject { described_class.new(model, partitioning_key).extra_partitions }
+
+ it 'has no extra partitions to prune' do
+ expect(subject).to eq([])
+ end
+ end
+
+ context 'with a time retention policy that excludes no partitions' do
+ subject { described_class.new(model, partitioning_key, retain_for: 4.months).extra_partitions }
+
+ it 'has no extra partitions to prune' do
+ expect(subject).to eq([])
+ end
+ end
+
+ context 'with a time retention policy of 3 months' do
+ subject { described_class.new(model, partitioning_key, retain_for: 3.months).extra_partitions }
+
+ it 'prunes the unbounded partition ending 2020-05-01' do
+ min_value_to_may = Gitlab::Database::Partitioning::TimePartition.new(model.table_name, nil, '2020-05-01',
+ partition_name: 'partitioned_test_000000')
+
+ expect(subject).to contain_exactly(min_value_to_may)
+ end
+
+ context 'when the feature flag is toggled off' do
+ before do
+ stub_feature_flags(partition_pruning_dry_run: false)
+ end
+
+ it 'is empty' do
+ expect(subject).to eq([])
+ end
+ end
+ end
+
+ context 'with a time retention policy of 2 months' do
+ subject { described_class.new(model, partitioning_key, retain_for: 2.months).extra_partitions }
+
+ it 'prunes the unbounded partition and the partition for May-June' do
+ expect(subject).to contain_exactly(
+ Gitlab::Database::Partitioning::TimePartition.new(model.table_name, nil, '2020-05-01', partition_name: 'partitioned_test_000000'),
+ Gitlab::Database::Partitioning::TimePartition.new(model.table_name, '2020-05-01', '2020-06-01', partition_name: 'partitioned_test_202005')
+ )
+ end
+
+ context 'when the feature flag is toggled off' do
+ before do
+ stub_feature_flags(partition_pruning_dry_run: false)
+ end
+
+ it 'is empty' do
+ expect(subject).to eq([])
+ end
+ end
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/partitioning/partition_creator_spec.rb b/spec/lib/gitlab/database/partitioning/partition_creator_spec.rb
deleted file mode 100644
index ec89f2ed61c..00000000000
--- a/spec/lib/gitlab/database/partitioning/partition_creator_spec.rb
+++ /dev/null
@@ -1,96 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Database::Partitioning::PartitionCreator do
- include Database::PartitioningHelpers
- include ExclusiveLeaseHelpers
-
- describe '.register' do
- let(:model) { double(partitioning_strategy: nil) }
-
- it 'remembers registered models' do
- expect { described_class.register(model) }.to change { described_class.models }.to include(model)
- end
- end
-
- describe '#create_partitions (mocked)' do
- subject { described_class.new(models).create_partitions }
-
- let(:models) { [model] }
- let(:model) { double(partitioning_strategy: partitioning_strategy, table_name: table) }
- let(:partitioning_strategy) { double(missing_partitions: partitions) }
- let(:table) { "some_table" }
-
- before do
- allow(ActiveRecord::Base.connection).to receive(:table_exists?).and_call_original
- allow(ActiveRecord::Base.connection).to receive(:table_exists?).with(table).and_return(true)
- allow(ActiveRecord::Base.connection).to receive(:execute).and_call_original
-
- stub_exclusive_lease(described_class::LEASE_KEY % table, timeout: described_class::LEASE_TIMEOUT)
- end
-
- let(:partitions) do
- [
- instance_double(Gitlab::Database::Partitioning::TimePartition, table: 'bar', partition_name: 'foo', to_sql: "SELECT 1"),
- instance_double(Gitlab::Database::Partitioning::TimePartition, table: 'bar', partition_name: 'foo2', to_sql: "SELECT 2")
- ]
- end
-
- it 'creates the partition' do
- expect(ActiveRecord::Base.connection).to receive(:execute).with(partitions.first.to_sql)
- expect(ActiveRecord::Base.connection).to receive(:execute).with(partitions.second.to_sql)
-
- subject
- end
-
- context 'error handling with 2 models' do
- let(:models) do
- [
- double(partitioning_strategy: strategy1, table_name: table),
- double(partitioning_strategy: strategy2, table_name: table)
- ]
- end
-
- let(:strategy1) { double('strategy1', missing_partitions: nil) }
- let(:strategy2) { double('strategy2', missing_partitions: partitions) }
-
- it 'still creates partitions for the second table' do
- expect(strategy1).to receive(:missing_partitions).and_raise('this should never happen (tm)')
- expect(ActiveRecord::Base.connection).to receive(:execute).with(partitions.first.to_sql)
- expect(ActiveRecord::Base.connection).to receive(:execute).with(partitions.second.to_sql)
-
- subject
- end
- end
- end
-
- describe '#create_partitions' do
- subject { described_class.new([my_model]).create_partitions }
-
- let(:connection) { ActiveRecord::Base.connection }
- let(:my_model) do
- Class.new(ApplicationRecord) do
- include PartitionedTable
-
- self.table_name = 'my_model_example_table'
-
- partitioned_by :created_at, strategy: :monthly
- end
- end
-
- before do
- connection.execute(<<~SQL)
- CREATE TABLE my_model_example_table
- (id serial not null, created_at timestamptz not null, primary key (id, created_at))
- PARTITION BY RANGE (created_at);
- SQL
- end
-
- it 'creates partitions' do
- expect { subject }.to change { find_partitions(my_model.table_name, schema: Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA).size }.from(0)
-
- subject
- end
- end
-end
diff --git a/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb b/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb
new file mode 100644
index 00000000000..903a41d6dd2
--- /dev/null
+++ b/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb
@@ -0,0 +1,161 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Partitioning::PartitionManager do
+ include Database::PartitioningHelpers
+ include Database::TableSchemaHelpers
+ include ExclusiveLeaseHelpers
+
+ describe '.register' do
+ let(:model) { double(partitioning_strategy: nil) }
+
+ it 'remembers registered models' do
+ expect { described_class.register(model) }.to change { described_class.models }.to include(model)
+ end
+ end
+
+ context 'creating partitions (mocked)' do
+ subject(:sync_partitions) { described_class.new(models).sync_partitions }
+
+ let(:models) { [model] }
+ let(:model) { double(partitioning_strategy: partitioning_strategy, table_name: table) }
+ let(:partitioning_strategy) { double(missing_partitions: partitions, extra_partitions: []) }
+ let(:table) { "some_table" }
+
+ before do
+ allow(ActiveRecord::Base.connection).to receive(:table_exists?).and_call_original
+ allow(ActiveRecord::Base.connection).to receive(:table_exists?).with(table).and_return(true)
+ allow(ActiveRecord::Base.connection).to receive(:execute).and_call_original
+
+ stub_exclusive_lease(described_class::MANAGEMENT_LEASE_KEY % table, timeout: described_class::LEASE_TIMEOUT)
+ end
+
+ let(:partitions) do
+ [
+ instance_double(Gitlab::Database::Partitioning::TimePartition, table: 'bar', partition_name: 'foo', to_sql: "SELECT 1"),
+ instance_double(Gitlab::Database::Partitioning::TimePartition, table: 'bar', partition_name: 'foo2', to_sql: "SELECT 2")
+ ]
+ end
+
+ it 'creates the partition' do
+ expect(ActiveRecord::Base.connection).to receive(:execute).with(partitions.first.to_sql)
+ expect(ActiveRecord::Base.connection).to receive(:execute).with(partitions.second.to_sql)
+
+ sync_partitions
+ end
+
+ context 'error handling with 2 models' do
+ let(:models) do
+ [
+ double(partitioning_strategy: strategy1, table_name: table),
+ double(partitioning_strategy: strategy2, table_name: table)
+ ]
+ end
+
+ let(:strategy1) { double('strategy1', missing_partitions: nil, extra_partitions: []) }
+ let(:strategy2) { double('strategy2', missing_partitions: partitions, extra_partitions: []) }
+
+ it 'still creates partitions for the second table' do
+ expect(strategy1).to receive(:missing_partitions).and_raise('this should never happen (tm)')
+ expect(ActiveRecord::Base.connection).to receive(:execute).with(partitions.first.to_sql)
+ expect(ActiveRecord::Base.connection).to receive(:execute).with(partitions.second.to_sql)
+
+ sync_partitions
+ end
+ end
+ end
+
+ context 'creating partitions' do
+ subject(:sync_partitions) { described_class.new([my_model]).sync_partitions }
+
+ let(:connection) { ActiveRecord::Base.connection }
+ let(:my_model) do
+ Class.new(ApplicationRecord) do
+ include PartitionedTable
+
+ self.table_name = 'my_model_example_table'
+
+ partitioned_by :created_at, strategy: :monthly
+ end
+ end
+
+ before do
+ connection.execute(<<~SQL)
+ CREATE TABLE my_model_example_table
+ (id serial not null, created_at timestamptz not null, primary key (id, created_at))
+ PARTITION BY RANGE (created_at);
+ SQL
+ end
+
+ it 'creates partitions' do
+ expect { sync_partitions }.to change { find_partitions(my_model.table_name, schema: Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA).size }.from(0)
+ end
+ end
+
+ context 'detaching partitions (mocked)' do
+ subject(:sync_partitions) { manager.sync_partitions }
+
+ let(:manager) { described_class.new(models) }
+ let(:models) { [model] }
+ let(:model) { double(partitioning_strategy: partitioning_strategy, table_name: table)}
+ let(:partitioning_strategy) { double(extra_partitions: extra_partitions, missing_partitions: []) }
+ let(:table) { "foo" }
+
+ before do
+ allow(ActiveRecord::Base.connection).to receive(:table_exists?).and_call_original
+ allow(ActiveRecord::Base.connection).to receive(:table_exists?).with(table).and_return(true)
+
+ stub_exclusive_lease(described_class::MANAGEMENT_LEASE_KEY % table, timeout: described_class::LEASE_TIMEOUT)
+ end
+
+ let(:extra_partitions) do
+ [
+ instance_double(Gitlab::Database::Partitioning::TimePartition, table: table, partition_name: 'foo1'),
+ instance_double(Gitlab::Database::Partitioning::TimePartition, table: table, partition_name: 'foo2')
+ ]
+ end
+
+ context 'with the partition_pruning_dry_run feature flag enabled' do
+ before do
+ stub_feature_flags(partition_pruning_dry_run: true)
+ end
+
+ it 'detaches each extra partition' do
+ extra_partitions.each { |p| expect(manager).to receive(:detach_one_partition).with(p) }
+
+ sync_partitions
+ end
+
+ context 'error handling' do
+ let(:models) do
+ [
+ double(partitioning_strategy: error_strategy, table_name: table),
+ model
+ ]
+ end
+
+ let(:error_strategy) { double(extra_partitions: nil, missing_partitions: []) }
+
+ it 'still drops partitions for the other model' do
+ expect(error_strategy).to receive(:extra_partitions).and_raise('injected error!')
+ extra_partitions.each { |p| expect(manager).to receive(:detach_one_partition).with(p) }
+
+ sync_partitions
+ end
+ end
+ end
+
+ context 'with the partition_pruning_dry_run feature flag disabled' do
+ before do
+ stub_feature_flags(partition_pruning_dry_run: false)
+ end
+
+ it 'returns immediately' do
+ expect(manager).not_to receive(:detach)
+
+ sync_partitions
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb
index 83f2436043c..a524fe681e9 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb
@@ -3,192 +3,142 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::ForeignKeyHelpers do
- include Database::TriggerHelpers
+ include Database::TableSchemaHelpers
- let(:model) do
- ActiveRecord::Migration.new.extend(described_class)
+ let(:migration) do
+ ActiveRecord::Migration.new.extend(Gitlab::Database::PartitioningMigrationHelpers)
end
- let_it_be(:connection) { ActiveRecord::Base.connection }
-
- let(:referenced_table) { :issues }
- let(:function_name) { '_test_partitioned_foreign_keys_function' }
- let(:trigger_name) { '_test_partitioned_foreign_keys_trigger' }
+ let(:source_table_name) { '_test_partitioned_table' }
+ let(:target_table_name) { '_test_referenced_table' }
+ let(:column_name) { "#{target_table_name}_id" }
+ let(:foreign_key_name) { '_test_partitioned_fk' }
+ let(:partition_schema) { 'gitlab_partitions_dynamic' }
+ let(:partition1_name) { "#{partition_schema}.#{source_table_name}_202001" }
+ let(:partition2_name) { "#{partition_schema}.#{source_table_name}_202002" }
+ let(:options) do
+ {
+ column: column_name,
+ name: foreign_key_name,
+ on_delete: :cascade,
+ validate: true
+ }
+ end
before do
- allow(model).to receive(:puts)
- allow(model).to receive(:fk_function_name).and_return(function_name)
- allow(model).to receive(:fk_trigger_name).and_return(trigger_name)
+ allow(migration).to receive(:puts)
+
+ connection.execute(<<~SQL)
+ CREATE TABLE #{target_table_name} (
+ id serial NOT NULL,
+ PRIMARY KEY (id)
+ );
+
+ CREATE TABLE #{source_table_name} (
+ id serial NOT NULL,
+ #{column_name} int NOT NULL,
+ created_at timestamptz NOT NULL,
+ PRIMARY KEY (id, created_at)
+ ) PARTITION BY RANGE (created_at);
+
+ CREATE TABLE #{partition1_name} PARTITION OF #{source_table_name}
+ FOR VALUES FROM ('2020-01-01') TO ('2020-02-01');
+
+ CREATE TABLE #{partition2_name} PARTITION OF #{source_table_name}
+ FOR VALUES FROM ('2020-02-01') TO ('2020-03-01');
+ SQL
end
- describe 'adding a foreign key' do
+ describe '#add_concurrent_partitioned_foreign_key' do
before do
- allow(model).to receive(:transaction_open?).and_return(false)
- end
-
- context 'when the table has no foreign keys' do
- it 'creates a trigger function to handle the single cascade' do
- model.add_partitioned_foreign_key :issue_assignees, referenced_table
-
- expect_function_to_contain(function_name, 'delete from issue_assignees where issue_id = old.id')
- expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
- end
- end
-
- context 'when the table already has foreign keys' do
- context 'when the foreign key is from a different table' do
- before do
- model.add_partitioned_foreign_key :issue_assignees, referenced_table
- end
-
- it 'creates a trigger function to handle the multiple cascades' do
- model.add_partitioned_foreign_key :epic_issues, referenced_table
-
- expect_function_to_contain(function_name,
- 'delete from issue_assignees where issue_id = old.id',
- 'delete from epic_issues where issue_id = old.id')
- expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
- end
- end
-
- context 'when the foreign key is from the same table' do
- before do
- model.add_partitioned_foreign_key :issues, referenced_table, column: :moved_to_id
- end
-
- context 'when the foreign key is from a different column' do
- it 'creates a trigger function to handle the multiple cascades' do
- model.add_partitioned_foreign_key :issues, referenced_table, column: :duplicated_to_id
-
- expect_function_to_contain(function_name,
- 'delete from issues where moved_to_id = old.id',
- 'delete from issues where duplicated_to_id = old.id')
- expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
- end
- end
-
- context 'when the foreign key is from the same column' do
- it 'ignores the duplicate and properly recreates the trigger function' do
- model.add_partitioned_foreign_key :issues, referenced_table, column: :moved_to_id
-
- expect_function_to_contain(function_name, 'delete from issues where moved_to_id = old.id')
- expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
- end
- end
- end
- end
+ allow(migration).to receive(:foreign_key_exists?)
+ .with(source_table_name, target_table_name, anything)
+ .and_return(false)
- context 'when the foreign key is set to nullify' do
- it 'creates a trigger function that nullifies the foreign key' do
- model.add_partitioned_foreign_key :issue_assignees, referenced_table, on_delete: :nullify
-
- expect_function_to_contain(function_name, 'update issue_assignees set issue_id = null where issue_id = old.id')
- expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
- end
+ allow(migration).to receive(:with_lock_retries).and_yield
end
- context 'when the referencing column is a custom value' do
- it 'creates a trigger function with the correct column name' do
- model.add_partitioned_foreign_key :issues, referenced_table, column: :duplicated_to_id
+ context 'when the foreign key does not exist on the parent table' do
+ it 'creates the foreign key on each partition, and the parent table' do
+ expect(migration).to receive(:foreign_key_exists?)
+ .with(source_table_name, target_table_name, **options)
+ .and_return(false)
- expect_function_to_contain(function_name, 'delete from issues where duplicated_to_id = old.id')
- expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
- end
- end
+ expect(migration).to receive(:concurrent_partitioned_foreign_key_name).and_return(foreign_key_name)
- context 'when the referenced column is a custom value' do
- let(:referenced_table) { :user_details }
+ expect_add_concurrent_fk_and_call_original(partition1_name, target_table_name, **options)
+ expect_add_concurrent_fk_and_call_original(partition2_name, target_table_name, **options)
- it 'creates a trigger function with the correct column name' do
- model.add_partitioned_foreign_key :user_preferences, referenced_table, column: :user_id, primary_key: :user_id
+ expect(migration).to receive(:with_lock_retries).ordered.and_yield
+ expect(migration).to receive(:add_foreign_key)
+ .with(source_table_name, target_table_name, **options)
+ .ordered
+ .and_call_original
- expect_function_to_contain(function_name, 'delete from user_preferences where user_id = old.user_id')
- expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
- end
- end
+ migration.add_concurrent_partitioned_foreign_key(source_table_name, target_table_name, column: column_name)
- context 'when the given key definition is invalid' do
- it 'raises an error with the appropriate message' do
- expect do
- model.add_partitioned_foreign_key :issue_assignees, referenced_table, column: :not_a_real_issue_id
- end.to raise_error(/From column must be a valid column/)
+ expect_foreign_key_to_exist(source_table_name, foreign_key_name)
end
- end
-
- context 'when run inside a transaction' do
- it 'raises an error' do
- expect(model).to receive(:transaction_open?).and_return(true)
- expect do
- model.add_partitioned_foreign_key :issue_assignees, referenced_table
- end.to raise_error(/can not be run inside a transaction/)
+ def expect_add_concurrent_fk_and_call_original(source_table_name, target_table_name, options)
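+ # Expects the ordered call, but swaps the concurrent helper for a plain add_foreign_key so each partition gets a real FK without CONCURRENTLY.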
+ expect(migration).to receive(:add_concurrent_foreign_key)
+ .ordered
+ .with(source_table_name, target_table_name, options)
+ .and_wrap_original do |_, source_table_name, target_table_name, options|
+ connection.add_foreign_key(source_table_name, target_table_name, **options)
+ end
end
end
- end
- context 'removing a foreign key' do
- before do
- allow(model).to receive(:transaction_open?).and_return(false)
- end
+ context 'when the foreign key exists on the parent table' do
+ it 'does not attempt to create any foreign keys' do
+ expect(migration).to receive(:concurrent_partitioned_foreign_key_name).and_return(foreign_key_name)
- context 'when the table has multiple foreign keys' do
- before do
- model.add_partitioned_foreign_key :issue_assignees, referenced_table
- model.add_partitioned_foreign_key :epic_issues, referenced_table
- end
+ expect(migration).to receive(:foreign_key_exists?)
+ .with(source_table_name, target_table_name, **options)
+ .and_return(true)
- it 'creates a trigger function without the removed cascade' do
- expect_function_to_contain(function_name,
- 'delete from issue_assignees where issue_id = old.id',
- 'delete from epic_issues where issue_id = old.id')
- expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
+ expect(migration).not_to receive(:add_concurrent_foreign_key)
+ expect(migration).not_to receive(:with_lock_retries)
+ expect(migration).not_to receive(:add_foreign_key)
- model.remove_partitioned_foreign_key :issue_assignees, referenced_table
+ migration.add_concurrent_partitioned_foreign_key(source_table_name, target_table_name, column: column_name)
- expect_function_to_contain(function_name, 'delete from epic_issues where issue_id = old.id')
- expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
+ expect_foreign_key_not_to_exist(source_table_name, foreign_key_name)
end
end
- context 'when the table has only one remaining foreign key' do
- before do
- model.add_partitioned_foreign_key :issue_assignees, referenced_table
+ context 'when additional foreign key options are given' do
+ let(:options) do
+ {
+ column: column_name,
+ name: '_my_fk_name',
+ on_delete: :restrict,
+ validate: true
+ }
end
- it 'removes the trigger function altogether' do
- expect_function_to_contain(function_name, 'delete from issue_assignees where issue_id = old.id')
- expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
-
- model.remove_partitioned_foreign_key :issue_assignees, referenced_table
-
- expect_function_not_to_exist(function_name)
- expect_trigger_not_to_exist(referenced_table, trigger_name)
- end
- end
+ it 'forwards them to the foreign key helper methods' do
+ expect(migration).to receive(:foreign_key_exists?)
+ .with(source_table_name, target_table_name, **options)
+ .and_return(false)
- context 'when the foreign key does not exist' do
- before do
- model.add_partitioned_foreign_key :issue_assignees, referenced_table
- end
+ expect(migration).not_to receive(:concurrent_partitioned_foreign_key_name)
- it 'ignores the invalid key and properly recreates the trigger function' do
- expect_function_to_contain(function_name, 'delete from issue_assignees where issue_id = old.id')
- expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
+ expect_add_concurrent_fk(partition1_name, target_table_name, **options)
+ expect_add_concurrent_fk(partition2_name, target_table_name, **options)
- model.remove_partitioned_foreign_key :issues, referenced_table, column: :moved_to_id
+ expect(migration).to receive(:with_lock_retries).ordered.and_yield
+ expect(migration).to receive(:add_foreign_key).with(source_table_name, target_table_name, **options).ordered
- expect_function_to_contain(function_name, 'delete from issue_assignees where issue_id = old.id')
- expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
+ migration.add_concurrent_partitioned_foreign_key(source_table_name, target_table_name,
+ column: column_name, name: '_my_fk_name', on_delete: :restrict)
end
- end
-
- context 'when run outside a transaction' do
- it 'raises an error' do
- expect(model).to receive(:transaction_open?).and_return(true)
- expect do
- model.remove_partitioned_foreign_key :issue_assignees, referenced_table
- end.to raise_error(/can not be run inside a transaction/)
+ def expect_add_concurrent_fk(source_table_name, target_table_name, options)
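+ # Only asserts the ordered call with the given options; no foreign key is actually created here.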
+ expect(migration).to receive(:add_concurrent_foreign_key)
+ .ordered
+ .with(source_table_name, target_table_name, options)
end
end
end
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/partitioned_foreign_key_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/partitioned_foreign_key_spec.rb
deleted file mode 100644
index a58c37f111d..00000000000
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/partitioned_foreign_key_spec.rb
+++ /dev/null
@@ -1,48 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::PartitionedForeignKey do
- let(:foreign_key) do
- described_class.new(
- to_table: 'issues',
- from_table: 'issue_assignees',
- from_column: 'issue_id',
- to_column: 'id',
- cascade_delete: true)
- end
-
- describe 'validations' do
- it 'allows keys that reference valid tables and columns' do
- expect(foreign_key).to be_valid
- end
-
- it 'does not allow keys without a valid to_table' do
- foreign_key.to_table = 'this_is_not_a_real_table'
-
- expect(foreign_key).not_to be_valid
- expect(foreign_key.errors[:to_table].first).to eq('must be a valid table')
- end
-
- it 'does not allow keys without a valid from_table' do
- foreign_key.from_table = 'this_is_not_a_real_table'
-
- expect(foreign_key).not_to be_valid
- expect(foreign_key.errors[:from_table].first).to eq('must be a valid table')
- end
-
- it 'does not allow keys without a valid to_column' do
- foreign_key.to_column = 'this_is_not_a_real_fk'
-
- expect(foreign_key).not_to be_valid
- expect(foreign_key.errors[:to_column].first).to eq('must be a valid column')
- end
-
- it 'does not allow keys without a valid from_column' do
- foreign_key.from_column = 'this_is_not_a_real_pk'
-
- expect(foreign_key).not_to be_valid
- expect(foreign_key.errors[:from_column].first).to eq('must be a valid column')
- end
- end
-end
diff --git a/spec/lib/gitlab/database/postgres_index_spec.rb b/spec/lib/gitlab/database/postgres_index_spec.rb
index 2fda9b85c5a..e1832219ebf 100644
--- a/spec/lib/gitlab/database/postgres_index_spec.rb
+++ b/spec/lib/gitlab/database/postgres_index_spec.rb
@@ -22,17 +22,23 @@ RSpec.describe Gitlab::Database::PostgresIndex do
it_behaves_like 'a postgres model'
- describe '.regular' do
- it 'only non-unique indexes' do
- expect(described_class.regular).to all(have_attributes(unique: false))
- end
-
+ describe '.reindexing_support' do
it 'only non partitioned indexes' do
- expect(described_class.regular).to all(have_attributes(partitioned: false))
+ expect(described_class.reindexing_support).to all(have_attributes(partitioned: false))
end
it 'only indexes that dont serve an exclusion constraint' do
- expect(described_class.regular).to all(have_attributes(exclusion: false))
+ expect(described_class.reindexing_support).to all(have_attributes(exclusion: false))
+ end
+
+ it 'only non-expression indexes' do
+ expect(described_class.reindexing_support).to all(have_attributes(expression: false))
+ end
+
+ it 'only btree and gist indexes' do
+ types = described_class.reindexing_support.map(&:type).uniq
+
+ expect(types & %w(btree gist)).to eq(types)
end
end
@@ -67,6 +73,34 @@ RSpec.describe Gitlab::Database::PostgresIndex do
end
end
+ describe '#relative_bloat_level' do
+ subject { build(:postgres_index, bloat_estimate: bloat_estimate, ondisk_size_bytes: 1024) }
+
+ let(:bloat_estimate) { build(:postgres_index_bloat_estimate, bloat_size: 256) }
+
+ it 'calculates the relative bloat level' do
+ expect(subject.relative_bloat_level).to eq(0.25)
+ end
+ end
+
+ describe '#reset' do
+ subject { index.reset }
+
+ let(:index) { described_class.by_identifier(identifier) }
+
+ it 'calls #reload' do
+ expect(index).to receive(:reload).once.and_call_original
+
+ subject
+ end
+
+ it 'resets the bloat estimation' do
+ expect(index).to receive(:clear_memoization).with(:bloat_size).and_call_original
+
+ subject
+ end
+ end
+
describe '#unique?' do
it 'returns true for a unique index' do
expect(find('public.bar_key')).to be_unique
diff --git a/spec/lib/gitlab/database/postgresql_adapter/dump_schema_versions_mixin_spec.rb b/spec/lib/gitlab/database/postgresql_adapter/dump_schema_versions_mixin_spec.rb
index ca9f4af9187..40e36bc02e9 100644
--- a/spec/lib/gitlab/database/postgresql_adapter/dump_schema_versions_mixin_spec.rb
+++ b/spec/lib/gitlab/database/postgresql_adapter/dump_schema_versions_mixin_spec.rb
@@ -3,33 +3,27 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::PostgresqlAdapter::DumpSchemaVersionsMixin do
- let(:schema_migration) { double('schema_migration', all_versions: versions) }
-
- let(:instance) do
- Object.new.extend(described_class)
- end
-
- before do
- allow(instance).to receive(:schema_migration).and_return(schema_migration)
- end
-
- context 'when version files exist' do
- let(:versions) { %w(5 2 1000 200 4 93 2) }
+ let(:instance_class) do
+ klass = Class.new do
+ def dump_schema_information
+ original_dump_schema_information
+ end
+
+ def original_dump_schema_information
+ end
+ end
- it 'touches version files' do
- expect(Gitlab::Database::SchemaVersionFiles).to receive(:touch_all).with(versions)
+ klass.prepend(described_class)
- instance.dump_schema_information
- end
+ klass
end
- context 'when version files do not exist' do
- let(:versions) { [] }
+ let(:instance) { instance_class.new }
- it 'does not touch version files' do
- expect(Gitlab::Database::SchemaVersionFiles).not_to receive(:touch_all)
+ it 'calls SchemaMigrations touch_all and skips original implementation' do
+ expect(Gitlab::Database::SchemaMigrations).to receive(:touch_all).with(instance)
+ expect(instance).not_to receive(:original_dump_schema_information)
- instance.dump_schema_information
- end
+ instance.dump_schema_information
end
end
diff --git a/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb b/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb
index ea8c9e2cfd7..2a1f91b5b21 100644
--- a/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb
+++ b/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Gitlab::Database::PostgresqlAdapter::ForceDisconnectableMixin do
end
end
- let(:config) { Rails.application.config_for(:database).merge(pool: 1) }
+ let(:config) { ActiveRecord::Base.configurations.find_db_config(Rails.env).configuration_hash.merge(pool: 1) }
let(:pool) { model.establish_connection(config) }
it 'calls the force disconnect callback on checkin' do
diff --git a/spec/lib/gitlab/database/postgresql_adapter/type_map_cache_spec.rb b/spec/lib/gitlab/database/postgresql_adapter/type_map_cache_spec.rb
index e9c512f94bb..c6542aa2adb 100644
--- a/spec/lib/gitlab/database/postgresql_adapter/type_map_cache_spec.rb
+++ b/spec/lib/gitlab/database/postgresql_adapter/type_map_cache_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::PostgresqlAdapter::TypeMapCache do
- let(:db_config) { ActiveRecord::Base.configurations.configs_for(env_name: 'test', name: 'primary').configuration_hash }
+ let(:db_config) { ActiveRecord::Base.configurations.find_db_config(Rails.env).configuration_hash }
let(:adapter_class) { ActiveRecord::ConnectionAdapters::PostgreSQLAdapter }
before do
diff --git a/spec/lib/gitlab/database/postgresql_database_tasks/load_schema_versions_mixin_spec.rb b/spec/lib/gitlab/database/postgresql_database_tasks/load_schema_versions_mixin_spec.rb
new file mode 100644
index 00000000000..3e675a85999
--- /dev/null
+++ b/spec/lib/gitlab/database/postgresql_database_tasks/load_schema_versions_mixin_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::PostgresqlDatabaseTasks::LoadSchemaVersionsMixin do
+ let(:instance_class) do
+ klass = Class.new do
+ def structure_load
+ original_structure_load
+ end
+
+ def original_structure_load
+ end
+ end
+
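+ # Prepending puts the mixin ahead of these stub methods in the ancestor chain, so its structure_load override runs first.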
+ klass.prepend(described_class)
+
+ klass
+ end
+
+ let(:instance) { instance_class.new }
+
+ it 'calls SchemaMigrations load_all' do
+ connection = double('connection')
+ allow(instance).to receive(:connection).and_return(connection)
+
+ expect(instance).to receive(:original_structure_load).ordered
+ expect(Gitlab::Database::SchemaMigrations).to receive(:load_all).with(connection).ordered
+
+ instance.structure_load
+ end
+end
diff --git a/spec/lib/gitlab/database/reindexing/concurrent_reindex_spec.rb b/spec/lib/gitlab/database/reindexing/concurrent_reindex_spec.rb
deleted file mode 100644
index d9077969003..00000000000
--- a/spec/lib/gitlab/database/reindexing/concurrent_reindex_spec.rb
+++ /dev/null
@@ -1,303 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Database::Reindexing::ConcurrentReindex, '#perform' do
- subject { described_class.new(index, logger: logger) }
-
- let(:table_name) { '_test_reindex_table' }
- let(:column_name) { '_test_column' }
- let(:index_name) { '_test_reindex_index' }
- let(:index) { instance_double(Gitlab::Database::PostgresIndex, indexrelid: 42, name: index_name, schema: 'public', tablename: table_name, partitioned?: false, unique?: false, exclusion?: false, expression?: false, definition: 'CREATE INDEX _test_reindex_index ON public._test_reindex_table USING btree (_test_column)') }
- let(:logger) { double('logger', debug: nil, info: nil, error: nil ) }
- let(:connection) { ActiveRecord::Base.connection }
-
- before do
- connection.execute(<<~SQL)
- CREATE TABLE #{table_name} (
- id serial NOT NULL PRIMARY KEY,
- #{column_name} integer NOT NULL);
-
- CREATE INDEX #{index.name} ON #{table_name} (#{column_name});
- SQL
- end
-
- context 'when the index is unique' do
- before do
- allow(index).to receive(:unique?).and_return(true)
- end
-
- it 'raises an error' do
- expect do
- subject.perform
- end.to raise_error(described_class::ReindexError, /UNIQUE indexes are currently not supported/)
- end
- end
-
- context 'when the index is partitioned' do
- before do
- allow(index).to receive(:partitioned?).and_return(true)
- end
-
- it 'raises an error' do
- expect do
- subject.perform
- end.to raise_error(described_class::ReindexError, /partitioned indexes are currently not supported/)
- end
- end
-
- context 'when the index serves an exclusion constraint' do
- before do
- allow(index).to receive(:exclusion?).and_return(true)
- end
-
- it 'raises an error' do
- expect do
- subject.perform
- end.to raise_error(described_class::ReindexError, /indexes serving an exclusion constraint are currently not supported/)
- end
- end
-
- context 'when the index is a lingering temporary index from a previous reindexing run' do
- context 'with the temporary index prefix' do
- let(:index_name) { 'tmp_reindex_something' }
-
- it 'raises an error' do
- expect do
- subject.perform
- end.to raise_error(described_class::ReindexError, /left-over temporary index/)
- end
- end
-
- context 'with the replaced index prefix' do
- let(:index_name) { 'old_reindex_something' }
-
- it 'raises an error' do
- expect do
- subject.perform
- end.to raise_error(described_class::ReindexError, /left-over temporary index/)
- end
- end
- end
-
- context 'replacing the original index with a rebuilt copy' do
- let(:replacement_name) { 'tmp_reindex_42' }
- let(:replaced_name) { 'old_reindex_42' }
-
- let(:create_index) { "CREATE INDEX CONCURRENTLY #{replacement_name} ON public.#{table_name} USING btree (#{column_name})" }
- let(:drop_index) do
- <<~SQL
- DROP INDEX CONCURRENTLY
- IF EXISTS "public"."#{replacement_name}"
- SQL
- end
-
- let!(:original_index) { find_index_create_statement }
-
- it 'integration test: executing full index replacement without mocks' do
- allow(connection).to receive(:execute).and_wrap_original do |method, sql|
- method.call(sql.sub(/CONCURRENTLY/, ''))
- end
-
- subject.perform
-
- check_index_exists
- end
-
- context 'mocked specs' do
- before do
- allow(subject).to receive(:connection).and_return(connection)
- allow(connection).to receive(:execute).and_call_original
- end
-
- it 'replaces the existing index with an identical index' do
- expect(connection).to receive(:execute).with('SET statement_timeout TO \'32400s\'')
-
- expect_to_execute_concurrently_in_order(create_index)
-
- expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
- expect(instance).to receive(:run).with(raise_on_exhaustion: true).and_yield
- end
-
- expect_index_rename(index.name, replaced_name)
- expect_index_rename(replacement_name, index.name)
- expect_index_rename(replaced_name, replacement_name)
-
- expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
- expect(instance).to receive(:run).with(raise_on_exhaustion: false).and_yield
- end
-
- expect_to_execute_concurrently_in_order(drop_index)
-
- subject.perform
-
- check_index_exists
- end
-
- context 'for expression indexes' do
- before do
- allow(index).to receive(:expression?).and_return(true)
- end
-
- it 'rebuilds table statistics before dropping the original index' do
- expect(connection).to receive(:execute).with('SET statement_timeout TO \'32400s\'')
-
- expect_to_execute_concurrently_in_order(create_index)
-
- expect_to_execute_concurrently_in_order(<<~SQL)
- ANALYZE "#{index.schema}"."#{index.tablename}"
- SQL
-
- expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
- expect(instance).to receive(:run).with(raise_on_exhaustion: true).and_yield
- end
-
- expect_index_rename(index.name, replaced_name)
- expect_index_rename(replacement_name, index.name)
- expect_index_rename(replaced_name, replacement_name)
-
- expect_index_drop(drop_index)
-
- subject.perform
-
- check_index_exists
- end
- end
-
- context 'when a dangling index is left from a previous run' do
- before do
- connection.execute("CREATE INDEX #{replacement_name} ON #{table_name} (#{column_name})")
- end
-
- it 'replaces the existing index with an identical index' do
- expect_index_drop(drop_index)
- expect_to_execute_concurrently_in_order(create_index)
-
- expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
- expect(instance).to receive(:run).with(raise_on_exhaustion: true).and_yield
- end
-
- expect_index_rename(index.name, replaced_name)
- expect_index_rename(replacement_name, index.name)
- expect_index_rename(replaced_name, replacement_name)
-
- expect_index_drop(drop_index)
-
- subject.perform
-
- check_index_exists
- end
- end
-
- context 'when it fails to create the replacement index' do
- it 'safely cleans up and signals the error' do
- expect(connection).to receive(:execute).with(create_index).ordered
- .and_raise(ActiveRecord::ConnectionTimeoutError, 'connect timeout')
-
- expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
- expect(instance).to receive(:run).with(raise_on_exhaustion: false).and_yield
- end
-
- expect_to_execute_concurrently_in_order(drop_index)
-
- expect { subject.perform }.to raise_error(ActiveRecord::ConnectionTimeoutError, /connect timeout/)
-
- check_index_exists
- end
- end
-
- context 'when the replacement index is not valid' do
- it 'safely cleans up and signals the error' do
- replacement_index = double('replacement index', valid_index?: false)
- allow(Gitlab::Database::PostgresIndex).to receive(:find_by).with(schema: 'public', name: replacement_name).and_return(nil, replacement_index)
-
- expect_to_execute_concurrently_in_order(create_index)
-
- expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
- expect(instance).to receive(:run).with(raise_on_exhaustion: false).and_yield
- end
-
- expect_to_execute_concurrently_in_order(drop_index)
-
- expect { subject.perform }.to raise_error(described_class::ReindexError, /replacement index was created as INVALID/)
-
- check_index_exists
- end
- end
-
- context 'when a database error occurs while swapping the indexes' do
- it 'safely cleans up and signals the error' do
- replacement_index = double('replacement index', valid_index?: true)
- allow(Gitlab::Database::PostgresIndex).to receive(:find_by).with(schema: 'public', name: replacement_name).and_return(nil, replacement_index)
-
- expect_to_execute_concurrently_in_order(create_index)
-
- expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
- expect(instance).to receive(:run).with(raise_on_exhaustion: true).and_yield
- end
-
- expect_index_rename(index.name, replaced_name).and_raise(ActiveRecord::ConnectionTimeoutError, 'connect timeout')
-
- expect_index_drop(drop_index)
-
- expect { subject.perform }.to raise_error(ActiveRecord::ConnectionTimeoutError, /connect timeout/)
-
- check_index_exists
- end
- end
-
- context 'when with_lock_retries fails to acquire the lock' do
- it 'safely cleans up and signals the error' do
- expect_to_execute_concurrently_in_order(create_index)
-
- expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
- expect(instance).to receive(:run).with(raise_on_exhaustion: true)
- .and_raise(::Gitlab::Database::WithLockRetries::AttemptsExhaustedError, 'exhausted')
- end
-
- expect_index_drop(drop_index)
-
- expect { subject.perform }.to raise_error(::Gitlab::Database::WithLockRetries::AttemptsExhaustedError, /exhausted/)
-
- check_index_exists
- end
- end
- end
- end
-
- def expect_to_execute_concurrently_in_order(sql)
- # Indexes cannot be created CONCURRENTLY in a transaction. Since the tests are wrapped in transactions,
- # verify the original call but pass through the non-concurrent form.
- expect(connection).to receive(:execute).with(sql).ordered.and_wrap_original do |method, sql|
- method.call(sql.sub(/CONCURRENTLY/, ''))
- end
- end
-
- def expect_index_rename(from, to)
- expect(connection).to receive(:execute).with(<<~SQL).ordered
- ALTER INDEX "public"."#{from}"
- RENAME TO "#{to}"
- SQL
- end
-
- def expect_index_drop(drop_index)
- expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
- expect(instance).to receive(:run).with(raise_on_exhaustion: false).and_yield
- end
-
- expect_to_execute_concurrently_in_order(drop_index)
- end
-
- def find_index_create_statement
- ActiveRecord::Base.connection.select_value(<<~SQL)
- SELECT indexdef
- FROM pg_indexes
- WHERE schemaname = 'public'
- AND indexname = #{ActiveRecord::Base.connection.quote(index.name)}
- SQL
- end
-
- def check_index_exists
- expect(find_index_create_statement).to eq(original_index)
- end
-end
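
Both the deleted spec above and the new ReindexConcurrently spec later in this diff lean on the same testing trick: RSpec's and_wrap_original intercepts connection.execute, strips the CONCURRENTLY keyword, and forwards the statement, because concurrent index builds cannot run inside the transaction that wraps each example. A minimal sketch of that pattern, assuming the usual spec_helper setup and made-up table/index names:

    RSpec.describe 'stripping CONCURRENTLY inside transactional specs' do
      let(:connection) { ActiveRecord::Base.connection }

      before do
        connection.execute('CREATE TABLE IF NOT EXISTS _test_demo_table (id serial PRIMARY KEY)')
      end

      it 'forwards the statement without the keyword' do
        allow(connection).to receive(:execute).and_wrap_original do |original, sql|
          # CREATE/DROP INDEX CONCURRENTLY is rejected inside a transaction block,
          # so call through to the real method with the keyword removed.
          original.call(sql.sub(/CONCURRENTLY/, ''))
        end

        connection.execute('CREATE INDEX CONCURRENTLY _test_demo_idx ON _test_demo_table (id)')

        expect(connection.index_exists?(:_test_demo_table, :id, name: '_test_demo_idx')).to be(true)
      end
    end
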
diff --git a/spec/lib/gitlab/database/reindexing/coordinator_spec.rb b/spec/lib/gitlab/database/reindexing/coordinator_spec.rb
index ae6362ba812..085fd3061ad 100644
--- a/spec/lib/gitlab/database/reindexing/coordinator_spec.rb
+++ b/spec/lib/gitlab/database/reindexing/coordinator_spec.rb
@@ -9,16 +9,9 @@ RSpec.describe Gitlab::Database::Reindexing::Coordinator do
describe '.perform' do
subject { described_class.new(index, notifier).perform }
- before do
- swapout_view_for_table(:postgres_indexes)
-
- allow(Gitlab::Database::Reindexing::ConcurrentReindex).to receive(:new).with(index).and_return(reindexer)
- allow(Gitlab::Database::Reindexing::ReindexAction).to receive(:create_for).with(index).and_return(action)
- end
-
let(:index) { create(:postgres_index) }
let(:notifier) { instance_double(Gitlab::Database::Reindexing::GrafanaNotifier, notify_start: nil, notify_end: nil) }
- let(:reindexer) { instance_double(Gitlab::Database::Reindexing::ConcurrentReindex, perform: nil) }
+ let(:reindexer) { instance_double(Gitlab::Database::Reindexing::ReindexConcurrently, perform: nil) }
let(:action) { create(:reindex_action, index: index) }
let!(:lease) { stub_exclusive_lease(lease_key, uuid, timeout: lease_timeout) }
@@ -26,6 +19,13 @@ RSpec.describe Gitlab::Database::Reindexing::Coordinator do
let(:lease_timeout) { 1.day }
let(:uuid) { 'uuid' }
+ before do
+ swapout_view_for_table(:postgres_indexes)
+
+ allow(Gitlab::Database::Reindexing::ReindexConcurrently).to receive(:new).with(index).and_return(reindexer)
+ allow(Gitlab::Database::Reindexing::ReindexAction).to receive(:create_for).with(index).and_return(action)
+ end
+
context 'locking' do
it 'acquires a lock while reindexing' do
expect(lease).to receive(:try_obtain).ordered.and_return(uuid)
@@ -39,7 +39,7 @@ RSpec.describe Gitlab::Database::Reindexing::Coordinator do
it 'does not perform reindexing actions if lease is not granted' do
expect(lease).to receive(:try_obtain).ordered.and_return(false)
- expect(Gitlab::Database::Reindexing::ConcurrentReindex).not_to receive(:new)
+ expect(Gitlab::Database::Reindexing::ReindexConcurrently).not_to receive(:new)
subject
end
diff --git a/spec/lib/gitlab/database/reindexing/index_selection_spec.rb b/spec/lib/gitlab/database/reindexing/index_selection_spec.rb
index 4466679a099..ee3f2b1b415 100644
--- a/spec/lib/gitlab/database/reindexing/index_selection_spec.rb
+++ b/spec/lib/gitlab/database/reindexing/index_selection_spec.rb
@@ -10,20 +10,50 @@ RSpec.describe Gitlab::Database::Reindexing::IndexSelection do
before do
swapout_view_for_table(:postgres_index_bloat_estimates)
swapout_view_for_table(:postgres_indexes)
+
+ create_list(:postgres_index, 10, ondisk_size_bytes: 10.gigabytes).each_with_index do |index, i|
+ create(:postgres_index_bloat_estimate, index: index, bloat_size_bytes: 2.gigabyte * (i + 1))
+ end
end
def execute(sql)
ActiveRecord::Base.connection.execute(sql)
end
- it 'orders by highest bloat first' do
- create_list(:postgres_index, 10).each_with_index do |index, i|
- create(:postgres_index_bloat_estimate, index: index, bloat_size_bytes: 1.megabyte * i)
- end
+ it 'orders by highest relative bloat first' do
+ expected = Gitlab::Database::PostgresIndex.all.sort_by(&:relative_bloat_level).reverse.map(&:name)
+
+ expect(subject.map(&:name)).to eq(expected)
+ end
+
+ it 'excludes indexes with a relative bloat level below 20%' do
+ excluded = create(
+ :postgres_index_bloat_estimate,
+ index: create(:postgres_index, ondisk_size_bytes: 10.gigabytes),
+ bloat_size_bytes: 1.9.gigabyte # 19% relative index bloat
+ )
- expected = Gitlab::Database::PostgresIndexBloatEstimate.order(bloat_size_bytes: :desc).map(&:index)
+ expect(subject).not_to include(excluded.index)
+ end
+
+ it 'excludes indexes smaller than 1 GB ondisk size' do
+ excluded = create(
+ :postgres_index_bloat_estimate,
+ index: create(:postgres_index, ondisk_size_bytes: 0.99.gigabytes),
+ bloat_size_bytes: 0.8.gigabyte
+ )
+
+ expect(subject).not_to include(excluded.index)
+ end
+
+ it 'excludes indexes larger than 100 GB ondisk size' do
+ excluded = create(
+ :postgres_index_bloat_estimate,
+ index: create(:postgres_index, ondisk_size_bytes: 101.gigabytes),
+ bloat_size_bytes: 25.gigabyte
+ )
- expect(subject).to eq(expected)
+ expect(subject).not_to include(excluded.index)
end
context 'with time frozen' do
@@ -31,20 +61,17 @@ RSpec.describe Gitlab::Database::Reindexing::IndexSelection do
freeze_time { example.run }
end
- it 'does not return indexes with reindex action in the last 7 days' do
- not_recently_reindexed = create_list(:postgres_index, 2).each_with_index do |index, i|
- create(:postgres_index_bloat_estimate, index: index, bloat_size_bytes: 1.megabyte * i)
- create(:reindex_action, index: index, action_end: Time.zone.now - 7.days - 1.minute)
+ it 'does not return indexes with reindex action in the last 10 days' do
+ not_recently_reindexed = Gitlab::Database::PostgresIndex.all.each do |index|
+ create(:reindex_action, index: index, action_end: Time.zone.now - 10.days - 1.minute)
end
- create_list(:postgres_index, 2).each_with_index do |index, i|
- create(:postgres_index_bloat_estimate, index: index, bloat_size_bytes: 1.megabyte * i)
+ create_list(:postgres_index, 10, ondisk_size_bytes: 10.gigabytes).each_with_index do |index, i|
+ create(:postgres_index_bloat_estimate, index: index, bloat_size_bytes: 2.gigabyte * (i + 1))
create(:reindex_action, index: index, action_end: Time.zone.now)
end
- expected = Gitlab::Database::PostgresIndexBloatEstimate.where(identifier: not_recently_reindexed.map(&:identifier)).map(&:index).map(&:identifier).sort
-
- expect(subject.map(&:identifier).sort).to eq(expected)
+ expect(subject.map(&:name).sort).to eq(not_recently_reindexed.map(&:name).sort)
end
end
end
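
The selection rules these examples pin down can be read off directly: an index is a reindexing candidate when its estimated bloat is at least 20% of its on-disk size, the index is between 1 GB and 100 GB on disk, and it has not been reindexed within the last 10 days. A rough worked example of the size thresholds; the helper below is illustrative, not the production implementation, and it ignores the recency rule:

    GIGABYTE = 1024**3

    # Illustrative predicate mirroring the size thresholds asserted above.
    def reindexing_candidate?(ondisk_size_bytes, bloat_size_bytes)
      relative_bloat = bloat_size_bytes.to_f / ondisk_size_bytes

      relative_bloat >= 0.2 &&
        ondisk_size_bytes >= 1 * GIGABYTE &&
        ondisk_size_bytes <= 100 * GIGABYTE
    end

    reindexing_candidate?(10 * GIGABYTE, 2 * GIGABYTE)     # => true,  20% relative bloat
    reindexing_candidate?(10 * GIGABYTE, 1.9 * GIGABYTE)   # => false, 19% relative bloat
    reindexing_candidate?(101 * GIGABYTE, 25 * GIGABYTE)   # => false, above the 100 GB cap
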
diff --git a/spec/lib/gitlab/database/reindexing/reindex_concurrently_spec.rb b/spec/lib/gitlab/database/reindexing/reindex_concurrently_spec.rb
new file mode 100644
index 00000000000..6f87475fc94
--- /dev/null
+++ b/spec/lib/gitlab/database/reindexing/reindex_concurrently_spec.rb
@@ -0,0 +1,134 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Reindexing::ReindexConcurrently, '#perform' do
+ subject { described_class.new(index, logger: logger).perform }
+
+ let(:table_name) { '_test_reindex_table' }
+ let(:column_name) { '_test_column' }
+ let(:index_name) { '_test_reindex_index' }
+ let(:index) { Gitlab::Database::PostgresIndex.by_identifier("public.#{iname(index_name)}") }
+ let(:logger) { double('logger', debug: nil, info: nil, error: nil ) }
+ let(:connection) { ActiveRecord::Base.connection }
+
+ before do
+ connection.execute(<<~SQL)
+ CREATE TABLE #{table_name} (
+ id serial NOT NULL PRIMARY KEY,
+ #{column_name} integer NOT NULL);
+
+ CREATE INDEX #{index_name} ON #{table_name} (#{column_name});
+ SQL
+ end
+
+ context 'when the index serves an exclusion constraint' do
+ before do
+ allow(index).to receive(:exclusion?).and_return(true)
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(described_class::ReindexError, /indexes serving an exclusion constraint are currently not supported/)
+ end
+ end
+
+ context 'when attempting to reindex an expression index' do
+ before do
+ allow(index).to receive(:expression?).and_return(true)
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(described_class::ReindexError, /expression indexes are currently not supported/)
+ end
+ end
+
+ context 'when the index is a dangling temporary index from a previous reindexing run' do
+ context 'with the temporary index prefix' do
+ let(:index_name) { '_test_reindex_index_ccnew' }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(described_class::ReindexError, /left-over temporary index/)
+ end
+ end
+
+ context 'with the temporary index prefix with a counter' do
+ let(:index_name) { '_test_reindex_index_ccnew1' }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(described_class::ReindexError, /left-over temporary index/)
+ end
+ end
+ end
+
+ it 'recreates the index using REINDEX with a long statement timeout' do
+ expect_to_execute_in_order(
+ "SET statement_timeout TO '32400s'",
+ "REINDEX INDEX CONCURRENTLY \"public\".\"#{index.name}\"",
+ "RESET statement_timeout"
+ )
+
+ subject
+ end
+
+ context 'with dangling indexes matching TEMPORARY_INDEX_PATTERN, i.e. /some\_index\_ccnew(\d)*/' do
+ before do
+ # dangling indexes
+ connection.execute("CREATE INDEX #{iname(index_name, '_ccnew')} ON #{table_name} (#{column_name})")
+ connection.execute("CREATE INDEX #{iname(index_name, '_ccnew2')} ON #{table_name} (#{column_name})")
+
+ # Unrelated index - don't drop
+ connection.execute("CREATE INDEX some_other_index_ccnew ON #{table_name} (#{column_name})")
+ end
+
+ shared_examples_for 'dropping the dangling index' do
+ it 'drops the dangling indexes while controlling lock_timeout' do
+ expect_to_execute_in_order(
+ # Regular index rebuild
+ "SET statement_timeout TO '32400s'",
+ "REINDEX INDEX CONCURRENTLY \"public\".\"#{index_name}\"",
+ "RESET statement_timeout",
+ # Drop _ccnew index
+ "SET lock_timeout TO '60000ms'",
+ "DROP INDEX CONCURRENTLY IF EXISTS \"public\".\"#{iname(index_name, '_ccnew')}\"",
+ "RESET idle_in_transaction_session_timeout; RESET lock_timeout",
+ # Drop _ccnew2 index
+ "SET lock_timeout TO '60000ms'",
+ "DROP INDEX CONCURRENTLY IF EXISTS \"public\".\"#{iname(index_name, '_ccnew2')}\"",
+ "RESET idle_in_transaction_session_timeout; RESET lock_timeout"
+ )
+
+ subject
+ end
+ end
+
+ context 'with normal index names' do
+ it_behaves_like 'dropping the dangling index'
+ end
+
+ context 'with index name at 63 character limit' do
+ let(:index_name) { 'a' * 63 }
+
+ before do
+ # Another unrelated index - don't drop
+ extra_index = index_name[0...55]
+ connection.execute("CREATE INDEX #{extra_index}_ccnew ON #{table_name} (#{column_name})")
+ end
+
+ it_behaves_like 'dropping the dangling index'
+ end
+ end
+
+ def iname(name, suffix = '')
+ "#{name[0...63 - suffix.size]}#{suffix}"
+ end
+
+ def expect_to_execute_in_order(*queries)
+ # Indexes cannot be created CONCURRENTLY in a transaction. Since the tests are wrapped in transactions,
+ # verify the original call but pass through the non-concurrent form.
+ queries.each do |query|
+ expect(connection).to receive(:execute).with(query).ordered.and_wrap_original do |method, sql|
+ method.call(sql.sub(/CONCURRENTLY/, ''))
+ end
+ end
+ end
+end
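
The _ccnew suffix in these examples comes from PostgreSQL itself: when REINDEX INDEX CONCURRENTLY is interrupted, the transient index it was building is left behind as an invalid index named after the original with a _ccnew (or _ccnew1, _ccnew2, ...) suffix, which is why the reindexer drops anything matching that pattern around a run. A small sketch of the matching step, assuming a pattern equivalent to the spec's \_ccnew[0-9]*$:

    # Illustrative only; the real pattern constant lives in the reindexing code.
    TEMPORARY_INDEX_PATTERN = /_ccnew[0-9]*\z/

    candidates = %w[
      _test_reindex_index
      _test_reindex_index_ccnew
      _test_reindex_index_ccnew2
      some_other_index_ccnew
    ]

    dangling = candidates.grep(TEMPORARY_INDEX_PATTERN)
    # => ["_test_reindex_index_ccnew", "_test_reindex_index_ccnew2", "some_other_index_ccnew"]
    # The spec above additionally scopes the cleanup to indexes derived from the one
    # being rebuilt, which is why some_other_index_ccnew is expected to survive.
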
diff --git a/spec/lib/gitlab/database/reindexing_spec.rb b/spec/lib/gitlab/database/reindexing_spec.rb
index b2f038e8b62..8aff99544ca 100644
--- a/spec/lib/gitlab/database/reindexing_spec.rb
+++ b/spec/lib/gitlab/database/reindexing_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe Gitlab::Database::Reindexing do
it 'retrieves regular indexes that are not left-overs from previous runs' do
result = double
- expect(Gitlab::Database::PostgresIndex).to receive_message_chain('regular.where.not_match.not_match').with(no_args).with('NOT expression').with('^tmp_reindex_').with('^old_reindex_').and_return(result)
+ expect(Gitlab::Database::PostgresIndex).to receive_message_chain('not_match.reindexing_support').with('\_ccnew[0-9]*$').with(no_args).and_return(result)
expect(subject).to eq(result)
end
diff --git a/spec/lib/gitlab/database/schema_migrations/context_spec.rb b/spec/lib/gitlab/database/schema_migrations/context_spec.rb
new file mode 100644
index 00000000000..f3bed9b40d6
--- /dev/null
+++ b/spec/lib/gitlab/database/schema_migrations/context_spec.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::SchemaMigrations::Context do
+ let(:connection) { ActiveRecord::Base.connection }
+
+ let(:context) { described_class.new(connection) }
+
+ describe '#schema_directory' do
+ it 'returns db/schema_migrations' do
+ expect(context.schema_directory).to eq(File.join(Rails.root, 'db/schema_migrations'))
+ end
+
+ context 'multiple databases' do
+ let(:connection) { Ci::BaseModel.connection }
+
+ it 'returns a directory path that is database specific' do
+ skip_if_multiple_databases_not_setup
+
+ expect(context.schema_directory).to eq(File.join(Rails.root, 'db/ci_schema_migrations'))
+ end
+ end
+ end
+
+ describe '#versions_to_create' do
+ before do
+ allow(connection).to receive_message_chain(:schema_migration, :all_versions).and_return(migrated_versions)
+
+ migrations_struct = Struct.new(:version)
+ migrations = file_versions.map { |version| migrations_struct.new(version) }
+ allow(connection).to receive_message_chain(:migration_context, :migrations).and_return(migrations)
+ end
+
+ let(:version1) { '20200123' }
+ let(:version2) { '20200410' }
+ let(:version3) { '20200602' }
+ let(:version4) { '20200809' }
+
+ let(:migrated_versions) { file_versions }
+ let(:file_versions) { [version1, version2, version3, version4] }
+
+ context 'when migrated versions are the same as the migration file versions' do
+ it 'returns migrated versions' do
+ expect(context.versions_to_create).to eq(migrated_versions)
+ end
+ end
+
+ context 'when migrated versions are a subset of the migration file versions' do
+ let(:migrated_versions) { [version1, version2] }
+
+ it 'returns migrated versions' do
+ expect(context.versions_to_create).to eq(migrated_versions)
+ end
+ end
+
+ context 'when migrated versions are a superset of the migration file versions' do
+ let(:migrated_versions) { file_versions + ['20210809'] }
+
+ it 'returns file versions' do
+ expect(context.versions_to_create).to eq(file_versions)
+ end
+ end
+
+ context 'when migrated versions differ slightly from the migration file versions' do
+ let(:migrated_versions) { [version1, version2, version3, version4, '20210101'] }
+ let(:file_versions) { [version1, version2, version3, version4, '20210102'] }
+
+ it 'returns the common set' do
+ expect(context.versions_to_create).to eq([version1, version2, version3, version4])
+ end
+ end
+ end
+
+ def skip_if_multiple_databases_not_setup
+ skip 'Skipping because multiple databases not set up' unless Gitlab::Database.has_config?(:ci)
+ end
+end
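
Taken together, the #versions_to_create examples describe a simple set operation: keep only the versions that appear both in the schema_migrations table and as migration files, in migration-file order. A one-liner sketch of that behaviour (illustrative, not the implementation), using the version strings from the spec:

    migrated_versions = %w[20200123 20200410 20200602 20200809 20210101]
    file_versions     = %w[20200123 20200410 20200602 20200809 20210102]

    versions_to_create = file_versions & migrated_versions
    # => ["20200123", "20200410", "20200602", "20200809"]
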
diff --git a/spec/lib/gitlab/database/schema_version_files_spec.rb b/spec/lib/gitlab/database/schema_migrations/migrations_spec.rb
index c3b3ae0a07f..8be776fdb88 100644
--- a/spec/lib/gitlab/database/schema_version_files_spec.rb
+++ b/spec/lib/gitlab/database/schema_migrations/migrations_spec.rb
@@ -2,43 +2,37 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::SchemaVersionFiles do
- describe '.touch_all' do
+RSpec.describe Gitlab::Database::SchemaMigrations::Migrations do
+ let(:connection) { ApplicationRecord.connection }
+ let(:context) { Gitlab::Database::SchemaMigrations::Context.new(connection) }
+
+ let(:migrations) { described_class.new(context) }
+
+ describe '#touch_all' do
let(:version1) { '20200123' }
let(:version2) { '20200410' }
let(:version3) { '20200602' }
let(:version4) { '20200809' }
+
let(:relative_schema_directory) { 'db/schema_migrations' }
- let(:relative_migrate_directory) { 'db/migrate' }
- let(:relative_post_migrate_directory) { 'db/post_migrate' }
it 'creates a file containing a checksum for each version with a matching migration' do
Dir.mktmpdir do |tmpdir|
schema_directory = Pathname.new(tmpdir).join(relative_schema_directory)
- migrate_directory = Pathname.new(tmpdir).join(relative_migrate_directory)
- post_migrate_directory = Pathname.new(tmpdir).join(relative_post_migrate_directory)
-
- FileUtils.mkdir_p(migrate_directory)
- FileUtils.mkdir_p(post_migrate_directory)
FileUtils.mkdir_p(schema_directory)
- migration1_filepath = migrate_directory.join("#{version1}_migration.rb")
- FileUtils.touch(migration1_filepath)
-
- migration2_filepath = post_migrate_directory.join("#{version2}_post_migration.rb")
- FileUtils.touch(migration2_filepath)
-
old_version_filepath = schema_directory.join('20200101')
FileUtils.touch(old_version_filepath)
expect(File.exist?(old_version_filepath)).to be(true)
- allow(described_class).to receive(:schema_directory).and_return(schema_directory)
- allow(described_class).to receive(:migration_directories).and_return([migrate_directory, post_migrate_directory])
+ allow(context).to receive(:schema_directory).and_return(schema_directory)
+ allow(context).to receive(:versions_to_create).and_return([version1, version2])
- described_class.touch_all([version1, version2, version3, version4])
+ migrations.touch_all
expect(File.exist?(old_version_filepath)).to be(false)
+
[version1, version2].each do |version|
version_filepath = schema_directory.join(version)
expect(File.exist?(version_filepath)).to be(true)
@@ -55,12 +49,9 @@ RSpec.describe Gitlab::Database::SchemaVersionFiles do
end
end
- describe '.load_all' do
- let(:connection) { double('connection') }
-
+ describe '#load_all' do
before do
- allow(described_class).to receive(:connection).and_return(connection)
- allow(described_class).to receive(:find_version_filenames).and_return(filenames)
+ allow(migrations).to receive(:version_filenames).and_return(filenames)
end
context 'when there are no version files' do
@@ -70,7 +61,7 @@ RSpec.describe Gitlab::Database::SchemaVersionFiles do
expect(connection).not_to receive(:quote_string)
expect(connection).not_to receive(:execute)
- described_class.load_all
+ migrations.load_all
end
end
@@ -88,7 +79,7 @@ RSpec.describe Gitlab::Database::SchemaVersionFiles do
ON CONFLICT DO NOTHING
SQL
- described_class.load_all
+ migrations.load_all
end
end
end
diff --git a/spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb b/spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb
index e93d8ab590d..ff8e76311ae 100644
--- a/spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb
+++ b/spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb
@@ -37,8 +37,10 @@ RSpec.describe Gitlab::Database::WithLockRetriesOutsideTransaction do
context 'when lock retry is enabled' do
let(:lock_fiber) do
Fiber.new do
+ configuration = ActiveRecordSecond.configurations.find_db_config(Rails.env).configuration_hash
+
# Initiating a second DB connection for the lock
- conn = ActiveRecordSecond.establish_connection(Rails.configuration.database_configuration[Rails.env]).connection
+ conn = ActiveRecordSecond.establish_connection(configuration).connection
conn.transaction do
conn.execute("LOCK TABLE #{Project.table_name} in exclusive mode")
diff --git a/spec/lib/gitlab/database/with_lock_retries_spec.rb b/spec/lib/gitlab/database/with_lock_retries_spec.rb
index df2c506e163..367f793b117 100644
--- a/spec/lib/gitlab/database/with_lock_retries_spec.rb
+++ b/spec/lib/gitlab/database/with_lock_retries_spec.rb
@@ -37,8 +37,10 @@ RSpec.describe Gitlab::Database::WithLockRetries do
context 'when lock retry is enabled' do
let(:lock_fiber) do
Fiber.new do
+ configuration = ActiveRecordSecond.configurations.find_db_config(Rails.env).configuration_hash
+
# Initiating a second DB connection for the lock
- conn = ActiveRecordSecond.establish_connection(Rails.configuration.database_configuration[Rails.env]).connection
+ conn = ActiveRecordSecond.establish_connection(configuration).connection
conn.transaction do
conn.execute("LOCK TABLE #{Project.table_name} in exclusive mode")
diff --git a/spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb b/spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb
index e70b34d6557..2740664d200 100644
--- a/spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb
+++ b/spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb
@@ -56,10 +56,10 @@ RSpec.describe Gitlab::DatabaseImporters::InstanceAdministrators::CreateGroup do
it "tracks successful install" do
expect(::Gitlab::Tracking).to receive(:event).with(
- 'instance_administrators_group', 'group_created'
+ 'instance_administrators_group', 'group_created', namespace: group
)
- result
+ subject.execute
end
it 'creates group' do
diff --git a/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb b/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb
index 28291508ac0..f5ea660ee1e 100644
--- a/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb
+++ b/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb
@@ -63,11 +63,11 @@ RSpec.describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService
application_setting.update(allow_local_requests_from_web_hooks_and_services: true)
end
- shared_examples 'has prometheus service' do |server_address|
+ shared_examples 'has prometheus integration' do |server_address|
it do
expect(result[:status]).to eq(:success)
- prometheus = project.prometheus_service
+ prometheus = project.prometheus_integration
expect(prometheus).not_to eq(nil)
expect(prometheus.api_url).to eq(server_address)
expect(prometheus.active).to eq(true)
@@ -75,7 +75,7 @@ RSpec.describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService
end
end
- it_behaves_like 'has prometheus service', 'http://localhost:9090'
+ it_behaves_like 'has prometheus integration', 'http://localhost:9090'
it 'is idempotent' do
result1 = subject.execute
@@ -86,10 +86,10 @@ RSpec.describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService
end
it "tracks successful install" do
- expect(::Gitlab::Tracking).to receive(:event).twice
- expect(::Gitlab::Tracking).to receive(:event).with('self_monitoring', 'project_created')
+ expect(::Gitlab::Tracking).to receive(:event).with("instance_administrators_group", "group_created", namespace: project.namespace)
+ expect(::Gitlab::Tracking).to receive(:event).with('self_monitoring', 'project_created', project: project, namespace: project.namespace)
- result
+ subject.execute
end
it 'creates group' do
@@ -134,13 +134,13 @@ RSpec.describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService
expect(application_setting.reload.self_monitoring_project_id).to eq(project.id)
end
- it 'creates a Prometheus service' do
+ it 'creates a Prometheus integration' do
expect(result[:status]).to eq(:success)
integrations = result[:project].reload.integrations
expect(integrations.count).to eq(1)
- # Ensures PrometheusService#self_monitoring_project? is true
+ # Ensures Integrations::Prometheus#self_monitoring_project? is true
expect(integrations.first.allow_local_api_url?).to be_truthy
end
@@ -193,12 +193,12 @@ RSpec.describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService
end
end
- context 'when local requests from hooks and services are not allowed' do
+ context 'when local requests from hooks and integrations are not allowed' do
before do
application_setting.update(allow_local_requests_from_web_hooks_and_services: false)
end
- it_behaves_like 'has prometheus service', 'http://localhost:9090'
+ it_behaves_like 'has prometheus integration', 'http://localhost:9090'
end
context 'with non default prometheus address' do
@@ -211,18 +211,18 @@ RSpec.describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService
}
end
- it_behaves_like 'has prometheus service', 'https://localhost:9090'
+ it_behaves_like 'has prometheus integration', 'https://localhost:9090'
context 'with :9090 symbol' do
let(:server_address) { :':9090' }
- it_behaves_like 'has prometheus service', 'http://localhost:9090'
+ it_behaves_like 'has prometheus integration', 'http://localhost:9090'
end
context 'with 0.0.0.0:9090' do
let(:server_address) { '0.0.0.0:9090' }
- it_behaves_like 'has prometheus service', 'http://localhost:9090'
+ it_behaves_like 'has prometheus integration', 'http://localhost:9090'
end
end
@@ -233,7 +233,7 @@ RSpec.describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService
it 'does not fail' do
expect(result).to include(status: :success)
- expect(project.prometheus_service).to be_nil
+ expect(project.prometheus_integration).to be_nil
end
end
@@ -244,7 +244,7 @@ RSpec.describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService
it 'does not fail' do
expect(result).to include(status: :success)
- expect(project.prometheus_service).to be_nil
+ expect(project.prometheus_integration).to be_nil
end
end
@@ -258,7 +258,7 @@ RSpec.describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService
it 'does not configure prometheus' do
expect(result).to include(status: :success)
- expect(project.prometheus_service).to be_nil
+ expect(project.prometheus_integration).to be_nil
end
end
@@ -267,7 +267,7 @@ RSpec.describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService
it 'does not configure prometheus' do
expect(result).to include(status: :success)
- expect(project.prometheus_service).to be_nil
+ expect(project.prometheus_integration).to be_nil
end
end
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index 847f7ec2d74..a834e41c019 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -41,6 +41,79 @@ RSpec.describe Gitlab::Database do
end
end
+ describe '.has_config?' do
+ context 'two tier database config' do
+ before do
+ allow(Gitlab::Application).to receive_message_chain(:config, :database_configuration, :[]).with(Rails.env)
+ .and_return({ "adapter" => "postgresql", "database" => "gitlabhq_test" })
+ end
+
+ it 'returns false for primary' do
+ expect(described_class.has_config?(:primary)).to eq(false)
+ end
+
+ it 'returns false for ci' do
+ expect(described_class.has_config?(:ci)).to eq(false)
+ end
+ end
+
+ context 'three tier database config' do
+ before do
+ allow(Gitlab::Application).to receive_message_chain(:config, :database_configuration, :[]).with(Rails.env)
+ .and_return({
+ "primary" => { "adapter" => "postgresql", "database" => "gitlabhq_test" },
+ "ci" => { "adapter" => "postgresql", "database" => "gitlabhq_test_ci" }
+ })
+ end
+
+ it 'returns true for primary' do
+ expect(described_class.has_config?(:primary)).to eq(true)
+ end
+
+ it 'returns true for ci' do
+ expect(described_class.has_config?(:ci)).to eq(true)
+ end
+
+ it 'returns false for non-existent' do
+ expect(described_class.has_config?(:nonexistent)).to eq(false)
+ end
+ end
+ end
+
+ describe '.main_database?' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:database_name, :result) do
+ :main | true
+ 'main' | true
+ :ci | false
+ 'ci' | false
+ :archive | false
+ 'archive' | false
+ end
+
+ with_them do
+ it { expect(described_class.main_database?(database_name)).to eq(result) }
+ end
+ end
+
+ describe '.ci_database?' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:database_name, :result) do
+ :main | false
+ 'main' | false
+ :ci | true
+ 'ci' | true
+ :archive | false
+ 'archive' | false
+ end
+
+ with_them do
+ it { expect(described_class.ci_database?(database_name)).to eq(result) }
+ end
+ end
+
describe '.adapter_name' do
it 'returns the name of the adapter' do
expect(described_class.adapter_name).to be_an_instance_of(String)
@@ -414,6 +487,23 @@ RSpec.describe Gitlab::Database do
end
end
+ describe '.dbname' do
+ it 'returns the dbname for the connection' do
+ connection = ActiveRecord::Base.connection
+
+ expect(described_class.dbname(connection)).to be_a(String)
+ expect(described_class.dbname(connection)).to eq(connection.pool.db_config.database)
+ end
+
+ context 'when the pool is a NullPool' do
+ it 'returns unknown' do
+ connection = double(:active_record_connection, pool: ActiveRecord::ConnectionAdapters::NullPool.new)
+
+ expect(described_class.dbname(connection)).to eq('unknown')
+ end
+ end
+ end
+
describe '#true_value' do
it 'returns correct value' do
expect(described_class.true_value).to eq "'t'"
diff --git a/spec/lib/gitlab/deploy_key_access_spec.rb b/spec/lib/gitlab/deploy_key_access_spec.rb
index e186e993d8f..83b97c8ba25 100644
--- a/spec/lib/gitlab/deploy_key_access_spec.rb
+++ b/spec/lib/gitlab/deploy_key_access_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::DeployKeyAccess do
let_it_be(:user) { create(:user) }
let_it_be(:deploy_key) { create(:deploy_key, user: user) }
+
let(:project) { create(:project, :repository) }
let(:protected_branch) { create(:protected_branch, :no_one_can_push, project: project) }
diff --git a/spec/lib/gitlab/diff/file_collection/base_spec.rb b/spec/lib/gitlab/diff/file_collection/base_spec.rb
new file mode 100644
index 00000000000..00d3aa47301
--- /dev/null
+++ b/spec/lib/gitlab/diff/file_collection/base_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Diff::FileCollection::Base do
+ let(:merge_request) { create(:merge_request) }
+ let(:diffable) { merge_request.merge_request_diff }
+ let(:diff_options) { {} }
+
+ describe '#overflow?' do
+ subject(:overflown) { described_class.new(diffable, project: merge_request.project, diff_options: diff_options).overflow? }
+
+ context 'when it is not overflown' do
+ it 'returns false' do
+ expect(overflown).to eq(false)
+ end
+ end
+
+ context 'when it is overflown' do
+ let(:diff_options) { { max_files: 1 } }
+
+ it 'returns true' do
+ expect(overflown).to eq(true)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/diff/file_collection/commit_spec.rb b/spec/lib/gitlab/diff/file_collection/commit_spec.rb
index 3d995b36b6f..cfb5f50edbe 100644
--- a/spec/lib/gitlab/diff/file_collection/commit_spec.rb
+++ b/spec/lib/gitlab/diff/file_collection/commit_spec.rb
@@ -75,4 +75,12 @@ RSpec.describe Gitlab::Diff::FileCollection::Commit do
]
end
end
+
+ describe '#cache_key' do
+ subject(:cache_key) { described_class.new(diffable, diff_options: nil).cache_key }
+
+ it 'returns with the commit id' do
+ expect(cache_key).to eq ['commit', diffable.id]
+ end
+ end
end
diff --git a/spec/lib/gitlab/diff/file_collection/compare_spec.rb b/spec/lib/gitlab/diff/file_collection/compare_spec.rb
index f3326f4f03d..ce70903a480 100644
--- a/spec/lib/gitlab/diff/file_collection/compare_spec.rb
+++ b/spec/lib/gitlab/diff/file_collection/compare_spec.rb
@@ -15,29 +15,20 @@ RSpec.describe Gitlab::Diff::FileCollection::Compare do
head_commit.id)
end
- it_behaves_like 'diff statistics' do
- let(:collection_default_args) do
- {
- project: diffable.project,
- diff_options: {},
- diff_refs: diffable.diff_refs
- }
- end
+ let(:diffable) { Compare.new(raw_compare, project) }
+ let(:collection_default_args) do
+ {
+ project: diffable.project,
+ diff_options: {},
+ diff_refs: diffable.diff_refs
+ }
+ end
- let(:diffable) { Compare.new(raw_compare, project) }
+ it_behaves_like 'diff statistics' do
let(:stub_path) { '.gitignore' }
end
it_behaves_like 'sortable diff files' do
- let(:diffable) { Compare.new(raw_compare, project) }
- let(:collection_default_args) do
- {
- project: diffable.project,
- diff_options: {},
- diff_refs: diffable.diff_refs
- }
- end
-
let(:unsorted_diff_files_paths) do
[
'.DS_Store',
@@ -66,4 +57,12 @@ RSpec.describe Gitlab::Diff::FileCollection::Compare do
]
end
end
+
+ describe '#cache_key' do
+ subject(:cache_key) { described_class.new(diffable, **collection_default_args).cache_key }
+
+ it 'returns with head and base' do
+ expect(cache_key).to eq ['compare', head_commit.id, start_commit.id]
+ end
+ end
end
diff --git a/spec/lib/gitlab/diff/file_collection/merge_request_diff_base_spec.rb b/spec/lib/gitlab/diff/file_collection/merge_request_diff_base_spec.rb
new file mode 100644
index 00000000000..51bee6d45e4
--- /dev/null
+++ b/spec/lib/gitlab/diff/file_collection/merge_request_diff_base_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Diff::FileCollection::MergeRequestDiffBase do
+ let(:merge_request) { create(:merge_request) }
+ let(:diffable) { merge_request.merge_request_diff }
+
+ describe '#overflow?' do
+ subject(:overflown) { described_class.new(diffable, diff_options: nil).overflow? }
+
+ context 'when it is not overflown' do
+ it 'returns false' do
+ expect(overflown).to eq(false)
+ end
+ end
+
+ context 'when it is overflown' do
+ before do
+ diffable.update!(state: :overflow)
+ end
+
+ it 'returns true' do
+ expect(overflown).to eq(true)
+ end
+ end
+ end
+
+ describe '#cache_key' do
+ subject(:cache_key) { described_class.new(diffable, diff_options: nil).cache_key }
+
+ it 'returns cache_key from merge_request_diff' do
+ expect(cache_key).to eq diffable.cache_key
+ end
+ end
+end
diff --git a/spec/lib/gitlab/diff/file_collection/merge_request_diff_batch_spec.rb b/spec/lib/gitlab/diff/file_collection/merge_request_diff_batch_spec.rb
index 670c734ce08..beb85d383a0 100644
--- a/spec/lib/gitlab/diff/file_collection/merge_request_diff_batch_spec.rb
+++ b/spec/lib/gitlab/diff/file_collection/merge_request_diff_batch_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Diff::FileCollection::MergeRequestDiffBatch do
let(:merge_request) { create(:merge_request) }
- let(:batch_page) { 1 }
+ let(:batch_page) { 0 }
let(:batch_size) { 10 }
let(:diffable) { merge_request.merge_request_diff }
let(:diff_files_relation) { diffable.merge_request_diff_files }
@@ -18,19 +18,15 @@ RSpec.describe Gitlab::Diff::FileCollection::MergeRequestDiffBatch do
let(:diff_files) { subject.diff_files }
- before do
- stub_feature_flags(diffs_gradual_load: false)
- end
-
describe 'initialize' do
it 'memoizes pagination_data' do
- expect(subject.pagination_data).to eq(current_page: 1, next_page: 2, total_pages: 2)
+ expect(subject.pagination_data).to eq(total_pages: 20)
end
end
describe '#diff_files' do
let(:batch_size) { 3 }
- let(:paginated_rel) { diff_files_relation.page(batch_page).per(batch_size) }
+ let(:paginated_rel) { diff_files_relation.offset(batch_page).limit(batch_size) }
let(:expected_batch_files) do
paginated_rel.map(&:new_path)
@@ -51,7 +47,7 @@ RSpec.describe Gitlab::Diff::FileCollection::MergeRequestDiffBatch do
end
context 'another page' do
- let(:batch_page) { 2 }
+ let(:batch_page) { 1 }
it 'returns correct diff files' do
expect(diff_files.map(&:new_path)).to eq(expected_batch_files)
@@ -63,7 +59,7 @@ RSpec.describe Gitlab::Diff::FileCollection::MergeRequestDiffBatch do
it 'returns correct diff files' do
expected_batch_files =
- diff_files_relation.page(described_class::DEFAULT_BATCH_PAGE).per(batch_size).map(&:new_path)
+ diff_files_relation.offset(described_class::DEFAULT_BATCH_PAGE).limit(batch_size).map(&:new_path)
expect(diff_files.map(&:new_path)).to eq(expected_batch_files)
end
@@ -74,7 +70,7 @@ RSpec.describe Gitlab::Diff::FileCollection::MergeRequestDiffBatch do
it 'returns correct diff files' do
expected_batch_files =
- diff_files_relation.page(batch_page).per(described_class::DEFAULT_BATCH_SIZE).map(&:new_path)
+ diff_files_relation.offset(batch_page).limit(described_class::DEFAULT_BATCH_SIZE).map(&:new_path)
expect(diff_files.map(&:new_path)).to eq(expected_batch_files)
end
@@ -90,29 +86,17 @@ RSpec.describe Gitlab::Diff::FileCollection::MergeRequestDiffBatch do
context 'last page' do
it 'returns correct diff files' do
- last_page = paginated_rel.total_pages
+ last_page = diff_files_relation.count - batch_size
collection = described_class.new(diffable,
last_page,
batch_size,
diff_options: nil)
- expected_batch_files = diff_files_relation.page(last_page).per(batch_size).map(&:new_path)
+ expected_batch_files = diff_files_relation.offset(last_page).limit(batch_size).map(&:new_path)
expect(collection.diff_files.map(&:new_path)).to eq(expected_batch_files)
end
end
-
- context 'with diffs gradual load feature flag enabled' do
- let(:batch_page) { 0 }
-
- before do
- stub_feature_flags(diffs_gradual_load: true)
- end
-
- it 'returns correct diff files' do
- expect(subject.diffs.map(&:new_path)).to eq(diff_files_relation.page(1).per(batch_size).map(&:new_path))
- end
- end
end
it_behaves_like 'unfoldable diff' do
@@ -130,7 +114,7 @@ RSpec.describe Gitlab::Diff::FileCollection::MergeRequestDiffBatch do
end
let(:diffable) { merge_request.merge_request_diff }
- let(:batch_page) { 2 }
+ let(:batch_page) { 10 }
let(:stub_path) { '.gitignore' }
subject do
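
The batching change running through this file swaps Kaminari-style pagination for plain offset/limit, so batch_page is now a zero-based row offset rather than a one-based page number. The indexing arithmetic, shown on a plain array for illustration:

    rows = (1..25).to_a
    batch_size = 10

    # Kaminari-style: page numbers start at 1
    page = 2
    rows[(page - 1) * batch_size, batch_size]   # => [11, 12, ..., 20]

    # offset/limit-style: batch_page is already a row offset
    batch_page = 10
    rows[batch_page, batch_size]                # => [11, 12, ..., 20]
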
diff --git a/spec/lib/gitlab/diff/position_tracer/line_strategy_spec.rb b/spec/lib/gitlab/diff/position_tracer/line_strategy_spec.rb
index d9f384fb47f..bdeaabec1f1 100644
--- a/spec/lib/gitlab/diff/position_tracer/line_strategy_spec.rb
+++ b/spec/lib/gitlab/diff/position_tracer/line_strategy_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Diff::PositionTracer::LineStrategy do
+RSpec.describe Gitlab::Diff::PositionTracer::LineStrategy, :clean_gitlab_redis_cache do
# Douwe's diary New York City, 2016-06-28
# --------------------------------------------------------------------------
#
@@ -288,6 +288,27 @@ RSpec.describe Gitlab::Diff::PositionTracer::LineStrategy do
new_line: old_position.new_line
)
end
+
+ context "when the position is multiline" do
+ let(:old_position) do
+ position(
+ new_path: file_name,
+ new_line: 2,
+ line_range: {
+ "start_line_code" => 1,
+ "end_line_code" => 2
+ }
+ )
+ end
+
+ it "returns the new position along with line_range" do
+ expect_new_position(
+ new_path: old_position.new_path,
+ new_line: old_position.new_line,
+ line_range: old_position.line_range
+ )
+ end
+ end
end
context "when the file's content was changed between the old and the new diff" do
@@ -547,6 +568,29 @@ RSpec.describe Gitlab::Diff::PositionTracer::LineStrategy do
new_line: 2
)
end
+
+ context "when the position is multiline" do
+ let(:old_position) do
+ position(
+ new_path: file_name,
+ new_line: 2,
+ line_range: {
+ "start_line_code" => 1,
+ "end_line_code" => 2
+ }
+ )
+ end
+
+ it "returns the new position but drops line_range information" do
+ expect_change_position(
+ old_path: file_name,
+ new_path: file_name,
+ old_line: nil,
+ new_line: 2,
+ line_range: nil
+ )
+ end
+ end
end
context "when the file's content was changed between the old and the new diff" do
diff --git a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
index 3a60564d8d2..e8470657181 100644
--- a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
let(:email_raw) { email_fixture('emails/service_desk.eml') }
let_it_be(:group) { create(:group, :private, name: "email") }
+
let(:expected_description) do
"Service desk stuff!\n\n```\na = b\n```\n\n`/label ~label1`\n`/assign @user1`\n`/close`\n![image](uploads/image.png)"
end
@@ -50,6 +51,15 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
it 'sends thank you email' do
expect { receiver.execute }.to have_enqueued_job.on_queue('mailers')
end
+
+ it 'adds metric events for incoming and reply emails' do
+ metric_transaction = double('Gitlab::Metrics::WebTransaction', increment: true, observe: true)
+ allow(::Gitlab::Metrics::BackgroundTransaction).to receive(:current).and_return(metric_transaction)
+ expect(metric_transaction).to receive(:add_event).with(:receive_email_service_desk, { handler: 'Gitlab::Email::Handler::ServiceDeskHandler' })
+ expect(metric_transaction).to receive(:add_event).with(:service_desk_thank_you_email)
+
+ receiver.execute
+ end
end
context 'when everything is fine' do
@@ -169,6 +179,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
context 'when using service desk key' do
let_it_be(:service_desk_key) { 'mykey' }
+
let(:email_raw) { service_desk_fixture('emails/service_desk_custom_address.eml') }
let(:receiver) { Gitlab::Email::ServiceDeskReceiver.new(email_raw) }
@@ -200,6 +211,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
context 'when there are multiple projects with same key' do
let_it_be(:project_with_same_key) { create(:project, group: group, service_desk_enabled: true) }
+
let(:email_raw) { service_desk_fixture('emails/service_desk_custom_address.eml', slug: project_with_same_key.full_path_slug.to_s) }
before do
diff --git a/spec/lib/gitlab/email/receiver_spec.rb b/spec/lib/gitlab/email/receiver_spec.rb
index 2c1fe529a5d..b1a04f0592a 100644
--- a/spec/lib/gitlab/email/receiver_spec.rb
+++ b/spec/lib/gitlab/email/receiver_spec.rb
@@ -5,106 +5,125 @@ require 'spec_helper'
RSpec.describe Gitlab::Email::Receiver do
include_context :email_shared_context
- shared_examples 'correctly finds the mail key and adds metric event' do
- let(:metric_transaction) { double('Gitlab::Metrics::WebTransaction') }
+ let(:metric_transaction) { instance_double(Gitlab::Metrics::WebTransaction) }
- specify :aggregate_failures do
+ shared_examples 'successful receive' do
+ let_it_be(:project) { create(:project) }
+
+ let(:handler) { double(:handler, project: project, execute: true, metrics_event: nil, metrics_params: nil) }
+
+ it 'correctly finds the mail key' do
expect(Gitlab::Email::Handler).to receive(:for).with(an_instance_of(Mail::Message), 'gitlabhq/gitlabhq+auth_token').and_return(handler)
+
+ receiver.execute
+ end
+
+ it 'adds metric event' do
+ allow(receiver).to receive(:handler).and_return(handler)
+
expect(::Gitlab::Metrics::BackgroundTransaction).to receive(:current).and_return(metric_transaction)
expect(metric_transaction).to receive(:add_event).with(handler.metrics_event, handler.metrics_params)
receiver.execute
end
+
+ it 'returns valid metadata' do
+ allow(receiver).to receive(:handler).and_return(handler)
+
+ metadata = receiver.mail_metadata
+
+ expect(metadata.keys).to match_array(%i(mail_uid from_address to_address mail_key references delivered_to envelope_to x_envelope_to meta))
+ expect(metadata[:meta]).to include(client_id: 'email/jake@example.com', project: project.full_path)
+ expect(metadata[meta_key]).to eq(meta_value)
+ end
end
context 'when the email contains a valid email address in a header' do
- let(:handler) { double(:handler) }
- let(:metadata) { receiver.mail_metadata }
-
before do
- allow(handler).to receive(:execute)
- allow(handler).to receive(:metrics_params)
- allow(handler).to receive(:metrics_event)
-
stub_incoming_email_setting(enabled: true, address: "incoming+%{key}@appmail.example.com")
-
- expect(receiver.mail_metadata.keys).to match_array(%i(mail_uid from_address to_address mail_key references delivered_to envelope_to x_envelope_to))
end
context 'when in a Delivered-To header' do
let(:email_raw) { fixture_file('emails/forwarded_new_issue.eml') }
+ let(:meta_key) { :delivered_to }
+ let(:meta_value) { ["incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com", "support@example.com"] }
- it_behaves_like 'correctly finds the mail key and adds metric event'
-
- it 'parses the metadata' do
- expect(metadata[:delivered_to]). to eq(["incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com", "support@example.com"])
- end
+ it_behaves_like 'successful receive'
end
context 'when in an Envelope-To header' do
let(:email_raw) { fixture_file('emails/envelope_to_header.eml') }
+ let(:meta_key) { :envelope_to }
+ let(:meta_value) { ["incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com"] }
- it_behaves_like 'correctly finds the mail key and adds metric event'
-
- it 'parses the metadata' do
- expect(metadata[:envelope_to]). to eq(["incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com"])
- end
+ it_behaves_like 'successful receive'
end
context 'when in an X-Envelope-To header' do
let(:email_raw) { fixture_file('emails/x_envelope_to_header.eml') }
+ let(:meta_key) { :x_envelope_to }
+ let(:meta_value) { ["incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com"] }
- it_behaves_like 'correctly finds the mail key and adds metric event'
-
- it 'parses the metadata' do
- expect(metadata[:x_envelope_to]). to eq(["incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com"])
- end
+ it_behaves_like 'successful receive'
end
context 'when enclosed with angle brackets in an Envelope-To header' do
let(:email_raw) { fixture_file('emails/envelope_to_header_with_angle_brackets.eml') }
+ let(:meta_key) { :envelope_to }
+ let(:meta_value) { ["<incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com>"] }
- it_behaves_like 'correctly finds the mail key and adds metric event'
+ it_behaves_like 'successful receive'
end
end
- context "when we cannot find a capable handler" do
- let(:email_raw) { fixture_file('emails/valid_reply.eml').gsub(mail_key, "!!!") }
+ shared_examples 'failed receive' do
+ it 'adds metric event' do
+ expect(::Gitlab::Metrics::BackgroundTransaction).to receive(:current).and_return(metric_transaction)
+ expect(metric_transaction).to receive(:add_event).with('email_receiver_error', { error: expected_error.name })
- it "raises an UnknownIncomingEmail error" do
- expect { receiver.execute }.to raise_error(Gitlab::Email::UnknownIncomingEmail)
+ expect { receiver.execute }.to raise_error(expected_error)
end
end
- context "when the email is blank" do
- let(:email_raw) { "" }
+ context 'when we cannot find a capable handler' do
+ let(:email_raw) { fixture_file('emails/valid_reply.eml').gsub(mail_key, '!!!') }
+ let(:expected_error) { Gitlab::Email::UnknownIncomingEmail }
- it "raises an EmptyEmailError" do
- expect { receiver.execute }.to raise_error(Gitlab::Email::EmptyEmailError)
- end
+ it_behaves_like 'failed receive'
end
- context "when the email was auto generated with Auto-Submitted header" do
- let(:email_raw) { fixture_file("emails/auto_submitted.eml") }
+ context 'when the email is blank' do
+ let(:email_raw) { '' }
+ let(:expected_error) { Gitlab::Email::EmptyEmailError }
- it "raises an AutoGeneratedEmailError" do
- expect { receiver.execute }.to raise_error(Gitlab::Email::AutoGeneratedEmailError)
- end
+ it_behaves_like 'failed receive'
end
- context "when the email was auto generated with X-Autoreply header" do
- let(:email_raw) { fixture_file("emails/auto_reply.eml") }
+ context 'when the email was auto generated with Auto-Submitted header' do
+ let(:email_raw) { fixture_file('emails/auto_submitted.eml') }
+ let(:expected_error) { Gitlab::Email::AutoGeneratedEmailError }
- it "raises an AutoGeneratedEmailError" do
- expect { receiver.execute }.to raise_error(Gitlab::Email::AutoGeneratedEmailError)
- end
+ it_behaves_like 'failed receive'
end
- it "requires all handlers to have a unique metric_event" do
+ context 'when the email was auto generated with X-Autoreply header' do
+ let(:email_raw) { fixture_file('emails/auto_reply.eml') }
+ let(:expected_error) { Gitlab::Email::AutoGeneratedEmailError }
+
+ it_behaves_like 'failed receive'
+ end
+
+ it 'requires all handlers to have a unique metric_event' do
events = Gitlab::Email::Handler.handlers.map do |handler|
handler.new(Mail::Message.new, 'gitlabhq/gitlabhq+auth_token').metrics_event
end
expect(events.uniq.count).to eq events.count
end
+
+ it 'requires all handlers to respond to #project' do
+ Gitlab::Email::Handler.load_handlers.each do |handler|
+ expect { handler.new(nil, nil).project }.not_to raise_error
+ end
+ end
end
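
The receiver spec now funnels the error cases through a pair of shared example groups; it_behaves_like evaluates a group in a nested context, so the let definitions of the including context (email_raw, expected_error) are visible inside it. The skeleton of that arrangement, assuming the spec's email_shared_context defines receiver from email_raw:

    RSpec.shared_examples 'failed receive' do
      it 'raises the expected error' do
        expect { receiver.execute }.to raise_error(expected_error)
      end
    end

    RSpec.describe Gitlab::Email::Receiver do
      include_context :email_shared_context

      context 'when the email is blank' do
        let(:email_raw) { '' }
        let(:expected_error) { Gitlab::Email::EmptyEmailError }

        it_behaves_like 'failed receive'
      end
    end
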
diff --git a/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb b/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb
index 6076e525f06..9acc7fd04be 100644
--- a/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb
+++ b/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb
@@ -15,6 +15,18 @@ RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor do
let(:event) { Raven::Event.from_exception(exception, required_options.merge(data)) }
let(:result_hash) { described_class.call(event).to_hash }
+ let(:data) do
+ {
+ extra: {
+ caller: 'test'
+ },
+ fingerprint: [
+ 'GRPC::DeadlineExceeded',
+ '4:Deadline Exceeded. debug_error_string:{"created":"@1598938192.005782000","description":"Error received from peer unix:/home/git/gitalypraefect.socket","file":"src/core/lib/surface/call.cc","file_line":1055,"grpc_message":"Deadline Exceeded","grpc_status":4}'
+ ]
+ }
+ end
+
context 'when there is no GRPC exception' do
let(:exception) { RuntimeError.new }
let(:data) { { fingerprint: ['ArgumentError', 'Missing arguments'] } }
@@ -24,19 +36,47 @@ RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor do
end
end
- context 'when there is a GPRC exception with a debug string' do
+ context 'when there is a GRPC exception with a debug string' do
let(:exception) { GRPC::DeadlineExceeded.new('Deadline Exceeded', {}, '{"hello":1}') }
- let(:data) do
- {
- extra: {
- caller: 'test'
- },
- fingerprint: [
- 'GRPC::DeadlineExceeded',
- '4:Deadline Exceeded. debug_error_string:{"created":"@1598938192.005782000","description":"Error received from peer unix:/home/git/gitalypraefect.socket","file":"src/core/lib/surface/call.cc","file_line":1055,"grpc_message":"Deadline Exceeded","grpc_status":4}'
- ]
- }
+ it 'removes the debug error string and stores it as an extra field' do
+ expect(result_hash[:fingerprint])
+ .to eq(['GRPC::DeadlineExceeded', '4:Deadline Exceeded.'])
+
+ expect(result_hash[:exception][:values].first)
+ .to include(type: 'GRPC::DeadlineExceeded', value: '4:Deadline Exceeded.')
+
+ expect(result_hash[:extra])
+ .to include(caller: 'test', grpc_debug_error_string: '{"hello":1}')
+ end
+
+ context 'with no custom fingerprint' do
+ let(:data) do
+ { extra: { caller: 'test' } }
+ end
+
+ it 'removes the debug error string and stores it as an extra field' do
+ expect(result_hash).not_to include(:fingerprint)
+
+ expect(result_hash[:exception][:values].first)
+ .to include(type: 'GRPC::DeadlineExceeded', value: '4:Deadline Exceeded.')
+
+ expect(result_hash[:extra])
+ .to include(caller: 'test', grpc_debug_error_string: '{"hello":1}')
+ end
+ end
+ end
+
+ context 'when there is a wrapped GRPC exception with a debug string' do
+ let(:inner_exception) { GRPC::DeadlineExceeded.new('Deadline Exceeded', {}, '{"hello":1}') }
+ let(:exception) do
+ begin
+ raise inner_exception
+ rescue GRPC::DeadlineExceeded
+ raise StandardError.new, inner_exception.message
+ end
+ rescue StandardError => e
+ e
end
it 'removes the debug error string and stores it as an extra field' do
@@ -46,6 +86,9 @@ RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor do
expect(result_hash[:exception][:values].first)
.to include(type: 'GRPC::DeadlineExceeded', value: '4:Deadline Exceeded.')
+ expect(result_hash[:exception][:values].second)
+ .to include(type: 'StandardError', value: '4:Deadline Exceeded.')
+
expect(result_hash[:extra])
.to include(caller: 'test', grpc_debug_error_string: '{"hello":1}')
end
@@ -61,6 +104,9 @@ RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor do
expect(result_hash[:exception][:values].first)
.to include(type: 'GRPC::DeadlineExceeded', value: '4:Deadline Exceeded.')
+ expect(result_hash[:exception][:values].second)
+ .to include(type: 'StandardError', value: '4:Deadline Exceeded.')
+
expect(result_hash[:extra])
.to include(caller: 'test', grpc_debug_error_string: '{"hello":1}')
end
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index 336bf20d59c..706bcdea291 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -869,6 +869,128 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
end
end
+ describe '#blobs' do
+ let_it_be(:commit_oid) { '4b4918a572fa86f9771e5ba40fbd48e1eb03e2c6' }
+
+ shared_examples 'a blob enumeration' do
+ it 'enumerates blobs' do
+ blobs = repository.blobs(revisions).to_a
+
+ expect(blobs.size).to eq(expected_blobs)
+ blobs.each do |blob|
+ expect(blob.data).to be_empty
+ expect(blob.id.size).to be(40)
+ end
+ end
+ end
+
+ context 'single revision' do
+ let(:revisions) { [commit_oid] }
+ let(:expected_blobs) { 53 }
+
+ it_behaves_like 'a blob enumeration'
+ end
+
+ context 'multiple revisions' do
+ let(:revisions) { ["^#{commit_oid}~", commit_oid] }
+ let(:expected_blobs) { 1 }
+
+ it_behaves_like 'a blob enumeration'
+ end
+
+ context 'pseudo revisions' do
+ let(:revisions) { ['master', '--not', '--all'] }
+ let(:expected_blobs) { 0 }
+
+ it_behaves_like 'a blob enumeration'
+ end
+
+ context 'blank revisions' do
+ let(:revisions) { [::Gitlab::Git::BLANK_SHA] }
+ let(:expected_blobs) { 0 }
+
+ before do
+ expect_any_instance_of(Gitlab::GitalyClient::BlobService)
+ .not_to receive(:list_blobs)
+ end
+
+ it_behaves_like 'a blob enumeration'
+ end
+
+ context 'partially blank revisions' do
+ let(:revisions) { [::Gitlab::Git::BLANK_SHA, commit_oid] }
+ let(:expected_blobs) { 53 }
+
+ before do
+ expect_next_instance_of(Gitlab::GitalyClient::BlobService) do |service|
+ expect(service)
+ .to receive(:list_blobs)
+ .with([commit_oid], kind_of(Hash))
+ .and_call_original
+ end
+ end
+
+ it_behaves_like 'a blob enumeration'
+ end
+ end
+
+ describe '#new_commits' do
+ let(:repository) { mutable_repository }
+ let(:new_commit) do
+ author = { name: 'Test User', email: 'mail@example.com', time: Time.now }
+
+ Rugged::Commit.create(repository_rugged,
+ author: author,
+ committer: author,
+ message: "Message",
+ parents: [],
+ tree: "4b825dc642cb6eb9a060e54bf8d69288fbee4904")
+ end
+
+ let(:expected_commits) { 1 }
+ let(:revisions) { [new_commit] }
+
+ shared_examples 'an enumeration of new commits' do
+ it 'enumerates commits' do
+ commits = repository.new_commits(revisions).to_a
+
+ expect(commits.size).to eq(expected_commits)
+ commits.each do |commit|
+ expect(commit.id).to eq(new_commit)
+ expect(commit.message).to eq("Message")
+ end
+ end
+ end
+
+ context 'with list_commits disabled' do
+ before do
+ stub_feature_flags(list_commits: false)
+
+ expect_next_instance_of(Gitlab::GitalyClient::RefService) do |service|
+ expect(service)
+ .to receive(:list_new_commits)
+ .with(new_commit)
+ .and_call_original
+ end
+ end
+
+ it_behaves_like 'an enumeration of new commits'
+ end
+
+ context 'with list_commits enabled' do
+ before do
+ expect_next_instance_of(Gitlab::GitalyClient::CommitService) do |service|
+ expect(service)
+ .to receive(:list_commits)
+ .with([new_commit, '--not', '--all'])
+ .and_call_original
+ end
+ end
+
+ it_behaves_like 'an enumeration of new commits'
+ end
+ end
+
describe '#count_commits_between' do
subject { repository.count_commits_between('feature', 'master') }
diff --git a/spec/lib/gitlab/git/user_spec.rb b/spec/lib/gitlab/git/user_spec.rb
index 4414195ebf4..dfa68a7496c 100644
--- a/spec/lib/gitlab/git/user_spec.rb
+++ b/spec/lib/gitlab/git/user_spec.rb
@@ -7,15 +7,16 @@ RSpec.describe Gitlab::Git::User do
let(:name) { 'Jane Doé' }
let(:email) { 'janedoé@example.com' }
let(:gl_id) { 'user-123' }
+ let(:timezone) { 'Asia/Shanghai' }
let(:user) do
- described_class.new(username, name, email, gl_id)
+ described_class.new(username, name, email, gl_id, timezone)
end
- subject { described_class.new(username, name, email, gl_id) }
+ subject { described_class.new(username, name, email, gl_id, timezone) }
describe '.from_gitaly' do
let(:gitaly_user) do
- Gitaly::User.new(gl_username: username, name: name.b, email: email.b, gl_id: gl_id)
+ Gitaly::User.new(gl_username: username, name: name.b, email: email.b, gl_id: gl_id, timezone: timezone)
end
subject { described_class.from_gitaly(gitaly_user) }
@@ -25,34 +26,45 @@ RSpec.describe Gitlab::Git::User do
describe '.from_gitlab' do
context 'when no commit_email has been set' do
- let(:user) { build(:user, email: 'alice@example.com', commit_email: nil) }
+ let(:user) { build(:user, email: 'alice@example.com', commit_email: nil, timezone: timezone) }
subject { described_class.from_gitlab(user) }
- it { expect(subject).to eq(described_class.new(user.username, user.name, user.email, 'user-')) }
+ it { expect(subject).to eq(described_class.new(user.username, user.name, user.email, 'user-', timezone)) }
end
context 'when commit_email has been set' do
- let(:user) { build(:user, email: 'alice@example.com', commit_email: 'bob@example.com') }
+ let(:user) { build(:user, email: 'alice@example.com', commit_email: 'bob@example.com', timezone: timezone) }
subject { described_class.from_gitlab(user) }
- it { expect(subject).to eq(described_class.new(user.username, user.name, user.commit_email, 'user-')) }
+ it { expect(subject).to eq(described_class.new(user.username, user.name, user.commit_email, 'user-', timezone)) }
end
end
describe '#==' do
- def eq_other(username, name, email, gl_id)
- eq(described_class.new(username, name, email, gl_id))
+ def eq_other(username, name, email, gl_id, timezone)
+ eq(described_class.new(username, name, email, gl_id, timezone))
end
- it { expect(subject).to eq_other(username, name, email, gl_id) }
+ it { expect(subject).to eq_other(username, name, email, gl_id, timezone) }
- it { expect(subject).not_to eq_other(nil, nil, nil, nil) }
- it { expect(subject).not_to eq_other(username + 'x', name, email, gl_id) }
- it { expect(subject).not_to eq_other(username, name + 'x', email, gl_id) }
- it { expect(subject).not_to eq_other(username, name, email + 'x', gl_id) }
- it { expect(subject).not_to eq_other(username, name, email, gl_id + 'x') }
+ it { expect(subject).not_to eq_other(nil, nil, nil, nil, timezone) }
+ it { expect(subject).not_to eq_other(username + 'x', name, email, gl_id, timezone) }
+ it { expect(subject).not_to eq_other(username, name + 'x', email, gl_id, timezone) }
+ it { expect(subject).not_to eq_other(username, name, email + 'x', gl_id, timezone) }
+ it { expect(subject).not_to eq_other(username, name, email, gl_id + 'x', timezone) }
+ it { expect(subject).not_to eq_other(username, name, email, gl_id, 'Etc/UTC') }
+
+ context 'when add_timezone_to_web_operations is disabled' do
+ before do
+ stub_feature_flags(add_timezone_to_web_operations: false)
+ end
+
+ it 'ignores timezone arg and sets Etc/UTC by default' do
+ expect(user.timezone).to eq('Etc/UTC')
+ end
+ end
end
describe '#to_gitaly' do
@@ -69,6 +81,7 @@ RSpec.describe Gitlab::Git::User do
expect(subject.email).to be_a_binary_string
expect(subject.gl_id).to eq(gl_id)
+ expect(subject.timezone).to eq(timezone)
end
end
end
diff --git a/spec/lib/gitlab/git_access_snippet_spec.rb b/spec/lib/gitlab/git_access_snippet_spec.rb
index 777c94035d4..3b85e3ddd1d 100644
--- a/spec/lib/gitlab/git_access_snippet_spec.rb
+++ b/spec/lib/gitlab/git_access_snippet_spec.rb
@@ -140,8 +140,6 @@ RSpec.describe Gitlab::GitAccessSnippet do
end
context 'when project is public but snippet feature is private' do
- let(:project) { create(:project, :public) }
-
before do
update_feature_access_level(project, :private)
end
@@ -151,7 +149,7 @@ RSpec.describe Gitlab::GitAccessSnippet do
end
context 'when project is not accessible' do
- let(:project) { create(:project, :private) }
+ let_it_be(:project) { create(:project, :private) }
[:anonymous, :non_member].each do |membership|
context membership.to_s do
@@ -168,7 +166,7 @@ RSpec.describe Gitlab::GitAccessSnippet do
end
context 'when project is archived' do
- let(:project) { create(:project, :public, :archived) }
+ let_it_be(:project) { create(:project, :public, :archived) }
[:anonymous, :non_member].each do |membership|
context membership.to_s do
@@ -214,7 +212,7 @@ RSpec.describe Gitlab::GitAccessSnippet do
end
context 'when snippet feature is disabled' do
- let(:project) { create(:project, :public, :snippets_disabled) }
+ let_it_be(:project) { create(:project, :public, :snippets_disabled) }
[:anonymous, :non_member, :author, :admin].each do |membership|
context membership.to_s do
@@ -306,9 +304,9 @@ RSpec.describe Gitlab::GitAccessSnippet do
end
describe 'repository size restrictions' do
- let(:snippet) { create(:personal_snippet, :public, :repository) }
- let(:actor) { snippet.author }
+ let_it_be(:snippet) { create(:personal_snippet, :public, :repository) }
+ let(:actor) { snippet.author }
let(:oldrev) { TestEnv::BRANCH_SHA["snippet/single-file"] }
let(:newrev) { TestEnv::BRANCH_SHA["snippet/edit-file"] }
let(:ref) { "refs/heads/snippet/edit-file" }
@@ -384,11 +382,12 @@ RSpec.describe Gitlab::GitAccessSnippet do
it_behaves_like 'a push to repository to make it over the limit'
end
- context 'when GIT_OBJECT_DIRECTORY_RELATIVE env var is not set' do
+ shared_examples_for 'a change with GIT_OBJECT_DIRECTORY_RELATIVE env var unset' do
let(:change_size) { 200 }
before do
- allow(snippet.repository).to receive(:new_blobs).and_return(
+ stub_feature_flags(git_access_batched_changes_size: batched)
+ allow(snippet.repository).to receive(expected_call).and_return(
[double(:blob, size: change_size)]
)
end
@@ -397,6 +396,20 @@ RSpec.describe Gitlab::GitAccessSnippet do
it_behaves_like 'a push to repository below the limit'
it_behaves_like 'a push to repository to make it over the limit'
end
+
+ context 'when batched computation is enabled' do
+ let(:batched) { true }
+ let(:expected_call) { :blobs }
+
+ it_behaves_like 'a change with GIT_OBJECT_DIRECTORY_RELATIVE env var unset'
+ end
+
+ context 'when batched computation is disabled' do
+ let(:batched) { false }
+ let(:expected_call) { :new_blobs }
+
+ it_behaves_like 'a change with GIT_OBJECT_DIRECTORY_RELATIVE env var unset'
+ end
end
describe 'HEAD realignment' do
diff --git a/spec/lib/gitlab/git_access_spec.rb b/spec/lib/gitlab/git_access_spec.rb
index 3ee0310a9a2..bf682e4e4c6 100644
--- a/spec/lib/gitlab/git_access_spec.rb
+++ b/spec/lib/gitlab/git_access_spec.rb
@@ -265,7 +265,7 @@ RSpec.describe Gitlab::GitAccess do
it 'enqueues a redirected message for pushing' do
push_access_check
- expect(Gitlab::Checks::ProjectMoved.fetch_message(user.id, project.id)).not_to be_nil
+ expect(Gitlab::Checks::ContainerMoved.fetch_message(user, project.repository)).not_to be_nil
end
it 'allows push and pull access' do
@@ -435,7 +435,7 @@ RSpec.describe Gitlab::GitAccess do
it 'disallows users with expired password to pull' do
project.add_maintainer(user)
- user.update!(password_expires_at: 2.minutes.ago)
+ user.update!(password_expires_at: 2.minutes.ago, password_automatically_set: true)
expect { pull_access_check }.to raise_forbidden("Your password expired. Please access GitLab from a web browser to update your password.")
end
@@ -987,7 +987,7 @@ RSpec.describe Gitlab::GitAccess do
end
it 'disallows users with expired password to push' do
- user.update!(password_expires_at: 2.minutes.ago)
+ user.update!(password_expires_at: 2.minutes.ago, password_automatically_set: true)
expect { push_access_check }.to raise_forbidden("Your password expired. Please access GitLab from a web browser to update your password.")
end
diff --git a/spec/lib/gitlab/gitaly_client/blob_service_spec.rb b/spec/lib/gitlab/gitaly_client/blob_service_spec.rb
index f0ec58f3c2d..50078d8c127 100644
--- a/spec/lib/gitlab/gitaly_client/blob_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/blob_service_spec.rb
@@ -88,4 +88,104 @@ RSpec.describe Gitlab::GitalyClient::BlobService do
subject
end
end
+
+ describe '#list_blobs' do
+ let(:limit) { 0 }
+ let(:bytes_limit) { 0 }
+ let(:expected_params) { { revisions: revisions, limit: limit, bytes_limit: bytes_limit } }
+
+ before do
+ ::Gitlab::GitalyClient.clear_stubs!
+ end
+
+ subject { client.list_blobs(revisions, limit: limit, bytes_limit: bytes_limit) }
+
+ context 'with a single revision' do
+ let(:revisions) { ['master'] }
+
+ it 'sends a list_blobs message' do
+ expect_next_instance_of(Gitaly::BlobService::Stub) do |service|
+ expect(service)
+ .to receive(:list_blobs)
+ .with(gitaly_request_with_params(expected_params), kind_of(Hash))
+ .and_return([])
+ end
+
+ subject
+ end
+ end
+
+ context 'with multiple revisions' do
+ let(:revisions) { ['master', '--not', '--all'] }
+
+ it 'sends a list_blobs message' do
+ expect_next_instance_of(Gitaly::BlobService::Stub) do |service|
+ expect(service)
+ .to receive(:list_blobs)
+ .with(gitaly_request_with_params(expected_params), kind_of(Hash))
+ .and_return([])
+ end
+
+ subject
+ end
+ end
+
+ context 'with multiple revisions and limits' do
+ let(:revisions) { ['master', '--not', '--all'] }
+ let(:limit) { 10 }
+ let(:bytes_limit) { 1024 }
+
+ it 'sends a list_blobs message' do
+ expect_next_instance_of(Gitaly::BlobService::Stub) do |service|
+ expect(service)
+ .to receive(:list_blobs)
+ .with(gitaly_request_with_params(expected_params), kind_of(Hash))
+ .and_return([])
+ end
+
+ subject
+ end
+ end
+
+ context 'with split contents' do
+ let(:revisions) { ['master'] }
+
+ it 'sends a list_blobs message', :aggregate_failures do
+ expect_next_instance_of(Gitaly::BlobService::Stub) do |service|
+ expect(service)
+ .to receive(:list_blobs)
+ .with(gitaly_request_with_params(expected_params), kind_of(Hash))
+ .and_return([
+ Gitaly::ListBlobsResponse.new(blobs: [
+ Gitaly::ListBlobsResponse::Blob.new(oid: "012345", size: 8, data: "0x01"),
+ Gitaly::ListBlobsResponse::Blob.new(data: "23")
+ ]),
+ Gitaly::ListBlobsResponse.new(blobs: [
+ Gitaly::ListBlobsResponse::Blob.new(data: "45"),
+ Gitaly::ListBlobsResponse::Blob.new(oid: "56", size: 4, data: "0x5"),
+ Gitaly::ListBlobsResponse::Blob.new(data: "6")
+ ]),
+ Gitaly::ListBlobsResponse.new(blobs: [
+ Gitaly::ListBlobsResponse::Blob.new(oid: "78", size: 4, data: "0x78")
+ ])
+ ])
+ end
+
+ blobs = subject.to_a
+ expect(blobs.size).to be(3)
+
+ expect(blobs[0].id).to eq('012345')
+ expect(blobs[0].size).to eq(8)
+ expect(blobs[0].data).to eq('0x012345')
+
+ expect(blobs[1].id).to eq('56')
+ expect(blobs[1].size).to eq(4)
+ expect(blobs[1].data).to eq('0x56')
+
+ expect(blobs[2].id).to eq('78')
+ expect(blobs[2].size).to eq(4)
+ expect(blobs[2].data).to eq('0x78')
+ end
+ end
+ end
end
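
The 'split contents' example asserts that #list_blobs stitches together blob data that Gitaly streams across several ListBlobsResponse messages: a chunk carrying an oid starts a new blob, and oid-less chunks append to the previous one. A simplified, self-contained sketch of that reassembly (plain hashes instead of Gitlab::Git::Blob objects, so only an illustration of the shape):

# Chunk stands in for Gitaly::ListBlobsResponse::Blob in this sketch.
Chunk = Struct.new(:oid, :size, :data, keyword_init: true)

# A chunk with a non-empty oid opens a new blob; chunks without an oid are
# continuations of the previous blob's data.
def reassemble_blobs(chunks)
  chunks.each_with_object([]) do |chunk, blobs|
    if chunk.oid.to_s.empty?
      blobs.last[:data] << chunk.data
    else
      blobs << { id: chunk.oid, size: chunk.size, data: chunk.data.dup }
    end
  end
end

blobs = reassemble_blobs([
  Chunk.new(oid: '012345', size: 8, data: '0x01'),
  Chunk.new(data: '23'),
  Chunk.new(data: '45'),
  Chunk.new(oid: '56', size: 4, data: '0x5'),
  Chunk.new(data: '6')
])
# => [{ id: '012345', size: 8, data: '0x012345' }, { id: '56', size: 4, data: '0x56' }]
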
diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
index ac4c42d57ee..22c29403255 100644
--- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
@@ -287,6 +287,39 @@ RSpec.describe Gitlab::GitalyClient::CommitService do
end
end
+ describe '#list_commits' do
+ shared_examples 'a ListCommits request' do
+ before do
+ ::Gitlab::GitalyClient.clear_stubs!
+ end
+
+ it 'sends a list_commits message' do
+ expect_next_instance_of(Gitaly::CommitService::Stub) do |service|
+ expect(service)
+ .to receive(:list_commits)
+ .with(gitaly_request_with_params(expected_params), kind_of(Hash))
+ .and_return([])
+ end
+
+ client.list_commits(revisions)
+ end
+ end
+
+ context 'with a single revision' do
+ let(:revisions) { 'master' }
+ let(:expected_params) { %w[master] }
+
+ it_behaves_like 'a ListCommits request'
+ end
+
+ context 'with multiple revisions' do
+ let(:revisions) { %w[master --not --all] }
+ let(:expected_params) { %w[master --not --all] }
+
+ it_behaves_like 'a ListCommits request'
+ end
+ end
+
describe '#commit_stats' do
let(:request) do
Gitaly::CommitStatsRequest.new(
diff --git a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
index 9a17140a1e0..3789bc76a94 100644
--- a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::GitalyClient::OperationService do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
+
let(:repository) { project.repository.raw }
let(:client) { described_class.new(repository) }
let(:gitaly_user) { Gitlab::Git::User.from_gitlab(user).to_gitaly }
diff --git a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
index 56c8fe20eca..53805d67f9f 100644
--- a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
@@ -209,19 +209,6 @@ RSpec.describe Gitlab::GitalyClient::RepositoryService do
end
end
- describe '#rebase_in_progress?' do
- let(:rebase_id) { 1 }
-
- it 'sends a repository_rebase_in_progress message' do
- expect_any_instance_of(Gitaly::RepositoryService::Stub)
- .to receive(:is_rebase_in_progress)
- .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
- .and_return(double(in_progress: true))
-
- client.rebase_in_progress?(rebase_id)
- end
- end
-
describe '#squash_in_progress?' do
let(:squash_id) { 1 }
diff --git a/spec/lib/gitlab/github_import/importer/pull_request_merged_by_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_request_merged_by_importer_spec.rb
index 01d9edf0ba1..016f6e5377b 100644
--- a/spec/lib/gitlab/github_import/importer/pull_request_merged_by_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_request_merged_by_importer_spec.rb
@@ -8,13 +8,14 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestMergedByImporter, :cle
let(:project) { merge_request.project }
let(:merged_at) { Time.new(2017, 1, 1, 12, 00).utc }
let(:client_double) { double(user: double(id: 999, login: 'merger', email: 'merger@email.com')) }
+ let(:merger_user) { double(id: 999, login: 'merger') }
let(:pull_request) do
instance_double(
Gitlab::GithubImport::Representation::PullRequest,
iid: merge_request.iid,
merged_at: merged_at,
- merged_by: double(id: 999, login: 'merger')
+ merged_by: merger_user
)
end
@@ -48,4 +49,23 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestMergedByImporter, :cle
expect(last_note.author).to eq(project.creator)
end
end
+
+ context 'when the merger user is not provided' do
+ let(:merger_user) { nil }
+
+ it 'adds a note referencing the merger user' do
+ expect { subject.execute }
+ .to change(Note, :count).by(1)
+ .and not_change(merge_request, :updated_at)
+
+ metrics = merge_request.metrics.reload
+ expect(metrics.merged_by).to be_nil
+ expect(metrics.merged_at).to eq(merged_at)
+
+ last_note = merge_request.notes.last
+ expect(last_note.note).to eq("*Merged by: ghost at 2017-01-01 12:00:00 UTC*")
+ expect(last_note.created_at).to eq(merged_at)
+ expect(last_note.author).to eq(project.creator)
+ end
+ end
end
diff --git a/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb
index fa8b5e6ccf0..a6da40f47f1 100644
--- a/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb
@@ -167,6 +167,19 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter, :clean
end
end
+ context 'when the submitted_at is not provided' do
+ let(:review) { create_review(type: 'APPROVED', note: '', submitted_at: nil) }
+
+ it 'creates a note for the review without the author information' do
+ expect { subject.execute }.to change(Note, :count).by(1)
+
+ last_note = merge_request.notes.last
+
+ expect(last_note.created_at)
+ .to be_within(1.second).of(merge_request.updated_at)
+ end
+ end
+
context 'when the review has a note text' do
context 'when the review is "APPROVED"' do
let(:review) { create_review(type: 'APPROVED') }
@@ -215,13 +228,15 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter, :clean
end
end
- def create_review(type:, note: 'note', author: { id: 999, login: 'author' })
+ def create_review(type:, **extra)
Gitlab::GithubImport::Representation::PullRequestReview.from_json_hash(
- merge_request_id: merge_request.id,
- review_type: type,
- note: note,
- submitted_at: submitted_at.to_s,
- author: author
+ extra.reverse_merge(
+ author: { id: 999, login: 'author' },
+ merge_request_id: merge_request.id,
+ review_type: type,
+ note: 'note',
+ submitted_at: submitted_at.to_s
+ )
)
end
end
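
The create_review refactor leans on ActiveSupport's Hash#reverse_merge: keys passed by the caller win, and only the remaining attributes fall back to the defaults, which is what lets create_review(type: 'APPROVED', note: '', submitted_at: nil) override submitted_at with nil. A quick illustration (defaults here are just sample values):

require 'active_support/core_ext/hash/reverse_merge'

overrides = { note: '', submitted_at: nil }
defaults  = { author: { id: 999, login: 'author' }, note: 'note', submitted_at: '2021-01-01' }

overrides.reverse_merge(defaults)
# => { author: { id: 999, login: 'author' }, note: "", submitted_at: nil }
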
diff --git a/spec/lib/gitlab/github_import/markdown_text_spec.rb b/spec/lib/gitlab/github_import/markdown_text_spec.rb
index 22bf10f36d8..2d159580b5f 100644
--- a/spec/lib/gitlab/github_import/markdown_text_spec.rb
+++ b/spec/lib/gitlab/github_import/markdown_text_spec.rb
@@ -27,6 +27,13 @@ RSpec.describe Gitlab::GithubImport::MarkdownText do
expect(text.to_s).to eq('Hello')
end
+ it 'returns empty text when it receives nil' do
+ author = double(:author, login: nil)
+ text = described_class.new(nil, author, true)
+
+ expect(text.to_s).to eq('')
+ end
+
it 'returns the text with an extra header when the author was not found' do
author = double(:author, login: 'Alice')
text = described_class.new('Hello', author)
diff --git a/spec/lib/gitlab/github_import/object_counter_spec.rb b/spec/lib/gitlab/github_import/object_counter_spec.rb
new file mode 100644
index 00000000000..668c11667b5
--- /dev/null
+++ b/spec/lib/gitlab/github_import/object_counter_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::ObjectCounter, :clean_gitlab_redis_cache do
+ let_it_be(:project) { create(:project) }
+
+ it 'validates the operation being incremented' do
+ expect { described_class.increment(project, :issue, :unknown) }
+ .to raise_error(ArgumentError, 'Operation must be fetched or imported')
+ end
+
+ it 'increments the counter and saves the key to be listed in the summary later' do
+ expect(Gitlab::Metrics)
+ .to receive(:counter)
+ .twice
+ .with(:github_importer_fetched_issue, 'The number of fetched Github Issue')
+ .and_return(double(increment: true))
+
+ expect(Gitlab::Metrics)
+ .to receive(:counter)
+ .twice
+ .with(:github_importer_imported_issue, 'The number of imported Github Issue')
+ .and_return(double(increment: true))
+
+ described_class.increment(project, :issue, :fetched)
+ described_class.increment(project, :issue, :fetched)
+ described_class.increment(project, :issue, :imported)
+ described_class.increment(project, :issue, :imported)
+
+ expect(described_class.summary(project)).to eq({
+ 'fetched' => { 'issue' => 2 },
+ 'imported' => { 'issue' => 2 }
+ })
+ end
+end
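
The new spec drives an increment/summary API keyed by operation ('fetched'/'imported') and object type. An in-memory stand-in that mirrors the shape asserted above (the real counter is backed by Redis and Gitlab::Metrics, so this is only an illustration):

# Minimal counter with the same increment/summary contract as the spec expects.
class SimpleObjectCounter
  OPERATIONS = %w[fetched imported].freeze

  def initialize
    @counters = Hash.new { |hash, op| hash[op] = Hash.new(0) }
  end

  def increment(object_type, operation)
    raise ArgumentError, 'Operation must be fetched or imported' unless OPERATIONS.include?(operation.to_s)

    @counters[operation.to_s][object_type.to_s] += 1
  end

  def summary
    @counters
  end
end

counter = SimpleObjectCounter.new
2.times { counter.increment(:issue, :fetched) }
2.times { counter.increment(:issue, :imported) }
counter.summary # => { 'fetched' => { 'issue' => 2 }, 'imported' => { 'issue' => 2 } }
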
diff --git a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
index 1e31cd2f007..d56d4708385 100644
--- a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
+++ b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
@@ -11,6 +11,10 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling do
Class
end
+ def object_type
+ :dummy
+ end
+
def collection_method
:issues
end
diff --git a/spec/lib/gitlab/github_import/representation/pull_request_review_spec.rb b/spec/lib/gitlab/github_import/representation/pull_request_review_spec.rb
index f9763455468..cad9b13774e 100644
--- a/spec/lib/gitlab/github_import/representation/pull_request_review_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/pull_request_review_spec.rb
@@ -68,5 +68,11 @@ RSpec.describe Gitlab::GithubImport::Representation::PullRequestReview do
expect(review.author).to be_nil
end
+
+ it 'does not fail when submitted_at is blank' do
+ review = described_class.from_json_hash(hash.except('submitted_at'))
+
+ expect(review.submitted_at).to be_nil
+ end
end
end
diff --git a/spec/lib/gitlab/github_import_spec.rb b/spec/lib/gitlab/github_import_spec.rb
index 3129da64809..662757f66ad 100644
--- a/spec/lib/gitlab/github_import_spec.rb
+++ b/spec/lib/gitlab/github_import_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::GithubImport do
context 'github.com' do
- let(:project) { double(:project, import_url: 'http://t0ken@github.com/user/repo.git') }
+ let(:project) { double(:project, import_url: 'http://t0ken@github.com/user/repo.git', id: 1) }
it 'returns a new Client with a custom token' do
expect(described_class::Client)
diff --git a/spec/lib/gitlab/gl_repository/repo_type_spec.rb b/spec/lib/gitlab/gl_repository/repo_type_spec.rb
index 629e6c96858..71a4c693f9d 100644
--- a/spec/lib/gitlab/gl_repository/repo_type_spec.rb
+++ b/spec/lib/gitlab/gl_repository/repo_type_spec.rb
@@ -5,6 +5,7 @@ RSpec.describe Gitlab::GlRepository::RepoType do
let_it_be(:project) { create(:project) }
let_it_be(:personal_snippet) { create(:personal_snippet, author: project.owner) }
let_it_be(:project_snippet) { create(:project_snippet, project: project, author: project.owner) }
+
let(:project_path) { project.repository.full_path }
let(:wiki_path) { project.wiki.repository.full_path }
let(:design_path) { project.design_repository.full_path }
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb
new file mode 100644
index 00000000000..0047d24a215
--- /dev/null
+++ b/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb
@@ -0,0 +1,420 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/334973
+ # The spec will be merged with connection_spec.rb in the future.
+ let(:nodes) { Project.all.order(id: :asc) }
+ let(:arguments) { {} }
+ let(:query_type) { GraphQL::ObjectType.new }
+ let(:schema) { GraphQL::Schema.define(query: query_type, mutation: nil) }
+ let(:context) { GraphQL::Query::Context.new(query: OpenStruct.new(schema: schema), values: nil, object: nil) }
+
+ let_it_be(:column_order_id) { Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(attribute_name: 'id', order_expression: Project.arel_table[:id].asc) }
+ let_it_be(:column_order_id_desc) { Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(attribute_name: 'id', order_expression: Project.arel_table[:id].desc) }
+ let_it_be(:column_order_updated_at) { Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(attribute_name: 'updated_at', order_expression: Project.arel_table[:updated_at].asc) }
+ let_it_be(:column_order_created_at) { Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(attribute_name: 'created_at', order_expression: Project.arel_table[:created_at].asc) }
+ let_it_be(:column_order_last_repo) do
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'last_repository_check_at',
+ column_expression: Project.arel_table[:last_repository_check_at],
+ order_expression: Gitlab::Database.nulls_last_order('last_repository_check_at', :asc),
+ reversed_order_expression: Gitlab::Database.nulls_last_order('last_repository_check_at', :desc),
+ order_direction: :asc,
+ nullable: :nulls_last,
+ distinct: false)
+ end
+
+ let_it_be(:column_order_last_repo_desc) do
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'last_repository_check_at',
+ column_expression: Project.arel_table[:last_repository_check_at],
+ order_expression: Gitlab::Database.nulls_last_order('last_repository_check_at', :desc),
+ reversed_order_expression: Gitlab::Database.nulls_last_order('last_repository_check_at', :asc),
+ order_direction: :desc,
+ nullable: :nulls_last,
+ distinct: false)
+ end
+
+ subject(:connection) do
+ described_class.new(nodes, **{ context: context, max_page_size: 3 }.merge(arguments))
+ end
+
+ def encoded_cursor(node)
+ described_class.new(nodes, context: context).cursor_for(node)
+ end
+
+ def decoded_cursor(cursor)
+ Gitlab::Json.parse(Base64Bp.urlsafe_decode64(cursor))
+ end
+
+ describe "With generic keyset order support" do
+ let(:nodes) { Project.all.order(Gitlab::Pagination::Keyset::Order.build([column_order_id])) }
+
+ it_behaves_like 'a connection with collection methods'
+
+ it_behaves_like 'a redactable connection' do
+ let_it_be(:projects) { create_list(:project, 2) }
+ let(:unwanted) { projects.second }
+ end
+
+ describe '#cursor_for' do
+ let(:project) { create(:project) }
+ let(:cursor) { connection.cursor_for(project) }
+
+ it 'returns an encoded ID' do
+ expect(decoded_cursor(cursor)).to eq('id' => project.id.to_s)
+ end
+
+ context 'when an order is specified' do
+ let(:nodes) { Project.all.order(Gitlab::Pagination::Keyset::Order.build([column_order_id])) }
+
+ it 'returns the encoded value of the order' do
+ expect(decoded_cursor(cursor)).to include('id' => project.id.to_s)
+ end
+ end
+
+ context 'when multiple orders are specified' do
+ let(:nodes) { Project.all.order(Gitlab::Pagination::Keyset::Order.build([column_order_updated_at, column_order_created_at, column_order_id])) }
+
+ it 'returns the encoded value of the order' do
+ expect(decoded_cursor(cursor)).to include('updated_at' => project.updated_at.strftime('%Y-%m-%d %H:%M:%S.%N %Z'))
+ end
+ end
+ end
+
+ describe '#sliced_nodes' do
+ let(:projects) { create_list(:project, 4) }
+
+ context 'when before is passed' do
+ let(:arguments) { { before: encoded_cursor(projects[1]) } }
+
+ it 'only returns the project before the selected one' do
+ expect(subject.sliced_nodes).to contain_exactly(projects.first)
+ end
+
+ context 'when the sort order is descending' do
+ let(:nodes) { Project.all.order(Gitlab::Pagination::Keyset::Order.build([column_order_id_desc])) }
+
+ it 'returns the correct nodes' do
+ expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1])
+ end
+ end
+ end
+
+ context 'when after is passed' do
+ let(:arguments) { { after: encoded_cursor(projects[1]) } }
+
+ it 'only returns the project before the selected one' do
+ expect(subject.sliced_nodes).to contain_exactly(*projects[2..-1])
+ end
+
+ context 'when the sort order is descending' do
+ let(:nodes) { Project.all.order(Gitlab::Pagination::Keyset::Order.build([column_order_id_desc])) }
+
+ it 'returns the correct nodes' do
+ expect(subject.sliced_nodes).to contain_exactly(projects.first)
+ end
+ end
+ end
+
+ context 'when both before and after are passed' do
+ let(:arguments) do
+ {
+ after: encoded_cursor(projects[1]),
+ before: encoded_cursor(projects[3])
+ }
+ end
+
+ it 'returns the expected set' do
+ expect(subject.sliced_nodes).to contain_exactly(projects[2])
+ end
+ end
+
+ shared_examples 'nodes are in ascending order' do
+ context 'when no cursor is passed' do
+ let(:arguments) { {} }
+
+ it 'returns projects in ascending order' do
+ expect(subject.sliced_nodes).to eq(ascending_nodes)
+ end
+ end
+
+ context 'when before cursor value is not NULL' do
+ let(:arguments) { { before: encoded_cursor(ascending_nodes[2]) } }
+
+ it 'returns all projects before the cursor' do
+ expect(subject.sliced_nodes).to eq(ascending_nodes.first(2))
+ end
+ end
+
+ context 'when after cursor value is not NULL' do
+ let(:arguments) { { after: encoded_cursor(ascending_nodes[1]) } }
+
+ it 'returns all projects after the cursor' do
+ expect(subject.sliced_nodes).to eq(ascending_nodes.last(3))
+ end
+ end
+
+ context 'when before and after cursor' do
+ let(:arguments) { { before: encoded_cursor(ascending_nodes.last), after: encoded_cursor(ascending_nodes.first) } }
+
+ it 'returns all projects after the cursor' do
+ expect(subject.sliced_nodes).to eq(ascending_nodes[1..3])
+ end
+ end
+ end
+
+ shared_examples 'nodes are in descending order' do
+ context 'when no cursor is passed' do
+ let(:arguments) { {} }
+
+ it 'only returns projects in descending order' do
+ expect(subject.sliced_nodes).to eq(descending_nodes)
+ end
+ end
+
+ context 'when before cursor value is not NULL' do
+ let(:arguments) { { before: encoded_cursor(descending_nodes[2]) } }
+
+ it 'returns all projects before the cursor' do
+ expect(subject.sliced_nodes).to eq(descending_nodes.first(2))
+ end
+ end
+
+ context 'when after cursor value is not NULL' do
+ let(:arguments) { { after: encoded_cursor(descending_nodes[1]) } }
+
+ it 'returns all projects after the cursor' do
+ expect(subject.sliced_nodes).to eq(descending_nodes.last(3))
+ end
+ end
+
+ context 'when before and after cursor' do
+ let(:arguments) { { before: encoded_cursor(descending_nodes.last), after: encoded_cursor(descending_nodes.first) } }
+
+ it 'returns all projects after the cursor' do
+ expect(subject.sliced_nodes).to eq(descending_nodes[1..3])
+ end
+ end
+ end
+
+ context 'when multiple orders with nil values are defined' do
+ let_it_be(:project1) { create(:project, last_repository_check_at: 10.days.ago) } # Asc: project5 Desc: project3
+ let_it_be(:project2) { create(:project, last_repository_check_at: nil) } # Asc: project1 Desc: project1
+ let_it_be(:project3) { create(:project, last_repository_check_at: 5.days.ago) } # Asc: project3 Desc: project5
+ let_it_be(:project4) { create(:project, last_repository_check_at: nil) } # Asc: project2 Desc: project2
+ let_it_be(:project5) { create(:project, last_repository_check_at: 20.days.ago) } # Asc: project4 Desc: project4
+
+ context 'when ascending' do
+ let_it_be(:order) { Gitlab::Pagination::Keyset::Order.build([column_order_last_repo, column_order_id]) }
+ let_it_be(:nodes) { Project.order(order) }
+ let_it_be(:ascending_nodes) { [project5, project1, project3, project2, project4] }
+
+ it_behaves_like 'nodes are in ascending order'
+
+ context 'when before cursor value is NULL' do
+ let(:arguments) { { before: encoded_cursor(project4) } }
+
+ it 'returns all projects before the cursor' do
+ expect(subject.sliced_nodes).to eq([project5, project1, project3, project2])
+ end
+ end
+
+ context 'when after cursor value is NULL' do
+ let(:arguments) { { after: encoded_cursor(project2) } }
+
+ it 'returns all projects after the cursor' do
+ expect(subject.sliced_nodes).to eq([project4])
+ end
+ end
+ end
+
+ context 'when descending' do
+ let_it_be(:order) { Gitlab::Pagination::Keyset::Order.build([column_order_last_repo_desc, column_order_id]) }
+ let_it_be(:nodes) { Project.order(order) }
+ let_it_be(:descending_nodes) { [project3, project1, project5, project2, project4] }
+
+ it_behaves_like 'nodes are in descending order'
+
+ context 'when before cursor value is NULL' do
+ let(:arguments) { { before: encoded_cursor(project4) } }
+
+ it 'returns all projects before the cursor' do
+ expect(subject.sliced_nodes).to eq([project3, project1, project5, project2])
+ end
+ end
+
+ context 'when after cursor value is NULL' do
+ let(:arguments) { { after: encoded_cursor(project2) } }
+
+ it 'returns all projects after the cursor' do
+ expect(subject.sliced_nodes).to eq([project4])
+ end
+ end
+ end
+ end
+
+ # rubocop: disable RSpec/EmptyExampleGroup
+ context 'when ordering uses LOWER' do
+ end
+ # rubocop: enable RSpec/EmptyExampleGroup
+
+ context 'when ordering by similarity' do
+ let_it_be(:project1) { create(:project, name: 'test') }
+ let_it_be(:project2) { create(:project, name: 'testing') }
+ let_it_be(:project3) { create(:project, name: 'tests') }
+ let_it_be(:project4) { create(:project, name: 'testing stuff') }
+ let_it_be(:project5) { create(:project, name: 'test') }
+
+ let_it_be(:nodes) do
+ # Note: sorted_by_similarity_desc scope internally supports the generic keyset order.
+ Project.sorted_by_similarity_desc('test', include_in_select: true)
+ end
+
+ let_it_be(:descending_nodes) { nodes.to_a }
+
+ it_behaves_like 'nodes are in descending order'
+ end
+
+ context 'when an invalid cursor is provided' do
+ let(:arguments) { { before: Base64Bp.urlsafe_encode64('invalidcursor', padding: false) } }
+
+ it 'raises an error' do
+ expect { subject.sliced_nodes }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ end
+ end
+ end
+
+ describe '#nodes' do
+ let_it_be(:all_nodes) { create_list(:project, 5) }
+
+ let(:paged_nodes) { subject.nodes }
+
+ it_behaves_like 'connection with paged nodes' do
+ let(:paged_nodes_size) { 3 }
+ end
+
+ context 'when both are passed' do
+ let(:arguments) { { first: 2, last: 2 } }
+
+ it 'raises an error' do
+ expect { paged_nodes }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ end
+ end
+
+ context 'when primary key is not in original order' do
+ let(:nodes) { Project.order(last_repository_check_at: :desc) }
+
+ it 'is added to end' do
+ sliced = subject.sliced_nodes
+
+ order_sql = sliced.order_values.last.to_sql
+
+ expect(order_sql).to end_with(Project.arel_table[:id].desc.to_sql)
+ end
+ end
+
+ context 'when there is no primary key' do
+ before do
+ stub_const('NoPrimaryKey', Class.new(ActiveRecord::Base))
+ NoPrimaryKey.class_eval do
+ self.table_name = 'no_primary_key'
+ self.primary_key = nil
+ end
+ end
+
+ let(:nodes) { NoPrimaryKey.all }
+
+ it 'raises an error' do
+ expect(NoPrimaryKey.primary_key).to be_nil
+ expect { subject.sliced_nodes }.to raise_error(ArgumentError, 'Relation must have a primary key')
+ end
+ end
+ end
+
+ describe '#has_previous_page and #has_next_page' do
+ # using a list of 5 items with a max_page of 3
+ let_it_be(:project_list) { create_list(:project, 5) }
+ let_it_be(:nodes) { Project.order(Gitlab::Pagination::Keyset::Order.build([column_order_id])) }
+
+ context 'when default query' do
+ let(:arguments) { {} }
+
+ it 'has no previous, but a next' do
+ expect(subject.has_previous_page).to be_falsey
+ expect(subject.has_next_page).to be_truthy
+ end
+ end
+
+ context 'when before is first item' do
+ let(:arguments) { { before: encoded_cursor(project_list.first) } }
+
+ it 'has no previous, but a next' do
+ expect(subject.has_previous_page).to be_falsey
+ expect(subject.has_next_page).to be_truthy
+ end
+ end
+
+ describe 'using `before`' do
+ context 'when before is the last item' do
+ let(:arguments) { { before: encoded_cursor(project_list.last) } }
+
+ it 'has no previous, but a next' do
+ expect(subject.has_previous_page).to be_falsey
+ expect(subject.has_next_page).to be_truthy
+ end
+ end
+
+ context 'when before and last specified' do
+ let(:arguments) { { before: encoded_cursor(project_list.last), last: 2 } }
+
+ it 'has a previous and a next' do
+ expect(subject.has_previous_page).to be_truthy
+ expect(subject.has_next_page).to be_truthy
+ end
+ end
+
+ context 'when before and last request all remaining nodes' do
+ let(:arguments) { { before: encoded_cursor(project_list[1]), last: 3 } }
+
+ it 'has no previous, but a next' do
+ expect(subject.has_previous_page).to be_falsey
+ expect(subject.has_next_page).to be_truthy
+ expect(subject.nodes).to eq [project_list[0]]
+ end
+ end
+ end
+
+ describe 'using `after`' do
+ context 'when after is the first item' do
+ let(:arguments) { { after: encoded_cursor(project_list.first) } }
+
+ it 'has a previous and a next' do
+ expect(subject.has_previous_page).to be_truthy
+ expect(subject.has_next_page).to be_truthy
+ end
+ end
+
+ context 'when after and first specified' do
+ let(:arguments) { { after: encoded_cursor(project_list.first), first: 2 } }
+
+ it 'has a previous and a next' do
+ expect(subject.has_previous_page).to be_truthy
+ expect(subject.has_next_page).to be_truthy
+ end
+ end
+
+ context 'when after and last request all remaining nodes' do
+ let(:arguments) { { after: encoded_cursor(project_list[2]), last: 3 } }
+
+ it 'has a previous but no next' do
+ expect(subject.has_previous_page).to be_truthy
+ expect(subject.has_next_page).to be_falsey
+ end
+ end
+ end
+ end
+ end
+end
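
Keyset cursors in these specs are URL-safe Base64 over a JSON hash of the ordered attribute values, as the decoded_cursor helper shows. A minimal round trip with the standard library (Base64/JSON here rather than Base64Bp/Gitlab::Json, purely for illustration):

require 'base64'
require 'json'

# Encode the ordered attributes of a node into an opaque cursor string.
def encode_cursor(attributes)
  Base64.urlsafe_encode64(JSON.generate(attributes), padding: false)
end

# Decode a cursor back into the attribute hash used for keyset comparisons.
def decode_cursor(cursor)
  JSON.parse(Base64.urlsafe_decode64(cursor))
end

cursor = encode_cursor('id' => '42')
decode_cursor(cursor) # => { "id" => "42" }
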
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
index 03030728834..8ef5f1147c5 100644
--- a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
@@ -355,6 +355,10 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
context 'when primary key is not in original order' do
let(:nodes) { Project.order(last_repository_check_at: :desc) }
+ before do
+ stub_feature_flags(new_graphql_keyset_pagination: false)
+ end
+
it 'is added to end' do
sliced = subject.sliced_nodes
diff --git a/spec/lib/gitlab/group_search_results_spec.rb b/spec/lib/gitlab/group_search_results_spec.rb
index 009f66d2108..ec96a069b8f 100644
--- a/spec/lib/gitlab/group_search_results_spec.rb
+++ b/spec/lib/gitlab/group_search_results_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Gitlab::GroupSearchResults do
let_it_be(:group) { create(:group) }
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :public, group: group) }
+
let(:filters) { {} }
let(:limit_projects) { Project.all }
let(:query) { 'gob' }
diff --git a/spec/lib/gitlab/highlight_spec.rb b/spec/lib/gitlab/highlight_spec.rb
index 1f06019c929..ccb3ae1018a 100644
--- a/spec/lib/gitlab/highlight_spec.rb
+++ b/spec/lib/gitlab/highlight_spec.rb
@@ -50,9 +50,16 @@ RSpec.describe Gitlab::Highlight do
let(:result) { described_class.highlight(file_name, content) } # content is 44 bytes
before do
+ stub_feature_flags(one_megabyte_file_size_limit: false)
stub_config(extra: { 'maximum_text_highlight_size_kilobytes' => 0.0001 } ) # 1.024 bytes
end
+ it 'confirms the file size limit is 1MB when `one_megabyte_file_size_limit` is enabled' do
+ stub_feature_flags(one_megabyte_file_size_limit: true)
+ expect(described_class.too_large?(1024.kilobytes)).to eq(false)
+ expect(described_class.too_large?(1025.kilobytes)).to eq(true)
+ end
+
it 'increments the metric for oversized files' do
expect { result }.to change { over_highlight_size_limit('file size: 0.0001') }.by(1)
end
diff --git a/spec/lib/gitlab/hook_data/issue_builder_spec.rb b/spec/lib/gitlab/hook_data/issue_builder_spec.rb
index 8f976bcf09d..039b4c19522 100644
--- a/spec/lib/gitlab/hook_data/issue_builder_spec.rb
+++ b/spec/lib/gitlab/hook_data/issue_builder_spec.rb
@@ -48,6 +48,7 @@ RSpec.describe Gitlab::HookData::IssueBuilder do
expect(data).to include(:human_time_change)
expect(data).to include(:assignee_ids)
expect(data).to include(:state)
+ expect(data).to include(:severity)
expect(data).to include('labels' => [label.hook_attrs])
end
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 87a10b52b22..78805cea66a 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -54,6 +54,8 @@ issues:
- namespace
- note_authors
- issue_email_participants
+- test_reports
+- requirement
events:
- author
- project
@@ -196,6 +198,8 @@ merge_request_diff:
- merge_request_diff_files
merge_request_diff_commits:
- merge_request_diff
+- commit_author
+- committer
merge_request_diff_detail:
- merge_request_diff
merge_request_diff_files:
@@ -367,34 +371,34 @@ project:
- discord_integration
- drone_ci_integration
- emails_on_push_integration
-- pipelines_email_service
-- mattermost_slash_commands_service
-- slack_slash_commands_service
+- pipelines_email_integration
+- mattermost_slash_commands_integration
+- slack_slash_commands_integration
- irker_integration
-- packagist_service
-- pivotaltracker_service
-- prometheus_service
+- packagist_integration
+- pivotaltracker_integration
+- prometheus_integration
- flowdock_integration
- assembla_integration
- asana_integration
-- slack_service
-- microsoft_teams_service
-- mattermost_service
+- slack_integration
+- microsoft_teams_integration
+- mattermost_integration
- hangouts_chat_integration
-- unify_circuit_service
+- unify_circuit_integration
- buildkite_integration
- bamboo_integration
-- teamcity_service
-- pushover_service
-- jira_service
-- redmine_service
-- youtrack_service
+- teamcity_integration
+- pushover_integration
+- jira_integration
+- redmine_integration
+- youtrack_integration
- custom_issue_tracker_integration
- bugzilla_integration
- ewm_integration
- external_wiki_integration
-- mock_ci_service
-- mock_monitoring_service
+- mock_ci_integration
+- mock_monitoring_integration
- forked_to_members
- forked_from_project
- forks
@@ -480,12 +484,12 @@ project:
- kubernetes_namespaces
- error_tracking_setting
- metrics_setting
-- gitlab_slack_application_service
-- github_service
+- gitlab_slack_application_integration
+- github_integration
- protected_environments
- mirror_user
- push_rule
-- jenkins_service
+- jenkins_integration
- index_status
- feature_usage
- approval_rules
@@ -557,7 +561,7 @@ project:
- alert_management_alerts
- repository_storage_moves
- freeze_periods
-- webex_teams_service
+- webex_teams_integration
- build_report_results
- vulnerability_statistic
- vulnerability_historical_statistics
@@ -574,6 +578,7 @@ project:
- merge_request_metrics
- security_orchestration_policy_configuration
- timelogs
+- error_tracking_errors
award_emoji:
- awardable
- user
diff --git a/spec/lib/gitlab/import_export/import_test_coverage_spec.rb b/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
index 7a9e7d8afba..9c6d2708607 100644
--- a/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
+++ b/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
@@ -109,14 +109,14 @@ RSpec.describe 'Test coverage of the Project Import' do
def failure_message(not_tested_relations)
<<~MSG
- These relations seem to be added recenty and
+ These relations seem to be added recently and
they expected to be covered in our Import specs: #{not_tested_relations}.
To do that, expand one of the files listed in `project_json_fixtures`
(or expand the list if you consider adding a new fixture file).
After that, add a new spec into
- `spec/lib/gitlab/import_export/project_tree_restorer_spec.rb`
+ `spec/lib/gitlab/import_export/project/tree_restorer_spec.rb`
to check that the relation is being imported correctly.
In case the spec breaks the master or there is a sense of urgency,
diff --git a/spec/lib/gitlab/import_export/project/object_builder_spec.rb b/spec/lib/gitlab/import_export/project/object_builder_spec.rb
index 20d882c82be..4c9f9f7c690 100644
--- a/spec/lib/gitlab/import_export/project/object_builder_spec.rb
+++ b/spec/lib/gitlab/import_export/project/object_builder_spec.rb
@@ -150,4 +150,30 @@ RSpec.describe Gitlab::ImportExport::Project::ObjectBuilder do
expect(merge_request.persisted?).to be true
end
end
+
+ context 'merge request diff commit users' do
+ it 'finds the existing user' do
+ user = MergeRequest::DiffCommitUser
+ .find_or_create('Alice', 'alice@example.com')
+
+ found = described_class.build(
+ MergeRequest::DiffCommitUser,
+ 'name' => 'Alice',
+ 'email' => 'alice@example.com'
+ )
+
+ expect(found).to eq(user)
+ end
+
+ it 'creates a new user' do
+ found = described_class.build(
+ MergeRequest::DiffCommitUser,
+ 'name' => 'Alice',
+ 'email' => 'alice@example.com'
+ )
+
+ expect(found.name).to eq('Alice')
+ expect(found.email).to eq('alice@example.com')
+ end
+ end
end
diff --git a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
index 1b5fba85020..82f465c4f9e 100644
--- a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
@@ -224,6 +224,27 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do
expect(MergeRequestDiffCommit.count).to eq(77)
end
+ it 'assigns committer and author details to all diff commits' do
+ MergeRequestDiffCommit.all.each do |commit|
+ expect(commit.commit_author_id).not_to be_nil
+ expect(commit.committer_id).not_to be_nil
+ end
+ end
+
+ it 'assigns the correct commit users to different diff commits' do
+ commit1 = MergeRequestDiffCommit
+ .find_by(sha: '0b4bc9a49b562e85de7cc9e834518ea6828729b9')
+
+ commit2 = MergeRequestDiffCommit
+ .find_by(sha: 'a4e5dfebf42e34596526acb8611bc7ed80e4eb3f')
+
+ expect(commit1.commit_author.name).to eq('Dmitriy Zaporozhets')
+ expect(commit1.commit_author.email).to eq('dmitriy.zaporozhets@gmail.com')
+
+ expect(commit2.commit_author.name).to eq('James Lopez')
+ expect(commit2.commit_author.email).to eq('james@jameslopez.es')
+ end
+
it 'has the correct data for merge request latest_merge_request_diff' do
MergeRequest.find_each do |merge_request|
expect(merge_request.latest_merge_request_diff_id).to eq(merge_request.merge_request_diffs.maximum(:id))
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index 2173bee6b4b..77d126e012e 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -235,6 +235,10 @@ MergeRequestDiffCommit:
- committer_email
- message
- trailers
+MergeRequest::DiffCommitUser:
+- id
+- name
+- email
MergeRequestDiffFile:
- merge_request_diff_id
- relative_order
@@ -645,6 +649,7 @@ Timelog:
- spent_at
- created_at
- updated_at
+- summary
ProjectAutoDevops:
- id
- enabled
diff --git a/spec/lib/gitlab/import_export/shared_spec.rb b/spec/lib/gitlab/import_export/shared_spec.rb
index feeb88397eb..1945156ca59 100644
--- a/spec/lib/gitlab/import_export/shared_spec.rb
+++ b/spec/lib/gitlab/import_export/shared_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe Gitlab::ImportExport::Shared do
describe '#export_path' do
it 'uses a random hash relative to project path' do
- expect(subject.export_path).to match(/#{base_path}\h{32}\/\h{32}/)
+ expect(subject.export_path).to match(%r{#{base_path}\h{32}/\h{32}})
end
it 'memoizes the path' do
@@ -44,7 +44,7 @@ RSpec.describe Gitlab::ImportExport::Shared do
subject = described_class.new(group)
base_path = %(/tmp/gitlab_exports/@groups/)
- expect(subject.base_path).to match(/#{base_path}\h{2}\/\h{2}\/\h{64}/)
+ expect(subject.base_path).to match(%r{#{base_path}\h{2}/\h{2}/\h{64}})
end
end
end
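
The change from /.../ to %r{...} literals only swaps the regexp delimiter so the forward slashes in the expected paths no longer need escaping; the matches are unchanged:

path = 'ab/cd'

/\h{2}\/\h{2}/.match?(path)  # => true, slashes escaped
%r{\h{2}/\h{2}}.match?(path) # => true, same match without the escapes
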
diff --git a/spec/lib/gitlab/import_export/snippet_repo_restorer_spec.rb b/spec/lib/gitlab/import_export/snippet_repo_restorer_spec.rb
index fe934cadedd..c1661cf02b6 100644
--- a/spec/lib/gitlab/import_export/snippet_repo_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/snippet_repo_restorer_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe Gitlab::ImportExport::SnippetRepoRestorer do
expect(restorer.restore).to be_truthy
end.to change { SnippetRepository.count }.by(1)
- blob = snippet.repository.blob_at('HEAD', snippet.file_name)
+ blob = snippet.repository.blob_at(snippet.default_branch, snippet.file_name)
expect(blob).not_to be_nil
expect(blob.data).to eq(snippet.content)
end
diff --git a/spec/lib/gitlab/instrumentation_helper_spec.rb b/spec/lib/gitlab/instrumentation_helper_spec.rb
index 28ae90d4947..48fcc9f93db 100644
--- a/spec/lib/gitlab/instrumentation_helper_spec.rb
+++ b/spec/lib/gitlab/instrumentation_helper_spec.rb
@@ -99,23 +99,6 @@ RSpec.describe Gitlab::InstrumentationHelper do
:mem_mallocs
)
end
-
- context 'when trace_memory_allocations is disabled' do
- before do
- stub_feature_flags(trace_memory_allocations: false)
- Gitlab::Memory::Instrumentation.ensure_feature_flag!
- end
-
- it 'does not log memory usage metrics' do
- subject
-
- expect(payload).not_to include(
- :mem_objects,
- :mem_bytes,
- :mem_mallocs
- )
- end
- end
end
context 'when load balancing is enabled' do
@@ -133,7 +116,37 @@ RSpec.describe Gitlab::InstrumentationHelper do
db_primary_count: 0,
db_primary_cached_count: 0,
db_primary_wal_count: 0,
- db_replica_wal_count: 0)
+ db_replica_wal_count: 0,
+ db_primary_wal_cached_count: 0,
+ db_replica_wal_cached_count: 0)
+ end
+
+ context 'when replica caught up search was made' do
+ before do
+ Gitlab::SafeRequestStore[:caught_up_replica_pick_ok] = 2
+ Gitlab::SafeRequestStore[:caught_up_replica_pick_fail] = 1
+ end
+
+ it 'includes related metrics' do
+ subject
+
+ expect(payload).to include(caught_up_replica_pick_ok: 2)
+ expect(payload).to include(caught_up_replica_pick_fail: 1)
+ end
+ end
+
+ context 'when only a single counter was updated' do
+ before do
+ Gitlab::SafeRequestStore[:caught_up_replica_pick_ok] = 1
+ Gitlab::SafeRequestStore[:caught_up_replica_pick_fail] = nil
+ end
+
+ it 'includes only that counter into logging' do
+ subject
+
+ expect(payload).to include(caught_up_replica_pick_ok: 1)
+ expect(payload).not_to include(:caught_up_replica_pick_fail)
+ end
end
end
@@ -150,7 +163,9 @@ RSpec.describe Gitlab::InstrumentationHelper do
db_primary_count: 0,
db_primary_cached_count: 0,
db_primary_wal_count: 0,
- db_replica_wal_count: 0)
+ db_replica_wal_count: 0,
+ db_primary_wal_cached_count: 0,
+ db_replica_wal_cached_count: 0)
end
end
end
diff --git a/spec/lib/gitlab/integrations/sti_type_spec.rb b/spec/lib/gitlab/integrations/sti_type_spec.rb
index 3154872ed04..70b93d6a4b5 100644
--- a/spec/lib/gitlab/integrations/sti_type_spec.rb
+++ b/spec/lib/gitlab/integrations/sti_type_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Gitlab::Integrations::StiType do
context 'SQL SELECT' do
let(:expected_sql) do
<<~SQL.strip
- SELECT "services".* FROM "services" WHERE "services"."type" = 'AsanaService'
+ SELECT "integrations".* FROM "integrations" WHERE "integrations"."type" = 'AsanaService'
SQL
end
@@ -25,7 +25,7 @@ RSpec.describe Gitlab::Integrations::StiType do
context 'SQL CREATE' do
let(:expected_sql) do
<<~SQL.strip
- INSERT INTO "services" ("type") VALUES ('AsanaService')
+ INSERT INTO "integrations" ("type") VALUES ('AsanaService')
SQL
end
@@ -42,7 +42,7 @@ RSpec.describe Gitlab::Integrations::StiType do
context 'SQL UPDATE' do
let(:expected_sql) do
<<~SQL.strip
- UPDATE "services" SET "type" = 'AsanaService'
+ UPDATE "integrations" SET "type" = 'AsanaService'
SQL
end
@@ -61,7 +61,7 @@ RSpec.describe Gitlab::Integrations::StiType do
context 'SQL DELETE' do
let(:expected_sql) do
<<~SQL.strip
- DELETE FROM "services" WHERE "services"."type" = 'AsanaService'
+ DELETE FROM "integrations" WHERE "integrations"."type" = 'AsanaService'
SQL
end
diff --git a/spec/lib/gitlab/jira_import/base_importer_spec.rb b/spec/lib/gitlab/jira_import/base_importer_spec.rb
index 9d8143775f9..479551095de 100644
--- a/spec/lib/gitlab/jira_import/base_importer_spec.rb
+++ b/spec/lib/gitlab/jira_import/base_importer_spec.rb
@@ -9,10 +9,10 @@ RSpec.describe Gitlab::JiraImport::BaseImporter do
describe 'with any inheriting class' do
context 'when project validation is ok' do
- let!(:jira_service) { create(:jira_service, project: project) }
+ let!(:jira_integration) { create(:jira_integration, project: project) }
before do
- stub_jira_service_test
+ stub_jira_integration_test
allow(Gitlab::JiraImport).to receive(:validate_project_settings!)
end
diff --git a/spec/lib/gitlab/jira_import/issues_importer_spec.rb b/spec/lib/gitlab/jira_import/issues_importer_spec.rb
index 4a32f0fd3a9..aead5405bd1 100644
--- a/spec/lib/gitlab/jira_import/issues_importer_spec.rb
+++ b/spec/lib/gitlab/jira_import/issues_importer_spec.rb
@@ -9,12 +9,12 @@ RSpec.describe Gitlab::JiraImport::IssuesImporter do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:jira_import) { create(:jira_import_state, project: project, user: current_user) }
- let_it_be(:jira_service) { create(:jira_service, project: project) }
+ let_it_be(:jira_integration) { create(:jira_integration, project: project) }
subject { described_class.new(project) }
before do
- stub_jira_service_test
+ stub_jira_integration_test
end
describe '#imported_items_cache_key' do
diff --git a/spec/lib/gitlab/jira_import/labels_importer_spec.rb b/spec/lib/gitlab/jira_import/labels_importer_spec.rb
index db98a83cb3c..71440590815 100644
--- a/spec/lib/gitlab/jira_import/labels_importer_spec.rb
+++ b/spec/lib/gitlab/jira_import/labels_importer_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::JiraImport::LabelsImporter do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
- let_it_be(:jira_service) { create(:jira_service, project: project) }
+ let_it_be(:jira_integration) { create(:jira_integration, project: project) }
let(:importer) { described_class.new(project) }
@@ -20,7 +20,7 @@ RSpec.describe Gitlab::JiraImport::LabelsImporter do
describe '#execute', :clean_gitlab_redis_cache do
before do
- stub_jira_service_test
+ stub_jira_integration_test
end
context 'when label is missing from jira import' do
diff --git a/spec/lib/gitlab/jira_import_spec.rb b/spec/lib/gitlab/jira_import_spec.rb
index 94fdff984d5..a7c73e79641 100644
--- a/spec/lib/gitlab/jira_import_spec.rb
+++ b/spec/lib/gitlab/jira_import_spec.rb
@@ -31,12 +31,12 @@ RSpec.describe Gitlab::JiraImport do
end
end
- context 'when Jira service was not setup' do
+ context 'when Jira integration was not setup' do
it_behaves_like 'raise Jira import error', 'Jira integration not configured.'
end
- context 'when Jira service exists' do
- let!(:jira_service) { create(:jira_service, project: project, active: true) }
+ context 'when Jira integration exists' do
+ let!(:jira_integration) { create(:jira_integration, project: project, active: true) }
context 'when Jira connection is not valid' do
before do
@@ -50,14 +50,14 @@ RSpec.describe Gitlab::JiraImport do
end
before do
- stub_jira_service_test
+ stub_jira_integration_test
end
context 'without user param' do
it_behaves_like 'jira configuration base checks'
context 'when jira connection is valid' do
- let!(:jira_service) { create(:jira_service, project: project, active: true) }
+ let!(:jira_integration) { create(:jira_integration, project: project, active: true) }
it 'does not return any error' do
expect { subject }.not_to raise_error
@@ -77,8 +77,8 @@ RSpec.describe Gitlab::JiraImport do
it_behaves_like 'jira configuration base checks'
- context 'when jira service is configured' do
- let!(:jira_service) { create(:jira_service, project: project, active: true) }
+ context 'when jira integration is configured' do
+ let!(:jira_integration) { create(:jira_integration, project: project, active: true) }
context 'when issues feature is disabled' do
let_it_be(:project, reload: true) { create(:project, :issues_disabled) }
@@ -96,7 +96,7 @@ RSpec.describe Gitlab::JiraImport do
context 'when user does not have permissions to run the import' do
before do
- create(:jira_service, project: project, active: true)
+ create(:jira_integration, project: project, active: true)
project.add_developer(user)
end
diff --git a/spec/lib/gitlab/json_cache_spec.rb b/spec/lib/gitlab/json_cache_spec.rb
index 563b3d35823..8265c3449bb 100644
--- a/spec/lib/gitlab/json_cache_spec.rb
+++ b/spec/lib/gitlab/json_cache_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::JsonCache do
let_it_be(:broadcast_message) { create(:broadcast_message) }
+
let(:backend) { double('backend').as_null_object }
let(:namespace) { 'geo' }
let(:key) { 'foo' }
diff --git a/spec/lib/gitlab/kas/client_spec.rb b/spec/lib/gitlab/kas/client_spec.rb
index 7bf2d30ca48..40e18f58ee4 100644
--- a/spec/lib/gitlab/kas/client_spec.rb
+++ b/spec/lib/gitlab/kas/client_spec.rb
@@ -30,10 +30,11 @@ RSpec.describe Gitlab::Kas::Client do
describe 'gRPC calls' do
let(:token) { instance_double(JSONWebToken::HMACToken, encoded: 'test-token') }
+ let(:kas_url) { 'grpc://example.kas.internal' }
before do
allow(Gitlab::Kas).to receive(:enabled?).and_return(true)
- allow(Gitlab::Kas).to receive(:internal_url).and_return('grpc://example.kas.internal')
+ allow(Gitlab::Kas).to receive(:internal_url).and_return(kas_url)
expect(JSONWebToken::HMACToken).to receive(:new)
.with(Gitlab::Kas.secret)
@@ -80,5 +81,21 @@ RSpec.describe Gitlab::Kas::Client do
it { expect(subject).to eq(agent_configurations) }
end
+
+ describe 'with grpcs' do
+ let(:stub) { instance_double(Gitlab::Agent::ConfigurationProject::Rpc::ConfigurationProject::Stub) }
+ let(:kas_url) { 'grpcs://example.kas.internal' }
+
+ it 'uses a ChannelCredentials object' do
+ expect(Gitlab::Agent::ConfigurationProject::Rpc::ConfigurationProject::Stub).to receive(:new)
+ .with('example.kas.internal', instance_of(GRPC::Core::ChannelCredentials), timeout: described_class::TIMEOUT)
+ .and_return(stub)
+
+ allow(stub).to receive(:list_agent_config_files)
+ .and_return(double(config_files: []))
+
+ described_class.new.list_agent_config_files(project: project)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/kas_spec.rb b/spec/lib/gitlab/kas_spec.rb
index c9d40f785b8..24d2b03fe2a 100644
--- a/spec/lib/gitlab/kas_spec.rb
+++ b/spec/lib/gitlab/kas_spec.rb
@@ -104,48 +104,4 @@ RSpec.describe Gitlab::Kas do
end
end
end
-
- describe '.included_in_gitlab_com_rollout?' do
- let_it_be(:project) { create(:project) }
-
- context 'not GitLab.com' do
- before do
- allow(Gitlab).to receive(:com?).and_return(false)
- end
-
- it 'returns true' do
- expect(described_class.included_in_gitlab_com_rollout?(project)).to be_truthy
- end
- end
-
- context 'GitLab.com' do
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
- end
-
- context 'kubernetes_agent_on_gitlab_com feature flag disabled' do
- before do
- stub_feature_flags(kubernetes_agent_on_gitlab_com: false)
- end
-
- it 'returns false' do
- expect(described_class.included_in_gitlab_com_rollout?(project)).to be_falsey
- end
- end
-
- context 'kubernetes_agent_on_gitlab_com feature flag enabled' do
- before do
- stub_feature_flags(kubernetes_agent_on_gitlab_com: project)
- end
-
- it 'returns true' do
- expect(described_class.included_in_gitlab_com_rollout?(project)).to be_truthy
- end
-
- it 'returns false for another project' do
- expect(described_class.included_in_gitlab_com_rollout?(create(:project))).to be_falsey
- end
- end
- end
- end
end
diff --git a/spec/lib/gitlab/kroki_spec.rb b/spec/lib/gitlab/kroki_spec.rb
index 31d3edd158b..7d29d018ff1 100644
--- a/spec/lib/gitlab/kroki_spec.rb
+++ b/spec/lib/gitlab/kroki_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::Kroki do
describe '.formats' do
def default_formats
- %w[bytefield c4plantuml ditaa erd graphviz nomnoml plantuml svgbob umlet vega vegalite wavedrom].freeze
+ %w[bytefield c4plantuml ditaa erd graphviz nomnoml pikchr plantuml svgbob umlet vega vegalite wavedrom].freeze
end
subject { described_class.formats(Gitlab::CurrentSettings) }
diff --git a/spec/lib/gitlab/kubernetes/cilium_network_policy_spec.rb b/spec/lib/gitlab/kubernetes/cilium_network_policy_spec.rb
index 0092c69d0bb..ec1f46100a4 100644
--- a/spec/lib/gitlab/kubernetes/cilium_network_policy_spec.rb
+++ b/spec/lib/gitlab/kubernetes/cilium_network_policy_spec.rb
@@ -206,6 +206,14 @@ RSpec.describe Gitlab::Kubernetes::CiliumNetworkPolicy do
it { is_expected.to be_nil }
end
+
+ context 'with environment_ids' do
+ subject { Gitlab::Kubernetes::CiliumNetworkPolicy.from_resource(resource, [1, 2, 3]) }
+
+ it 'includes environment_ids in as_json result' do
+ expect(subject.as_json).to include(environment_ids: [1, 2, 3])
+ end
+ end
end
describe '#resource' do
diff --git a/spec/lib/gitlab/kubernetes/network_policy_spec.rb b/spec/lib/gitlab/kubernetes/network_policy_spec.rb
index d3640c61d94..2cba37a1302 100644
--- a/spec/lib/gitlab/kubernetes/network_policy_spec.rb
+++ b/spec/lib/gitlab/kubernetes/network_policy_spec.rb
@@ -196,6 +196,14 @@ RSpec.describe Gitlab::Kubernetes::NetworkPolicy do
it { is_expected.to be_nil }
end
+
+ context 'with environment_ids' do
+ subject { Gitlab::Kubernetes::NetworkPolicy.from_resource(resource, [1, 2, 3]) }
+
+ it 'includes environment_ids in as_json result' do
+ expect(subject.as_json).to include(environment_ids: [1, 2, 3])
+ end
+ end
end
describe '#resource' do
diff --git a/spec/lib/gitlab/language_detection_spec.rb b/spec/lib/gitlab/language_detection_spec.rb
index 14523be8ec6..9430ecf7baf 100644
--- a/spec/lib/gitlab/language_detection_spec.rb
+++ b/spec/lib/gitlab/language_detection_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Gitlab::LanguageDetection do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:ruby) { create(:programming_language, name: 'Ruby') }
let_it_be(:haskell) { create(:programming_language, name: 'Haskell') }
+
let(:repository) { project.repository }
let(:detection) do
[{ value: 66.63, label: "Ruby", color: "#701516", highlight: "#701516" },
diff --git a/spec/lib/gitlab/lfs_token_spec.rb b/spec/lib/gitlab/lfs_token_spec.rb
index 4b40e8960b2..a8472062f03 100644
--- a/spec/lib/gitlab/lfs_token_spec.rb
+++ b/spec/lib/gitlab/lfs_token_spec.rb
@@ -126,7 +126,7 @@ RSpec.describe Gitlab::LfsToken, :clean_gitlab_redis_shared_state do
end
context 'when the user password is expired' do
- let(:actor) { create(:user, password_expires_at: 1.minute.ago) }
+ let(:actor) { create(:user, password_expires_at: 1.minute.ago, password_automatically_set: true) }
it 'returns false' do
expect(lfs_token.token_valid?(lfs_token.token)).to be false
diff --git a/spec/lib/gitlab/memory/instrumentation_spec.rb b/spec/lib/gitlab/memory/instrumentation_spec.rb
index 0dbe9a8e275..069c45da18a 100644
--- a/spec/lib/gitlab/memory/instrumentation_spec.rb
+++ b/spec/lib/gitlab/memory/instrumentation_spec.rb
@@ -18,24 +18,8 @@ RSpec.describe Gitlab::Memory::Instrumentation do
describe '.start_thread_memory_allocations' do
subject { described_class.start_thread_memory_allocations }
- context 'when feature flag trace_memory_allocations is enabled' do
- before do
- stub_feature_flags(trace_memory_allocations: true)
- end
-
- it 'a hash is returned' do
- is_expected.not_to be_empty
- end
- end
-
- context 'when feature flag trace_memory_allocations is disabled' do
- before do
- stub_feature_flags(trace_memory_allocations: false)
- end
-
- it 'a nil is returned' do
- is_expected.to be_nil
- end
+ it 'returns a hash' do
+ is_expected.to be_a(Hash)
end
context 'when feature is unavailable' do
@@ -63,30 +47,14 @@ RSpec.describe Gitlab::Memory::Instrumentation do
expect(described_class).to receive(:measure_thread_memory_allocations).and_call_original
end
- context 'when feature flag trace_memory_allocations is enabled' do
- before do
- stub_feature_flags(trace_memory_allocations: true)
- end
-
- it 'a hash is returned' do
- result = subject
- expect(result).to include(
- mem_objects: be > 1000,
- mem_mallocs: be > 1000,
- mem_bytes: be > 100_000, # 100 items * 100 bytes each
- mem_total_bytes: eq(result[:mem_bytes] + 40 * result[:mem_objects])
- )
- end
- end
-
- context 'when feature flag trace_memory_allocations is disabled' do
- before do
- stub_feature_flags(trace_memory_allocations: false)
- end
-
- it 'a nil is returned' do
- is_expected.to be_nil
- end
+ it 'returns a hash' do
+ result = subject
+ expect(result).to include(
+ mem_objects: be > 1000,
+ mem_mallocs: be > 1000,
+ mem_bytes: be > 100_000, # 100 items * 100 bytes each
+ mem_total_bytes: eq(result[:mem_bytes] + 40 * result[:mem_objects])
+ )
end
context 'when feature is unavailable' do
diff --git a/spec/lib/gitlab/metrics/subscribers/action_cable_spec.rb b/spec/lib/gitlab/metrics/subscribers/action_cable_spec.rb
index 153cf43be0a..0516091a8ec 100644
--- a/spec/lib/gitlab/metrics/subscribers/action_cable_spec.rb
+++ b/spec/lib/gitlab/metrics/subscribers/action_cable_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Metrics::Subscribers::ActionCable, :request_store do
let(:subscriber) { described_class.new }
let(:counter) { double(:counter) }
- let(:data) { { data: { event: 'updated' } } }
+ let(:data) { { 'result' => { 'data' => { 'event' => 'updated' } } } }
let(:channel_class) { 'IssuesChannel' }
let(:event) do
double(
@@ -35,6 +35,17 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActionCable, :request_store do
subscriber.transmit(event)
end
+
+ it 'tracks size of payload as JSON' do
+ allow(::Gitlab::Metrics).to receive(:histogram).with(
+ :action_cable_transmitted_bytes, /transmit/
+ ).and_return(counter)
+ message_size = ::ActiveSupport::JSON.encode(data).bytesize
+
+ expect(counter).to receive(:observe).with({ channel: channel_class, operation: 'event' }, message_size)
+
+ subscriber.transmit(event)
+ end
end
describe '#broadcast' do
diff --git a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
index cffa62c3a52..6fc8f090431 100644
--- a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
+++ b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do
let(:env) { {} }
let(:subscriber) { described_class.new }
- let(:connection) { double(:connection) }
+ let(:connection) { ActiveRecord::Base.connection }
describe '#transaction' do
let(:web_transaction) { double('Gitlab::Metrics::WebTransaction') }
@@ -183,6 +183,8 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do
'SQL' | 'UPDATE users SET admin = true WHERE id = 10' | true | true | false | false
'SQL' | 'SELECT pg_current_wal_insert_lsn()::text AS location' | true | false | false | true
'SQL' | 'SELECT pg_last_wal_replay_lsn()::text AS location' | true | false | false | true
+ 'CACHE' | 'SELECT pg_current_wal_insert_lsn()::text AS location' | true | false | true | true
+ 'CACHE' | 'SELECT pg_last_wal_replay_lsn()::text AS location' | true | false | true | true
'CACHE' | 'SELECT * FROM users WHERE id = 10' | true | false | true | false
'SCHEMA' | "SELECT attr.attname FROM pg_attribute attr INNER JOIN pg_constraint cons ON attr.attrelid = cons.conrelid AND attr.attnum = any(cons.conkey) WHERE cons.contype = 'p' AND cons.conrelid = '\"projects\"'::regclass" | false | false | false | false
nil | 'BEGIN' | false | false | false | false
diff --git a/spec/lib/gitlab/metrics/subscribers/load_balancing_spec.rb b/spec/lib/gitlab/metrics/subscribers/load_balancing_spec.rb
new file mode 100644
index 00000000000..21a6573c6fd
--- /dev/null
+++ b/spec/lib/gitlab/metrics/subscribers/load_balancing_spec.rb
@@ -0,0 +1,115 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Metrics::Subscribers::LoadBalancing, :request_store do
+ let(:subscriber) { described_class.new }
+
+ before do
+ allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true)
+ end
+
+ describe '#caught_up_replica_pick' do
+ shared_examples 'having payload result value' do |result, counter_name|
+ subject { subscriber.caught_up_replica_pick(event) }
+
+ let(:payload) { { result: result } }
+
+ let(:event) do
+ double(
+ :event,
+ name: 'load_balancing.caught_up_replica_pick',
+ payload: payload
+ )
+ end
+
+ it 'stores per-request caught up replica search result' do
+ subject
+
+ expect(Gitlab::SafeRequestStore[counter_name]).to eq(1)
+ end
+ end
+
+ it_behaves_like 'having payload result value', true, :caught_up_replica_pick_ok
+ it_behaves_like 'having payload result value', false, :caught_up_replica_pick_fail
+ end
+
+ describe "#web_transaction_completed" do
+ subject { subscriber.web_transaction_completed(event) }
+
+ let(:event) do
+ double(
+ :event,
+ name: 'load_balancing.web_transaction_completed',
+ payload: {}
+ )
+ end
+
+ let(:web_transaction) { double('Gitlab::Metrics::WebTransaction') }
+
+ before do
+ allow(::Gitlab::Metrics::WebTransaction).to receive(:current)
+ .and_return(web_transaction)
+ end
+
+ context 'when no data in request store' do
+ before do
+ Gitlab::SafeRequestStore[:caught_up_replica_pick] = nil
+ end
+
+ it 'does not change the counters' do
+ expect(web_transaction).not_to receive(:increment)
+
+ subject
+ end
+ end
+
+ context 'when request store was updated' do
+ before do
+ Gitlab::SafeRequestStore[:caught_up_replica_pick_ok] = 2
+ Gitlab::SafeRequestStore[:caught_up_replica_pick_fail] = 1
+ end
+
+ it 'increments :caught_up_replica_pick count with proper label' do
+ expect(web_transaction).to receive(:increment).with(:gitlab_transaction_caught_up_replica_pick_count_total, 2, { result: true })
+ expect(web_transaction).to receive(:increment).with(:gitlab_transaction_caught_up_replica_pick_count_total, 1, { result: false })
+
+ subject
+ end
+ end
+ end
+
+ describe '.load_balancing_payload' do
+ subject { described_class.load_balancing_payload }
+
+ context 'when no data in request store' do
+ before do
+ Gitlab::SafeRequestStore[:caught_up_replica_pick_ok] = nil
+ Gitlab::SafeRequestStore[:caught_up_replica_pick_fail] = nil
+ end
+
+ it 'returns empty hash' do
+ expect(subject).to eq({})
+ end
+ end
+
+ context 'when request store was updated for a single counter' do
+ before do
+ Gitlab::SafeRequestStore[:caught_up_replica_pick_ok] = 2
+ end
+
+ it 'returns proper payload with only that counter' do
+ expect(subject).to eq({ caught_up_replica_pick_ok: 2 })
+ end
+ end
+
+ context 'when both counters were updated' do
+ before do
+ Gitlab::SafeRequestStore[:caught_up_replica_pick_ok] = 2
+ Gitlab::SafeRequestStore[:caught_up_replica_pick_fail] = 1
+ end
+
+ it 'returns proper payload' do
+ expect(subject).to eq({ caught_up_replica_pick_ok: 2, caught_up_replica_pick_fail: 1 })
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/object_hierarchy_spec.rb b/spec/lib/gitlab/object_hierarchy_spec.rb
index 7615b37521a..64161fbafdd 100644
--- a/spec/lib/gitlab/object_hierarchy_spec.rb
+++ b/spec/lib/gitlab/object_hierarchy_spec.rb
@@ -9,265 +9,178 @@ RSpec.describe Gitlab::ObjectHierarchy do
let(:options) { {} }
- shared_context 'Gitlab::ObjectHierarchy test cases' do
- describe '#base_and_ancestors' do
- let(:relation) do
- described_class.new(Group.where(id: child2.id), options: options).base_and_ancestors
- end
-
- it 'includes the base rows' do
- expect(relation).to include(child2)
- end
-
- it 'includes all of the ancestors' do
- expect(relation).to include(parent, child1)
- end
-
- it 'can find ancestors upto a certain level' do
- relation = described_class.new(Group.where(id: child2), options: options).base_and_ancestors(upto: child1)
-
- expect(relation).to contain_exactly(child2)
- end
-
- it 'uses ancestors_base #initialize argument' do
- relation = described_class.new(Group.where(id: child2.id), Group.none, options: options).base_and_ancestors
+ describe '#base_and_ancestors' do
+ let(:relation) do
+ described_class.new(Group.where(id: child2.id), options: options).base_and_ancestors
+ end
- expect(relation).to include(parent, child1, child2)
- end
+ it 'includes the base rows' do
+ expect(relation).to include(child2)
+ end
- it 'does not allow the use of #update_all' do
- expect { relation.update_all(share_with_group_lock: false) }
- .to raise_error(ActiveRecord::ReadOnlyRecord)
- end
+ it 'includes all of the ancestors' do
+ expect(relation).to include(parent, child1)
+ end
- describe 'hierarchy_order option' do
- let(:relation) do
- described_class.new(Group.where(id: child2.id), options: options).base_and_ancestors(hierarchy_order: hierarchy_order)
- end
+ it 'can find ancestors up to a certain level' do
+ relation = described_class.new(Group.where(id: child2), options: options).base_and_ancestors(upto: child1)
- context ':asc' do
- let(:hierarchy_order) { :asc }
+ expect(relation).to contain_exactly(child2)
+ end
- it 'orders by child to parent' do
- expect(relation).to eq([child2, child1, parent])
- end
- end
+ it 'uses ancestors_base #initialize argument' do
+ relation = described_class.new(Group.where(id: child2.id), Group.none, options: options).base_and_ancestors
- context ':desc' do
- let(:hierarchy_order) { :desc }
+ expect(relation).to include(parent, child1, child2)
+ end
- it 'orders by parent to child' do
- expect(relation).to eq([parent, child1, child2])
- end
- end
- end
+ it 'does not allow the use of #update_all' do
+ expect { relation.update_all(share_with_group_lock: false) }
+ .to raise_error(ActiveRecord::ReadOnlyRecord)
end
- describe '#base_and_descendants' do
+ describe 'hierarchy_order option' do
let(:relation) do
- described_class.new(Group.where(id: parent.id), options: options).base_and_descendants
- end
-
- it 'includes the base rows' do
- expect(relation).to include(parent)
- end
-
- it 'includes all the descendants' do
- expect(relation).to include(child1, child2)
+ described_class.new(Group.where(id: child2.id), options: options).base_and_ancestors(hierarchy_order: hierarchy_order)
end
- it 'uses descendants_base #initialize argument' do
- relation = described_class.new(Group.none, Group.where(id: parent.id), options: options).base_and_descendants
+ context ':asc' do
+ let(:hierarchy_order) { :asc }
- expect(relation).to include(parent, child1, child2)
- end
-
- it 'does not allow the use of #update_all' do
- expect { relation.update_all(share_with_group_lock: false) }
- .to raise_error(ActiveRecord::ReadOnlyRecord)
- end
-
- context 'when with_depth is true' do
- let(:relation) do
- described_class.new(Group.where(id: parent.id), options: options).base_and_descendants(with_depth: true)
+ it 'orders by child to parent' do
+ expect(relation).to eq([child2, child1, parent])
end
+ end
- it 'includes depth in the results' do
- object_depths = {
- parent.id => 1,
- child1.id => 2,
- child2.id => 3
- }
+ context ':desc' do
+ let(:hierarchy_order) { :desc }
- relation.each do |object|
- expect(object.depth).to eq(object_depths[object.id])
- end
+ it 'orders by parent to child' do
+ expect(relation).to eq([parent, child1, child2])
end
end
end
+ end
- describe '#descendants' do
- it 'includes only the descendants' do
- relation = described_class.new(Group.where(id: parent), options: options).descendants
-
- expect(relation).to contain_exactly(child1, child2)
- end
+ describe '#base_and_descendants' do
+ let(:relation) do
+ described_class.new(Group.where(id: parent.id), options: options).base_and_descendants
end
- describe '#max_descendants_depth' do
- subject { described_class.new(base_relation, options: options).max_descendants_depth }
-
- context 'when base relation is empty' do
- let(:base_relation) { Group.where(id: nil) }
-
- it { expect(subject).to be_nil }
- end
-
- context 'when base has no children' do
- let(:base_relation) { Group.where(id: child2) }
-
- it { expect(subject).to eq(1) }
- end
-
- context 'when base has grandchildren' do
- let(:base_relation) { Group.where(id: parent) }
-
- it { expect(subject).to eq(3) }
- end
+ it 'includes the base rows' do
+ expect(relation).to include(parent)
end
- describe '#ancestors' do
- it 'includes only the ancestors' do
- relation = described_class.new(Group.where(id: child2), options: options).ancestors
+ it 'includes all the descendants' do
+ expect(relation).to include(child1, child2)
+ end
- expect(relation).to contain_exactly(child1, parent)
- end
+ it 'uses descendants_base #initialize argument' do
+ relation = described_class.new(Group.none, Group.where(id: parent.id), options: options).base_and_descendants
- it 'can find ancestors upto a certain level' do
- relation = described_class.new(Group.where(id: child2), options: options).ancestors(upto: child1)
+ expect(relation).to include(parent, child1, child2)
+ end
- expect(relation).to be_empty
- end
+ it 'does not allow the use of #update_all' do
+ expect { relation.update_all(share_with_group_lock: false) }
+ .to raise_error(ActiveRecord::ReadOnlyRecord)
end
- describe '#all_objects' do
+ context 'when with_depth is true' do
let(:relation) do
- described_class.new(Group.where(id: child1.id), options: options).all_objects
+ described_class.new(Group.where(id: parent.id), options: options).base_and_descendants(with_depth: true)
end
- it 'includes the base rows' do
- expect(relation).to include(child1)
- end
-
- it 'includes the ancestors' do
- expect(relation).to include(parent)
- end
+ it 'includes depth in the results' do
+ object_depths = {
+ parent.id => 1,
+ child1.id => 2,
+ child2.id => 3
+ }
- it 'includes the descendants' do
- expect(relation).to include(child2)
- end
-
- it 'uses ancestors_base #initialize argument for ancestors' do
- relation = described_class.new(Group.where(id: child1.id), Group.where(id: non_existing_record_id), options: options).all_objects
-
- expect(relation).to include(parent)
+ relation.each do |object|
+ expect(object.depth).to eq(object_depths[object.id])
+ end
end
+ end
+ end
- it 'uses descendants_base #initialize argument for descendants' do
- relation = described_class.new(Group.where(id: non_existing_record_id), Group.where(id: child1.id), options: options).all_objects
-
- expect(relation).to include(child2)
- end
+ describe '#descendants' do
+ it 'includes only the descendants' do
+ relation = described_class.new(Group.where(id: parent), options: options).descendants
- it 'does not allow the use of #update_all' do
- expect { relation.update_all(share_with_group_lock: false) }
- .to raise_error(ActiveRecord::ReadOnlyRecord)
- end
+ expect(relation).to contain_exactly(child1, child2)
end
end
- context 'when the use_distinct_in_object_hierarchy feature flag is enabled' do
- before do
- stub_feature_flags(use_distinct_in_object_hierarchy: true)
- stub_feature_flags(use_distinct_for_all_object_hierarchy: false)
- end
+ describe '#max_descendants_depth' do
+ subject { described_class.new(base_relation, options: options).max_descendants_depth }
- it_behaves_like 'Gitlab::ObjectHierarchy test cases'
+ context 'when base relation is empty' do
+ let(:base_relation) { Group.where(id: nil) }
- it 'calls DISTINCT' do
- expect(child2.self_and_ancestors.to_sql).to include("DISTINCT")
+ it { expect(subject).to be_nil }
end
- context 'when use_traversal_ids feature flag is enabled' do
- it 'does not call DISTINCT' do
- expect(parent.self_and_descendants.to_sql).not_to include("DISTINCT")
- end
+ context 'when base has no children' do
+ let(:base_relation) { Group.where(id: child2) }
+
+ it { expect(subject).to eq(1) }
end
- context 'when use_traversal_ids feature flag is disabled' do
- before do
- stub_feature_flags(use_traversal_ids: false)
- end
+ context 'when base has grandchildren' do
+ let(:base_relation) { Group.where(id: parent) }
- it 'calls DISTINCT' do
- expect(parent.self_and_descendants.to_sql).to include("DISTINCT")
- end
+ it { expect(subject).to eq(3) }
end
end
- context 'when the use_distinct_for_all_object_hierarchy feature flag is enabled' do
- before do
- stub_feature_flags(use_distinct_in_object_hierarchy: false)
- stub_feature_flags(use_distinct_for_all_object_hierarchy: true)
+ describe '#ancestors' do
+ it 'includes only the ancestors' do
+ relation = described_class.new(Group.where(id: child2), options: options).ancestors
+
+ expect(relation).to contain_exactly(child1, parent)
end
- it_behaves_like 'Gitlab::ObjectHierarchy test cases'
+ it 'can find ancestors up to a certain level' do
+ relation = described_class.new(Group.where(id: child2), options: options).ancestors(upto: child1)
- it 'calls DISTINCT' do
- expect(child2.self_and_ancestors.to_sql).to include("DISTINCT")
+ expect(relation).to be_empty
end
+ end
- context 'when use_traversal_ids feature flag is enabled' do
- it 'does not call DISTINCT' do
- expect(parent.self_and_descendants.to_sql).not_to include("DISTINCT")
- end
+ describe '#all_objects' do
+ let(:relation) do
+ described_class.new(Group.where(id: child1.id), options: options).all_objects
end
- context 'when use_traversal_ids feature flag is disabled' do
- before do
- stub_feature_flags(use_traversal_ids: false)
- end
-
- it 'calls DISTINCT' do
- expect(parent.self_and_descendants.to_sql).to include("DISTINCT")
- end
+ it 'includes the base rows' do
+ expect(relation).to include(child1)
+ end
- context 'when the skip_ordering option is set' do
- let(:options) { { skip_ordering: true } }
+ it 'includes the ancestors' do
+ expect(relation).to include(parent)
+ end
- it_behaves_like 'Gitlab::ObjectHierarchy test cases'
+ it 'includes the descendants' do
+ expect(relation).to include(child2)
+ end
- it 'does not include ROW_NUMBER()' do
- query = described_class.new(Group.where(id: parent.id), options: options).base_and_descendants.to_sql
+ it 'uses ancestors_base #initialize argument for ancestors' do
+ relation = described_class.new(Group.where(id: child1.id), Group.where(id: non_existing_record_id), options: options).all_objects
- expect(query).to include("DISTINCT")
- expect(query).not_to include("ROW_NUMBER()")
- end
- end
+ expect(relation).to include(parent)
end
- end
- context 'when the use_distinct_in_object_hierarchy feature flag is disabled' do
- before do
- stub_feature_flags(use_distinct_in_object_hierarchy: false)
- stub_feature_flags(use_distinct_for_all_object_hierarchy: false)
- end
+ it 'uses descendants_base #initialize argument for descendants' do
+ relation = described_class.new(Group.where(id: non_existing_record_id), Group.where(id: child1.id), options: options).all_objects
- it_behaves_like 'Gitlab::ObjectHierarchy test cases'
+ expect(relation).to include(child2)
+ end
- it 'does not call DISTINCT' do
- expect(parent.self_and_descendants.to_sql).not_to include("DISTINCT")
- expect(child2.self_and_ancestors.to_sql).not_to include("DISTINCT")
+ it 'does not allow the use of #update_all' do
+ expect { relation.update_all(share_with_group_lock: false) }
+ .to raise_error(ActiveRecord::ReadOnlyRecord)
end
end
end
diff --git a/spec/lib/gitlab/pagination/keyset/iterator_spec.rb b/spec/lib/gitlab/pagination/keyset/iterator_spec.rb
index 656ae73945e..d8e79287745 100644
--- a/spec/lib/gitlab/pagination/keyset/iterator_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset/iterator_spec.rb
@@ -18,110 +18,127 @@ RSpec.describe Gitlab::Pagination::Keyset::Iterator do
Gitlab::Pagination::Keyset::Order.build([
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: column,
- column_expression: klass.arel_table[column],
- order_expression: ::Gitlab::Database.nulls_order(column, direction, nulls_position),
- reversed_order_expression: ::Gitlab::Database.nulls_order(column, reverse_direction, reverse_nulls_position),
- order_direction: direction,
- nullable: nulls_position,
- distinct: false
+ column_expression: klass.arel_table[column],
+ order_expression: ::Gitlab::Database.nulls_order(column, direction, nulls_position),
+ reversed_order_expression: ::Gitlab::Database.nulls_order(column, reverse_direction, reverse_nulls_position),
+ order_direction: direction,
+ nullable: nulls_position,
+ distinct: false
),
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: 'id',
- order_expression: klass.arel_table[:id].send(direction),
- add_to_projections: true
+ order_expression: klass.arel_table[:id].send(direction)
)
])
end
let(:scope) { project.issues.reorder(custom_reorder) }
- subject { described_class.new(scope: scope) }
+ shared_examples 'iterator examples' do
+ describe '.each_batch' do
+ it 'yields an ActiveRecord::Relation when a block is given' do
+ iterator.each_batch(of: 1) do |relation|
+ expect(relation).to be_a_kind_of(ActiveRecord::Relation)
+ end
+ end
- describe '.each_batch' do
- it 'yields an ActiveRecord::Relation when a block is given' do
- subject.each_batch(of: 1) do |relation|
- expect(relation).to be_a_kind_of(ActiveRecord::Relation)
+ it 'raises error when ordering configuration cannot be automatically determined' do
+ expect do
+ described_class.new(scope: MergeRequestDiffCommit.order(:merge_request_diff_id, :relative_order))
+ end.to raise_error(/The order on the scope does not support keyset pagination/)
end
- end
- it 'accepts a custom batch size' do
- count = 0
+ it 'accepts a custom batch size' do
+ count = 0
- subject.each_batch(of: 2) { |relation| count += relation.count(:all) }
+ iterator.each_batch(of: 2) { |relation| count += relation.count(:all) }
- expect(count).to eq(9)
- end
+ expect(count).to eq(9)
+ end
- it 'allows updating of the yielded relations' do
- time = Time.current
+ it 'allows updating of the yielded relations' do
+ time = Time.current
- subject.each_batch(of: 2) do |relation|
- relation.update_all(updated_at: time)
- end
+ iterator.each_batch(of: 2) do |relation|
+ Issue.connection.execute("UPDATE issues SET updated_at = '#{time.to_s(:inspect)}' WHERE id IN (#{relation.reselect(:id).to_sql})")
+ end
- expect(Issue.where(updated_at: time).count).to eq(9)
- end
+ expect(Issue.pluck(:updated_at)).to all(be_within(5.seconds).of(time))
+ end
- context 'with ordering direction' do
- context 'when ordering asc' do
- it 'orders ascending by default, including secondary order column' do
- positions = []
+ context 'with ordering direction' do
+ context 'when ordering asc' do
+ it 'orders ascending by default, including secondary order column' do
+ positions = []
- subject.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:relative_position, :id)) }
+ iterator.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:relative_position, :id)) }
- expect(positions).to eq(project.issues.order_relative_position_asc.order(id: :asc).pluck(:relative_position, :id))
+ expect(positions).to eq(project.issues.order_relative_position_asc.order(id: :asc).pluck(:relative_position, :id))
+ end
end
- end
- context 'when reversing asc order' do
- let(:scope) { project.issues.order(custom_reorder.reversed_order) }
+ context 'when reversing asc order' do
+ let(:scope) { project.issues.order(custom_reorder.reversed_order) }
- it 'orders in reverse of ascending' do
- positions = []
+ it 'orders in reverse of ascending' do
+ positions = []
- subject.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:relative_position, :id)) }
+ iterator.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:relative_position, :id)) }
- expect(positions).to eq(project.issues.order_relative_position_desc.order(id: :desc).pluck(:relative_position, :id))
+ expect(positions).to eq(project.issues.order_relative_position_desc.order(id: :desc).pluck(:relative_position, :id))
+ end
end
- end
- context 'when asc order, with nulls first' do
- let(:nulls_position) { :nulls_first }
+ context 'when asc order, with nulls first' do
+ let(:nulls_position) { :nulls_first }
- it 'orders ascending with nulls first' do
- positions = []
+ it 'orders ascending with nulls first' do
+ positions = []
- subject.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:relative_position, :id)) }
+ iterator.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:relative_position, :id)) }
- expect(positions).to eq(project.issues.reorder(::Gitlab::Database.nulls_first_order('relative_position', 'ASC')).order(id: :asc).pluck(:relative_position, :id))
+ expect(positions).to eq(project.issues.reorder(::Gitlab::Database.nulls_first_order('relative_position', 'ASC')).order(id: :asc).pluck(:relative_position, :id))
+ end
end
- end
- context 'when ordering desc' do
- let(:direction) { :desc }
- let(:nulls_position) { :nulls_last }
+ context 'when ordering desc' do
+ let(:direction) { :desc }
+ let(:nulls_position) { :nulls_last }
- it 'orders descending' do
- positions = []
+ it 'orders descending' do
+ positions = []
- subject.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:relative_position, :id)) }
+ iterator.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:relative_position, :id)) }
- expect(positions).to eq(project.issues.reorder(::Gitlab::Database.nulls_last_order('relative_position', 'DESC')).order(id: :desc).pluck(:relative_position, :id))
+ expect(positions).to eq(project.issues.reorder(::Gitlab::Database.nulls_last_order('relative_position', 'DESC')).order(id: :desc).pluck(:relative_position, :id))
+ end
end
- end
- context 'when ordering by columns are repeated twice' do
- let(:direction) { :desc }
- let(:column) { :id }
+ context 'when ordering by columns are repeated twice' do
+ let(:direction) { :desc }
+ let(:column) { :id }
- it 'orders descending' do
- positions = []
+ it 'orders descending' do
+ positions = []
- subject.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:id)) }
+ iterator.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:id)) }
- expect(positions).to eq(project.issues.reorder(id: :desc).pluck(:id))
+ expect(positions).to eq(project.issues.reorder(id: :desc).pluck(:id))
+ end
end
end
end
end
+
+ context 'when use_union_optimization is used' do
+ subject(:iterator) { described_class.new(scope: scope, use_union_optimization: true) }
+
+ include_examples 'iterator examples'
+ end
+
+ context 'when use_union_optimization is not used' do
+ subject(:iterator) { described_class.new(scope: scope, use_union_optimization: false) }
+
+ include_examples 'iterator examples'
+ end
end
diff --git a/spec/lib/gitlab/pagination/keyset/order_spec.rb b/spec/lib/gitlab/pagination/keyset/order_spec.rb
index 26f52745b54..562a9bf4460 100644
--- a/spec/lib/gitlab/pagination/keyset/order_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset/order_spec.rb
@@ -171,6 +171,12 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
end
it_behaves_like 'order examples'
+
+ it 'uses the row comparison method' do
+ sql = order.where_values_with_or_query({ year: 2010, month: 5, id: 1 }).to_sql
+
+ expect(sql).to eq('(("my_table"."year", "my_table"."month", "my_table"."id") > (2010, 5, 1))')
+ end
end
context 'when ordering by nullable columns and a distinct column' do
diff --git a/spec/lib/gitlab/pagination/offset_pagination_spec.rb b/spec/lib/gitlab/pagination/offset_pagination_spec.rb
index c9a23170137..f8d50fbc517 100644
--- a/spec/lib/gitlab/pagination/offset_pagination_spec.rb
+++ b/spec/lib/gitlab/pagination/offset_pagination_spec.rb
@@ -130,6 +130,80 @@ RSpec.describe Gitlab::Pagination::OffsetPagination do
end
end
+ context 'when resource already paginated' do
+ let(:resource) { Project.all.page(1).per(1) }
+
+ context 'when per_page param is specified' do
+ let(:query) { base_query.merge(page: 1, per_page: 2) }
+
+ it 'returns appropriate amount of resources based on per_page param' do
+ expect(subject.paginate(resource).count).to eq 2
+ end
+ end
+
+ context 'when page and per page params are strings' do
+ let(:query) { base_query.merge(page: '1', per_page: '1') }
+
+ it 'returns appropriate amount of resources' do
+ expect(subject.paginate(resource).count).to eq 1
+ end
+ end
+
+ context 'when per_page param is blank' do
+ let(:query) { base_query.merge(page: 1) }
+
+ it 'returns appropriate amount of resources' do
+ expect(subject.paginate(resource).count).to eq 1
+ end
+ end
+
+ context 'when page param is blank' do
+ let(:query) { base_query }
+
+ it 'returns appropriate amount of resources based on resource per(N)' do
+ expect(subject.paginate(resource).count).to eq 1
+ end
+ end
+ end
+
+ context 'when resource does not respond to limit_value' do
+ let(:custom_collection) do
+ Class.new do
+ include Enumerable
+
+ def initialize(items)
+ @collection = items
+ end
+
+ def each
+ @collection.each { |item| yield item }
+ end
+
+ def page(number)
+ Kaminari.paginate_array(@collection).page(number)
+ end
+ end
+ end
+
+ let(:resource) { custom_collection.new(Project.all).page(query[:page]) }
+
+ context 'when page param is blank' do
+ let(:query) { base_query }
+
+ it 'returns appropriate amount of resources' do
+ expect(subject.paginate(resource).count).to eq 3
+ end
+ end
+
+ context 'when per_page param is blank' do
+ let(:query) { base_query.merge(page: 1) }
+
+ it 'returns appropriate amount of resources with default per page value' do
+ expect(subject.paginate(resource).count).to eq 3
+ end
+ end
+ end
+
context 'when resource is a paginatable array' do
let(:resource) { Kaminari.paginate_array(Project.all.to_a) }
diff --git a/spec/lib/gitlab/project_search_results_spec.rb b/spec/lib/gitlab/project_search_results_spec.rb
index 2f28b8dfce0..a9c0262fdb2 100644
--- a/spec/lib/gitlab/project_search_results_spec.rb
+++ b/spec/lib/gitlab/project_search_results_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Gitlab::ProjectSearchResults do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
+
let(:query) { 'hello world' }
let(:repository_ref) { nil }
let(:filters) { {} }
@@ -208,11 +209,10 @@ RSpec.describe Gitlab::ProjectSearchResults do
describe 'wiki search' do
let(:project) { create(:project, :public, :wiki_repo) }
- let(:wiki) { build(:project_wiki, project: project) }
before do
- wiki.create_page('Files/Title', 'Content')
- wiki.create_page('CHANGELOG', 'Files example')
+ project.wiki.create_page('Files/Title', 'Content')
+ project.wiki.create_page('CHANGELOG', 'Files example')
end
it_behaves_like 'general blob search', 'wiki', 'wiki_blobs' do
@@ -266,6 +266,7 @@ RSpec.describe Gitlab::ProjectSearchResults do
let_it_be(:closed_result) { create(:issue, :closed, project: project, title: 'foo closed') }
let_it_be(:opened_result) { create(:issue, :opened, project: project, title: 'foo opened') }
let_it_be(:confidential_result) { create(:issue, :confidential, project: project, title: 'foo confidential') }
+
let(:query) { 'foo' }
before do
diff --git a/spec/lib/gitlab/prometheus/adapter_spec.rb b/spec/lib/gitlab/prometheus/adapter_spec.rb
index 1eaed65c805..5320fbc7c4f 100644
--- a/spec/lib/gitlab/prometheus/adapter_spec.rb
+++ b/spec/lib/gitlab/prometheus/adapter_spec.rb
@@ -9,31 +9,31 @@ RSpec.describe Gitlab::Prometheus::Adapter do
subject { described_class.new(project, cluster) }
describe '#prometheus_adapter' do
- context 'prometheus service can execute queries' do
- let(:prometheus_service) { double(:prometheus_service, can_query?: true) }
+ context 'prometheus integration can execute queries' do
+ let(:prometheus_integration) { double(:prometheus_integration, can_query?: true) }
before do
- allow(project).to receive(:find_or_initialize_service).with('prometheus').and_return prometheus_service
+ allow(project).to receive(:find_or_initialize_integration).with('prometheus').and_return prometheus_integration
end
- it 'return prometheus service as prometheus adapter' do
- expect(subject.prometheus_adapter).to eq(prometheus_service)
+ it 'returns prometheus integration as prometheus adapter' do
+ expect(subject.prometheus_adapter).to eq(prometheus_integration)
end
context 'with cluster with prometheus available' do
let!(:prometheus) { create(:clusters_integrations_prometheus, cluster: cluster) }
- it 'returns prometheus service' do
- expect(subject.prometheus_adapter).to eq(prometheus_service)
+ it 'returns prometheus integration' do
+ expect(subject.prometheus_adapter).to eq(prometheus_integration)
end
end
end
- context "prometheus service can't execute queries" do
- let(:prometheus_service) { double(:prometheus_service, can_query?: false) }
+ context "prometheus integration can't execute queries" do
+ let(:prometheus_integration) { double(:prometheus_integration, can_query?: false) }
before do
- allow(project).to receive(:find_or_initialize_service).with('prometheus').and_return prometheus_service
+ allow(project).to receive(:find_or_initialize_integration).with('prometheus').and_return prometheus_integration
end
context 'with cluster with prometheus disabled' do
diff --git a/spec/lib/gitlab/prometheus/query_variables_spec.rb b/spec/lib/gitlab/prometheus/query_variables_spec.rb
index 1dbdb892a5d..d9cac3e1064 100644
--- a/spec/lib/gitlab/prometheus/query_variables_spec.rb
+++ b/spec/lib/gitlab/prometheus/query_variables_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Prometheus::QueryVariables do
describe '.call' do
let_it_be_with_refind(:environment) { create(:environment) }
+
let(:project) { environment.project }
let(:slug) { environment.slug }
let(:params) { {} }
diff --git a/spec/lib/gitlab/rate_limit_helpers_spec.rb b/spec/lib/gitlab/rate_limit_helpers_spec.rb
index e7d4c69d47b..d583c8e58fb 100644
--- a/spec/lib/gitlab/rate_limit_helpers_spec.rb
+++ b/spec/lib/gitlab/rate_limit_helpers_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::RateLimitHelpers, :clean_gitlab_redis_shared_state do
+RSpec.describe Gitlab::RateLimitHelpers, :clean_gitlab_redis_cache do
let(:limiter_class) do
Class.new do
include ::Gitlab::RateLimitHelpers
diff --git a/spec/lib/gitlab/reactive_cache_set_cache_spec.rb b/spec/lib/gitlab/reactive_cache_set_cache_spec.rb
index 19fb2ada476..f405b2ad86e 100644
--- a/spec/lib/gitlab/reactive_cache_set_cache_spec.rb
+++ b/spec/lib/gitlab/reactive_cache_set_cache_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::ReactiveCacheSetCache, :clean_gitlab_redis_cache do
let_it_be(:project) { create(:project) }
+
let(:cache_prefix) { 'cache_prefix' }
let(:expires_in) { 10.minutes }
let(:cache) { described_class.new(expires_in: expires_in) }
diff --git a/spec/lib/gitlab/reference_extractor_spec.rb b/spec/lib/gitlab/reference_extractor_spec.rb
index 229d49868d4..f6e69aa6533 100644
--- a/spec/lib/gitlab/reference_extractor_spec.rb
+++ b/spec/lib/gitlab/reference_extractor_spec.rb
@@ -227,7 +227,7 @@ RSpec.describe Gitlab::ReferenceExtractor do
context 'with an inactive external issue tracker' do
let(:project) { create(:project) }
- let!(:jira_service) { create(:jira_service, project: project, active: false) }
+ let!(:jira_integration) { create(:jira_integration, project: project, active: false) }
let(:issue) { create(:issue, project: project) }
context 'when GitLab issues are enabled' do
@@ -315,6 +315,7 @@ RSpec.describe Gitlab::ReferenceExtractor do
describe '#references' do
let_it_be(:user) { create(:user) }
let_it_be(:issue) { create(:issue, project: project) }
+
let(:text) { "Ref. #{issue.to_reference}" }
subject { described_class.new(project, user) }
diff --git a/spec/lib/gitlab/repo_path_spec.rb b/spec/lib/gitlab/repo_path_spec.rb
index 912efa6a5db..6cff0eff7e8 100644
--- a/spec/lib/gitlab/repo_path_spec.rb
+++ b/spec/lib/gitlab/repo_path_spec.rb
@@ -13,11 +13,11 @@ RSpec.describe ::Gitlab::RepoPath do
describe '.parse' do
context 'a repository storage path' do
- it 'parses a full repository project path' do
+ it 'parses a full project repository path' do
expect(described_class.parse(project.repository.full_path)).to eq([project, project, Gitlab::GlRepository::PROJECT, nil])
end
- it 'parses a full wiki project path' do
+ it 'parses a full project wiki repository path' do
expect(described_class.parse(project.wiki.repository.full_path)).to eq([project.wiki, project, Gitlab::GlRepository::WIKI, nil])
end
@@ -49,7 +49,7 @@ RSpec.describe ::Gitlab::RepoPath do
end
it 'parses a relative wiki path' do
- expect(described_class.parse(redirect.path + '.wiki.git')).to eq([project.wiki, project, Gitlab::GlRepository::WIKI, redirect_route])
+ expect(described_class.parse(redirect.path + '.wiki.git')).to eq([project.wiki, project, Gitlab::GlRepository::WIKI, "#{redirect_route}.wiki"])
end
it 'parses a relative path starting with /' do
@@ -57,7 +57,7 @@ RSpec.describe ::Gitlab::RepoPath do
end
it 'parses a redirected project snippet repository path' do
- expect(described_class.parse(redirect.path + "/snippets/#{project_snippet.id}.git")).to eq([project_snippet, project, Gitlab::GlRepository::SNIPPET, redirect_route])
+ expect(described_class.parse(redirect.path + "/snippets/#{project_snippet.id}.git")).to eq([project_snippet, project, Gitlab::GlRepository::SNIPPET, "#{redirect_route}/snippets/#{project_snippet.id}"])
end
end
end
@@ -70,8 +70,8 @@ RSpec.describe ::Gitlab::RepoPath do
describe '.find_project' do
context 'when finding a project by its canonical path' do
context 'when the cases match' do
- it 'returns the project and nil' do
- expect(described_class.find_project(project.full_path)).to eq([project, nil])
+ it 'returns the project' do
+ expect(described_class.find_project(project.full_path)).to eq(project)
end
end
@@ -80,45 +80,45 @@ RSpec.describe ::Gitlab::RepoPath do
# easy and safe to redirect someone to the correctly-cased URL. For git
# requests, we should accept wrongly-cased URLs because it is a pain to
# block people's git operations and force them to update remote URLs.
- it 'returns the project and nil' do
- expect(described_class.find_project(project.full_path.upcase)).to eq([project, nil])
+ it 'returns the project' do
+ expect(described_class.find_project(project.full_path.upcase)).to eq(project)
end
end
end
context 'when finding a project via a redirect' do
- it 'returns the project and nil' do
- expect(described_class.find_project(redirect.path)).to eq([project, redirect.path])
+ it 'returns the project' do
+ expect(described_class.find_project(redirect.path)).to eq(project)
end
end
end
describe '.find_snippet' do
it 'extracts path and id from personal snippet route' do
- expect(described_class.find_snippet("snippets/#{personal_snippet.id}")).to eq([personal_snippet, nil])
+ expect(described_class.find_snippet("snippets/#{personal_snippet.id}")).to eq(personal_snippet)
end
it 'extracts path and id from project snippet route' do
- expect(described_class.find_snippet("#{project.full_path}/snippets/#{project_snippet.id}")).to eq([project_snippet, nil])
+ expect(described_class.find_snippet("#{project.full_path}/snippets/#{project_snippet.id}")).to eq(project_snippet)
end
it 'returns nil for invalid snippet paths' do
aggregate_failures do
- expect(described_class.find_snippet("snippets/#{project_snippet.id}")).to eq([nil, nil])
- expect(described_class.find_snippet("#{project.full_path}/snippets/#{personal_snippet.id}")).to eq([nil, nil])
- expect(described_class.find_snippet('')).to eq([nil, nil])
+ expect(described_class.find_snippet("snippets/#{project_snippet.id}")).to be_nil
+ expect(described_class.find_snippet("#{project.full_path}/snippets/#{personal_snippet.id}")).to be_nil
+ expect(described_class.find_snippet('')).to be_nil
end
end
it 'returns nil for snippets not associated with the project' do
snippet = create(:project_snippet)
- expect(described_class.find_snippet("#{project.full_path}/snippets/#{snippet.id}")).to eq([nil, nil])
+ expect(described_class.find_snippet("#{project.full_path}/snippets/#{snippet.id}")).to be_nil
end
context 'when finding a project snippet via a redirect' do
- it 'returns the project and true' do
- expect(described_class.find_snippet("#{redirect.path}/snippets/#{project_snippet.id}")).to eq([project_snippet, redirect.path])
+ it 'returns the project snippet' do
+ expect(described_class.find_snippet("#{redirect.path}/snippets/#{project_snippet.id}")).to eq(project_snippet)
end
end
end
diff --git a/spec/lib/gitlab/repository_set_cache_spec.rb b/spec/lib/gitlab/repository_set_cache_spec.rb
index 9aeb9f11bac..4dcf9dc2c05 100644
--- a/spec/lib/gitlab/repository_set_cache_spec.rb
+++ b/spec/lib/gitlab/repository_set_cache_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe Gitlab::RepositorySetCache, :clean_gitlab_redis_cache do
shared_examples 'cache_key examples' do
it 'includes the namespace' do
- is_expected.to eq("foo:#{namespace}:set")
+ is_expected.to eq("#{gitlab_cache_namespace}:foo:#{namespace}:set")
end
context 'with a given namespace' do
@@ -23,7 +23,7 @@ RSpec.describe Gitlab::RepositorySetCache, :clean_gitlab_redis_cache do
let(:cache) { described_class.new(repository, extra_namespace: extra_namespace) }
it 'includes the full namespace' do
- is_expected.to eq("foo:#{namespace}:#{extra_namespace}:set")
+ is_expected.to eq("#{gitlab_cache_namespace}:foo:#{namespace}:#{extra_namespace}:set")
end
end
end
@@ -60,7 +60,7 @@ RSpec.describe Gitlab::RepositorySetCache, :clean_gitlab_redis_cache do
write_cache
redis_keys = Gitlab::Redis::Cache.with { |redis| redis.scan(0, match: "*") }.last
- expect(redis_keys).to include("branch_names:#{namespace}:set")
+ expect(redis_keys).to include("#{gitlab_cache_namespace}:branch_names:#{namespace}:set")
expect(cache.fetch('branch_names')).to contain_exactly('main')
end
@@ -95,8 +95,8 @@ RSpec.describe Gitlab::RepositorySetCache, :clean_gitlab_redis_cache do
expect(cache.read(:foo)).to be_empty
end
- it 'expires the new key format' do
- expect_any_instance_of(Redis).to receive(:unlink).with(cache.cache_key(:foo), cache.new_cache_key(:foo)) # rubocop:disable RSpec/AnyInstanceOf
+ it 'expires the old key format' do
+ expect_any_instance_of(Redis).to receive(:unlink).with(cache.cache_key(:foo), cache.old_cache_key(:foo)) # rubocop:disable RSpec/AnyInstanceOf
subject
end
diff --git a/spec/lib/gitlab/search_results_spec.rb b/spec/lib/gitlab/search_results_spec.rb
index a1b18172a31..2974893ec4a 100644
--- a/spec/lib/gitlab/search_results_spec.rb
+++ b/spec/lib/gitlab/search_results_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe Gitlab::SearchResults do
let_it_be(:project) { create(:project, name: 'foo') }
let_it_be(:issue) { create(:issue, project: project, title: 'foo') }
let_it_be(:milestone) { create(:milestone, project: project, title: 'foo') }
+
let(:merge_request) { create(:merge_request, source_project: project, title: 'foo') }
let(:query) { 'foo' }
let(:filters) { {} }
@@ -228,10 +229,18 @@ RSpec.describe Gitlab::SearchResults do
let!(:new_updated) { create(:issue, project: project, title: 'updated recent', updated_at: 1.day.ago) }
let!(:very_old_updated) { create(:issue, project: project, title: 'updated very old', updated_at: 1.year.ago) }
+ let!(:less_popular_result) { create(:issue, project: project, title: 'less popular', upvotes_count: 10) }
+ let!(:popular_result) { create(:issue, project: project, title: 'popular', upvotes_count: 100) }
+ let!(:non_popular_result) { create(:issue, project: project, title: 'non popular', upvotes_count: 1) }
+
include_examples 'search results sorted' do
let(:results_created) { described_class.new(user, 'sorted', Project.order(:id), sort: sort, filters: filters) }
let(:results_updated) { described_class.new(user, 'updated', Project.order(:id), sort: sort, filters: filters) }
end
+
+ include_examples 'search results sorted by popularity' do
+ let(:results_popular) { described_class.new(user, 'popular', Project.order(:id), sort: sort, filters: filters) }
+ end
end
end
diff --git a/spec/lib/gitlab/shell_spec.rb b/spec/lib/gitlab/shell_spec.rb
index b0dc34e8abf..891b3639709 100644
--- a/spec/lib/gitlab/shell_spec.rb
+++ b/spec/lib/gitlab/shell_spec.rb
@@ -5,6 +5,7 @@ require 'stringio'
RSpec.describe Gitlab::Shell do
let_it_be(:project) { create(:project, :repository) }
+
let(:repository) { project.repository }
let(:gitlab_shell) { described_class.new }
diff --git a/spec/lib/gitlab/sidekiq_config_spec.rb b/spec/lib/gitlab/sidekiq_config_spec.rb
index d216b9d0c18..d2a53185acd 100644
--- a/spec/lib/gitlab/sidekiq_config_spec.rb
+++ b/spec/lib/gitlab/sidekiq_config_spec.rb
@@ -122,4 +122,43 @@ RSpec.describe Gitlab::SidekiqConfig do
expect(described_class.sidekiq_queues_yml_outdated?).to be(false)
end
end
+
+ describe '.worker_queue_mappings' do
+ it 'returns the worker class => queue mappings based on the current routing configuration' do
+ test_routes = [
+ ['urgency=high', 'default'],
+ ['*', nil]
+ ]
+
+ allow(::Gitlab::SidekiqConfig::WorkerRouter)
+ .to receive(:global).and_return(::Gitlab::SidekiqConfig::WorkerRouter.new(test_routes))
+
+ expect(described_class.worker_queue_mappings).to include('MergeWorker' => 'default',
+ 'Ci::BuildFinishedWorker' => 'default',
+ 'BackgroundMigrationWorker' => 'background_migration',
+ 'AdminEmailWorker' => 'cronjob:admin_email')
+ end
+ end
+
+ describe '.current_worker_queue_mappings' do
+ it 'returns worker queue mappings that have queues in the current Sidekiq options' do
+ test_routes = [
+ ['urgency=high', 'default'],
+ ['*', nil]
+ ]
+
+ allow(::Gitlab::SidekiqConfig::WorkerRouter)
+ .to receive(:global).and_return(::Gitlab::SidekiqConfig::WorkerRouter.new(test_routes))
+
+ allow(Sidekiq).to receive(:options).and_return(queues: %w[default background_migration])
+
+ mappings = described_class.current_worker_queue_mappings
+
+ expect(mappings).to include('MergeWorker' => 'default',
+ 'Ci::BuildFinishedWorker' => 'default',
+ 'BackgroundMigrationWorker' => 'background_migration')
+
+ expect(mappings).not_to include('AdminEmailWorker' => 'cronjob:admin_email')
+ end
+ end
end
diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
index dfdc1420eac..4406b34e638 100644
--- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
@@ -298,6 +298,8 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
allow(Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true)
end
+ let(:dbname) { ::Gitlab::Database.dbname(ActiveRecord::Base.connection) }
+
let(:expected_end_payload_with_db) do
expected_end_payload.merge(
'db_duration_s' => a_value >= 0.1,
@@ -311,7 +313,10 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
'db_primary_count' => a_value >= 1,
'db_primary_cached_count' => 0,
'db_primary_wal_count' => 0,
- 'db_primary_duration_s' => a_value > 0
+ 'db_primary_duration_s' => a_value > 0,
+ "db_primary_#{dbname}_duration_s" => a_value > 0,
+ 'db_primary_wal_cached_count' => 0,
+ 'db_replica_wal_cached_count' => 0
)
end
@@ -333,6 +338,8 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
'db_primary_count' => 0,
'db_primary_cached_count' => 0,
'db_primary_wal_count' => 0,
+ 'db_primary_wal_cached_count' => 0,
+ 'db_replica_wal_cached_count' => 0,
'db_primary_duration_s' => 0
)
end
@@ -342,7 +349,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end
context 'when the job uses load balancing capabilities' do
- let(:expected_payload) { { 'database_chosen' => 'retry' } }
+ let(:expected_payload) { { 'load_balancing_strategy' => 'retry' } }
before do
allow(Time).to receive(:now).and_return(timestamp)
@@ -354,7 +361,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
expect(logger).to receive(:info).with(include(expected_payload)).ordered
call_subject(job, 'test_queue') do
- job[:database_chosen] = 'retry'
+ job['load_balancing_strategy'] = 'retry'
end
end
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb
index 82ca84f0697..698758a13fd 100644
--- a/spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb
@@ -3,11 +3,12 @@
require 'spec_helper'
RSpec.describe Gitlab::SidekiqMiddleware::ClientMetrics do
+ let(:enqueued_jobs_metric) { double('enqueued jobs metric', increment: true) }
+
shared_examples "a metrics middleware" do
context "with mocked prometheus" do
- let(:enqueued_jobs_metric) { double('enqueued jobs metric', increment: true) }
-
before do
+ labels[:scheduling] = 'immediate'
allow(Gitlab::Metrics).to receive(:counter).with(described_class::ENQUEUED, anything).and_return(enqueued_jobs_metric)
end
@@ -32,4 +33,35 @@ RSpec.describe Gitlab::SidekiqMiddleware::ClientMetrics do
end
it_behaves_like 'metrics middleware with worker attribution'
+
+ context 'when mounted' do
+ before do
+ stub_const('TestWorker', Class.new)
+ TestWorker.class_eval do
+ include Sidekiq::Worker
+
+ def perform(*args)
+ end
+ end
+
+ allow(Gitlab::Metrics).to receive(:counter).and_return(Gitlab::Metrics::NullMetric.instance)
+ allow(Gitlab::Metrics).to receive(:counter).with(described_class::ENQUEUED, anything).and_return(enqueued_jobs_metric)
+ end
+
+ context 'when scheduling jobs for immediate execution' do
+ it 'increments enqueued jobs metric with scheduling label set to immediate' do
+ expect(enqueued_jobs_metric).to receive(:increment).with(a_hash_including(scheduling: 'immediate'), 1)
+
+ Sidekiq::Testing.inline! { TestWorker.perform_async }
+ end
+ end
+
+ context 'when scheduling jobs for future execution' do
+ it 'increments enqueued jobs metric with scheduling label set to delayed' do
+ expect(enqueued_jobs_metric).to receive(:increment).with(a_hash_including(scheduling: 'delayed'), 1)
+
+ Sidekiq::Testing.inline! { TestWorker.perform_in(1.second) }
+ end
+ end
+ end
end
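The two new contexts assert a scheduling label of immediate versus delayed. A rough sketch, not the middleware itself, of how such a label can be derived from the job payload: Sidekiq adds an 'at' timestamp to jobs scheduled for later execution.

def scheduling_label(job)
  # Sidekiq sets 'at' (an epoch float) when a job is enqueued via perform_in/perform_at
  job.key?('at') ? 'delayed' : 'immediate'
end

scheduling_label('class' => 'TestWorker', 'args' => [])                            # => "immediate"
scheduling_label('class' => 'TestWorker', 'args' => [], 'at' => Time.now.to_f + 1) # => "delayed"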
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
index a10a8883591..d67cb95f483 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
let(:queue) { 'authorized_projects' }
let(:idempotency_key) do
- hash = Digest::SHA256.hexdigest("#{job['class']}:#{job['args'].join('-')}")
+ hash = Digest::SHA256.hexdigest("#{job['class']}:#{Sidekiq.dump_json(job['args'])}")
"#{Gitlab::Redis::Queues::SIDEKIQ_NAMESPACE}:duplicate:#{queue}:#{hash}"
end
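The idempotency key now hashes the JSON-serialized arguments instead of job['args'].join('-'). A small illustration of why, using the standard JSON module (Sidekiq.dump_json is a thin wrapper around JSON generation); the worker name below is hypothetical:

require 'digest'
require 'json'

a = ['foo-bar']
b = ['foo', 'bar']

a.join('-') == b.join('-')   # => true  (two different argument lists collide)
JSON.dump(a) == JSON.dump(b) # => false ('["foo-bar"]' vs '["foo","bar"]')

Digest::SHA256.hexdigest("AuthorizedProjectsWorker:#{JSON.dump(b)}") # stable, collision-resistant key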
diff --git a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
index 34b4541f339..3ec8d404bf0 100644
--- a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
@@ -8,11 +8,77 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
context "with mocked prometheus" do
include_context 'server metrics with mocked prometheus'
- describe '#initialize' do
+ describe '.initialize_process_metrics' do
it 'sets concurrency metrics' do
expect(concurrency_metric).to receive(:set).with({}, Sidekiq.options[:concurrency].to_i)
- subject
+ described_class.initialize_process_metrics
+ end
+
+ it 'initializes sidekiq_jobs_completion_seconds for the workers in the current Sidekiq process' do
+ allow(Gitlab::SidekiqConfig)
+ .to receive(:current_worker_queue_mappings)
+ .and_return('MergeWorker' => 'merge', 'Ci::BuildFinishedWorker' => 'default')
+
+ expect(completion_seconds_metric)
+ .to receive(:get).with(queue: 'merge',
+ worker: 'MergeWorker',
+ urgency: 'high',
+ external_dependencies: 'no',
+ feature_category: 'source_code_management',
+ boundary: '',
+ job_status: 'done')
+
+ expect(completion_seconds_metric)
+ .to receive(:get).with(queue: 'merge',
+ worker: 'MergeWorker',
+ urgency: 'high',
+ external_dependencies: 'no',
+ feature_category: 'source_code_management',
+ boundary: '',
+ job_status: 'fail')
+
+ expect(completion_seconds_metric)
+ .to receive(:get).with(queue: 'default',
+ worker: 'Ci::BuildFinishedWorker',
+ urgency: 'high',
+ external_dependencies: 'no',
+ feature_category: 'continuous_integration',
+ boundary: 'cpu',
+ job_status: 'done')
+
+ expect(completion_seconds_metric)
+ .to receive(:get).with(queue: 'default',
+ worker: 'Ci::BuildFinishedWorker',
+ urgency: 'high',
+ external_dependencies: 'no',
+ feature_category: 'continuous_integration',
+ boundary: 'cpu',
+ job_status: 'fail')
+
+ described_class.initialize_process_metrics
+ end
+
+ context 'when the sidekiq_job_completion_metric_initialize feature flag is disabled' do
+ before do
+ stub_feature_flags(sidekiq_job_completion_metric_initialize: false)
+ end
+
+ it 'sets the concurrency metric' do
+ expect(concurrency_metric).to receive(:set).with({}, Sidekiq.options[:concurrency].to_i)
+
+ described_class.initialize_process_metrics
+ end
+
+ it 'does not initialize sidekiq_jobs_completion_seconds' do
+ allow(Gitlab::SidekiqConfig)
+ .to receive(:current_worker_queue_mappings)
+ .and_return('MergeWorker' => 'merge', 'Ci::BuildFinishedWorker' => 'default')
+
+ expect(completion_seconds_metric).not_to receive(:get)
+
+ described_class.initialize_process_metrics
+ end
end
end
@@ -47,6 +113,26 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
subject.call(worker, job, :test) { nil }
end
+ it 'sets sidekiq_jobs_completion_seconds values that are compatible with those from .initialize_process_metrics' do
+ label_validator = Prometheus::Client::LabelSetValidator.new([:le])
+
+ allow(Gitlab::SidekiqConfig)
+ .to receive(:current_worker_queue_mappings)
+ .and_return('MergeWorker' => 'merge', 'Ci::BuildFinishedWorker' => 'default')
+
+ allow(completion_seconds_metric).to receive(:get) do |labels|
+ expect { label_validator.validate(labels) }.not_to raise_error
+ end
+
+ allow(completion_seconds_metric).to receive(:observe) do |labels, _duration|
+ expect { label_validator.validate(labels) }.not_to raise_error
+ end
+
+ described_class.initialize_process_metrics
+
+ subject.call(worker, job, :test) { nil }
+ end
+
it 'sets the thread name if it was nil' do
allow(Thread.current).to receive(:name).and_return(nil)
expect(Thread.current).to receive(:name=).with(Gitlab::Metrics::Samplers::ThreadsSampler::SIDEKIQ_WORKER_THREAD_NAME)
@@ -109,22 +195,20 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
end
context 'DB load balancing' do
- using RSpec::Parameterized::TableSyntax
-
subject { described_class.new }
let(:queue) { :test }
let(:worker_class) { worker.class }
- let(:job) { {} }
- let(:job_status) { :done }
- let(:labels_with_job_status) { default_labels.merge(job_status: job_status.to_s) }
- let(:default_labels) do
- { queue: queue.to_s,
- worker: worker_class.to_s,
- boundary: "",
- external_dependencies: "no",
- feature_category: "",
- urgency: "low" }
+ let(:worker) { TestWorker.new }
+ let(:client_middleware) { Gitlab::Database::LoadBalancing::SidekiqClientMiddleware.new }
+ let(:load_balancer) { double.as_null_object }
+ let(:load_balancing_metric) { double('load balancing metric') }
+ let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e" } }
+
+ def process_job
+ client_middleware.call(worker_class, job, queue, double) do
+ worker_class.process_job(job)
+ end
end
before do
@@ -132,84 +216,97 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
TestWorker.class_eval do
include Sidekiq::Worker
include WorkerAttributes
+
+ def perform(*args)
+ end
end
+
+ allow(::Gitlab::Database::LoadBalancing).to receive_message_chain(:proxy, :load_balancer).and_return(load_balancer)
+ allow(load_balancing_metric).to receive(:increment)
+ allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_load_balancing_count, anything).and_return(load_balancing_metric)
end
- let(:worker) { TestWorker.new }
+ around do |example|
+ with_sidekiq_server_middleware do |chain|
+ chain.add Gitlab::Database::LoadBalancing::SidekiqServerMiddleware
+ chain.add described_class
+ Sidekiq::Testing.inline! { example.run }
+ end
+ end
include_context 'server metrics with mocked prometheus'
+ include_context 'server metrics call'
+ include_context 'clear DB Load Balancing configuration'
- context 'when load_balancing is enabled' do
- let(:load_balancing_metric) { double('load balancing metric') }
-
- include_context 'clear DB Load Balancing configuration'
+ shared_context 'worker declaring data consistency' do
+ let(:worker_class) { LBTestWorker }
before do
- allow(::Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true)
- allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_load_balancing_count, anything).and_return(load_balancing_metric)
- end
-
- describe '#initialize' do
- it 'sets load_balancing metrics' do
- expect(Gitlab::Metrics).to receive(:counter).with(:sidekiq_load_balancing_count, anything).and_return(load_balancing_metric)
+ stub_const('LBTestWorker', Class.new(TestWorker))
+ LBTestWorker.class_eval do
+ include ApplicationWorker
- subject
+ data_consistency :delayed
end
end
+ end
- describe '#call' do
- include_context 'server metrics call'
-
- context 'when :database_chosen is provided' do
- where(:database_chosen) do
- %w[primary retry replica]
- end
-
- with_them do
- context "when #{params[:database_chosen]} is used" do
- let(:labels_with_load_balancing) do
- labels_with_job_status.merge(database_chosen: database_chosen, data_consistency: 'delayed')
- end
+ context 'when load_balancing is enabled' do
+ before do
+ allow(::Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(true)
+ end
- before do
- job[:database_chosen] = database_chosen
- job[:data_consistency] = 'delayed'
- allow(load_balancing_metric).to receive(:increment)
- end
+ describe '#call' do
+ context 'when worker declares data consistency' do
+ include_context 'worker declaring data consistency'
- it 'increment sidekiq_load_balancing_count' do
- expect(load_balancing_metric).to receive(:increment).with(labels_with_load_balancing, 1)
+ it 'increments load balancing counter with defined data consistency' do
+ process_job
- described_class.new.call(worker, job, :test) { nil }
- end
- end
+ expect(load_balancing_metric).to have_received(:increment).with(
+ a_hash_including(
+ data_consistency: :delayed,
+ load_balancing_strategy: 'replica'
+ ), 1)
end
end
- context 'when :database_chosen is not provided' do
- it 'does not increment sidekiq_load_balancing_count' do
- expect(load_balancing_metric).not_to receive(:increment)
+ context 'when worker does not declare data consistency' do
+ it 'increments load balancing counter with default data consistency' do
+ process_job
- described_class.new.call(worker, job, :test) { nil }
+ expect(load_balancing_metric).to have_received(:increment).with(
+ a_hash_including(
+ data_consistency: :always,
+ load_balancing_strategy: 'primary'
+ ), 1)
end
end
end
end
context 'when load_balancing is disabled' do
- include_context 'clear DB Load Balancing configuration'
+ include_context 'worker declaring data consistency'
before do
allow(::Gitlab::Database::LoadBalancing).to receive(:enable?).and_return(false)
end
describe '#initialize' do
- it 'doesnt set load_balancing metrics' do
+ it 'does not set load_balancing metrics' do
expect(Gitlab::Metrics).not_to receive(:counter).with(:sidekiq_load_balancing_count, anything)
subject
end
end
+
+ describe '#call' do
+ it 'does not increment load balancing counter' do
+ process_job
+
+ expect(load_balancing_metric).not_to have_received(:increment)
+ end
+ end
end
end
end
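For reference, the shape of a worker declaring its data consistency, as the LBTestWorker stub above does (a sketch using the ApplicationWorker and data_consistency APIs referenced in the spec; the class name is made up):

class ExampleLoadBalancedWorker
  include ApplicationWorker

  # Lets the load-balancing middleware serve the job from a replica, which is what
  # the load_balancing_strategy: 'replica' assertion above relies on.
  data_consistency :delayed

  def perform(*args); end
end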
diff --git a/spec/lib/gitlab/sidekiq_middleware/size_limiter/validator_spec.rb b/spec/lib/gitlab/sidekiq_middleware/size_limiter/validator_spec.rb
index 4fbe59c3c27..440eca10a88 100644
--- a/spec/lib/gitlab/sidekiq_middleware/size_limiter/validator_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/size_limiter/validator_spec.rb
@@ -230,11 +230,11 @@ RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator do
end
context 'in compress mode' do
+ let(:size_limit) { 50 }
+ let(:compression_threshold) { 30 }
let(:mode) { 'compress' }
context 'when job size is less than compression threshold' do
- let(:size_limit) { 50 }
- let(:compression_threshold) { 30 }
let(:job) { job_payload(a: 'a' * 10) }
it 'does not raise an exception' do
@@ -244,8 +244,6 @@ RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator do
end
context 'when job size is bigger than compression threshold and less than size limit after compressed' do
- let(:size_limit) { 50 }
- let(:compression_threshold) { 30 }
let(:args) { { a: 'a' * 300 } }
let(:job) { job_payload(args) }
@@ -260,9 +258,20 @@ RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator do
end
end
+ context 'when the job was already compressed' do
+ let(:job) do
+ job_payload({ a: 'a' * 10 })
+ .merge(Gitlab::SidekiqMiddleware::SizeLimiter::Compressor::COMPRESSED_KEY => true)
+ end
+
+ it 'does not compress the arguments again' do
+ expect(Gitlab::SidekiqMiddleware::SizeLimiter::Compressor).not_to receive(:compress)
+
+ expect { validate.call(TestSizeLimiterWorker, job) }.not_to raise_error
+ end
+ end
+
context 'when job size is bigger than compression threshold and bigger than size limit after compressed' do
- let(:size_limit) { 50 }
- let(:compression_threshold) { 30 }
let(:args) { { a: 'a' * 3000 } }
let(:job) { job_payload(args) }
diff --git a/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb b/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb
index fff925f8532..d6cc787f53d 100644
--- a/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb
@@ -11,6 +11,8 @@ RSpec.describe Gitlab::SidekiqMiddleware::WorkerContext::Client do
include ApplicationWorker
+ feature_category :issue_tracking
+
def self.job_for_args(args)
jobs.find { |job| job['args'] == args }
end
@@ -41,5 +43,39 @@ RSpec.describe Gitlab::SidekiqMiddleware::WorkerContext::Client do
expect(job1['meta.user']).to eq(user_per_job['job1'].username)
expect(job2['meta.user']).to eq(user_per_job['job2'].username)
end
+
+ context 'when the feature category is set in the context_proc' do
+ it 'takes the feature category from the worker, not the caller' do
+ TestWithContextWorker.bulk_perform_async_with_contexts(
+ %w(job1 job2),
+ arguments_proc: -> (name) { [name, 1, 2, 3] },
+ context_proc: -> (_) { { feature_category: 'code_review' } }
+ )
+
+ job1 = TestWithContextWorker.job_for_args(['job1', 1, 2, 3])
+ job2 = TestWithContextWorker.job_for_args(['job2', 1, 2, 3])
+
+ expect(job1['meta.feature_category']).to eq('issue_tracking')
+ expect(job2['meta.feature_category']).to eq('issue_tracking')
+ end
+ end
+
+ context 'when the feature category is already set in the surrounding block' do
+ it 'takes the feature category from the worker, not the caller' do
+ Gitlab::ApplicationContext.with_context(feature_category: 'authentication_and_authorization') do
+ TestWithContextWorker.bulk_perform_async_with_contexts(
+ %w(job1 job2),
+ arguments_proc: -> (name) { [name, 1, 2, 3] },
+ context_proc: -> (_) { {} }
+ )
+ end
+
+ job1 = TestWithContextWorker.job_for_args(['job1', 1, 2, 3])
+ job2 = TestWithContextWorker.job_for_args(['job2', 1, 2, 3])
+
+ expect(job1['meta.feature_category']).to eq('issue_tracking')
+ expect(job2['meta.feature_category']).to eq('issue_tracking')
+ end
+ end
end
end
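A rough usage sketch of the precedence both new contexts assert: the feature_category declared on the worker wins over whatever the caller puts into the surrounding application context (names are the ones used in the spec):

Gitlab::ApplicationContext.with_context(feature_category: 'code_review') do
  TestWithContextWorker.perform_async('job1', 1, 2, 3)
end

job = TestWithContextWorker.job_for_args(['job1', 1, 2, 3])
job['meta.feature_category'] # => "issue_tracking", taken from the worker, not the caller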
diff --git a/spec/lib/gitlab/sidekiq_queue_spec.rb b/spec/lib/gitlab/sidekiq_queue_spec.rb
index 44ac89c0816..2ab32657f0e 100644
--- a/spec/lib/gitlab/sidekiq_queue_spec.rb
+++ b/spec/lib/gitlab/sidekiq_queue_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe Gitlab::SidekiqQueue, :clean_gitlab_redis_queues do
context 'when the queue is not processed in time' do
before do
- allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(1, 2, 12)
+ allow(sidekiq_queue).to receive(:monotonic_time).and_return(1, 2, 12)
end
it 'returns a non-completion flag, the number of jobs deleted, and the remaining queue size' do
diff --git a/spec/lib/gitlab/sidekiq_status_spec.rb b/spec/lib/gitlab/sidekiq_status_spec.rb
index dd5b8856ccd..fc2ac29a1f9 100644
--- a/spec/lib/gitlab/sidekiq_status_spec.rb
+++ b/spec/lib/gitlab/sidekiq_status_spec.rb
@@ -2,8 +2,8 @@
require 'spec_helper'
-RSpec.describe Gitlab::SidekiqStatus do
- describe '.set', :clean_gitlab_redis_shared_state do
+RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues, :clean_gitlab_redis_shared_state do
+ describe '.set' do
it 'stores the job ID' do
described_class.set('123')
@@ -16,7 +16,7 @@ RSpec.describe Gitlab::SidekiqStatus do
end
end
- describe '.unset', :clean_gitlab_redis_shared_state do
+ describe '.unset' do
it 'removes the job ID' do
described_class.set('123')
described_class.unset('123')
@@ -29,7 +29,7 @@ RSpec.describe Gitlab::SidekiqStatus do
end
end
- describe '.all_completed?', :clean_gitlab_redis_shared_state do
+ describe '.all_completed?' do
it 'returns true if all jobs have been completed' do
expect(described_class.all_completed?(%w(123))).to eq(true)
end
@@ -41,7 +41,7 @@ RSpec.describe Gitlab::SidekiqStatus do
end
end
- describe '.running?', :clean_gitlab_redis_shared_state do
+ describe '.running?' do
it 'returns true if job is running' do
described_class.set('123')
@@ -53,7 +53,7 @@ RSpec.describe Gitlab::SidekiqStatus do
end
end
- describe '.num_running', :clean_gitlab_redis_shared_state do
+ describe '.num_running' do
it 'returns 0 if all jobs have been completed' do
expect(described_class.num_running(%w(123))).to eq(0)
end
@@ -66,7 +66,7 @@ RSpec.describe Gitlab::SidekiqStatus do
end
end
- describe '.num_completed', :clean_gitlab_redis_shared_state do
+ describe '.num_completed' do
it 'returns 1 if all jobs have been completed' do
expect(described_class.num_completed(%w(123))).to eq(1)
end
@@ -88,7 +88,7 @@ RSpec.describe Gitlab::SidekiqStatus do
end
end
- describe 'completed', :clean_gitlab_redis_shared_state do
+ describe 'completed' do
it 'returns the completed job' do
expect(described_class.completed_jids(%w(123))).to eq(['123'])
end
diff --git a/spec/lib/gitlab/spamcheck/client_spec.rb b/spec/lib/gitlab/spamcheck/client_spec.rb
index 491e5e9a662..15e963fe423 100644
--- a/spec/lib/gitlab/spamcheck/client_spec.rb
+++ b/spec/lib/gitlab/spamcheck/client_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Gitlab::Spamcheck::Client do
let(:endpoint) { 'grpc://grpc.test.url' }
let_it_be(:user) { create(:user, organization: 'GitLab') }
- let(:verdict_value) { nil }
+ let(:verdict_value) { ::Spamcheck::SpamVerdict::Verdict::ALLOW }
let(:error_value) { "" }
let(:attribs_value) do
@@ -56,6 +56,13 @@ RSpec.describe Gitlab::Spamcheck::Client do
expect(subject).to eq([expected, { "monitorMode" => "false" }, ""])
end
end
+
+ it 'includes interceptors' do
+ expect_next_instance_of(::Gitlab::Spamcheck::Client) do |client|
+ expect(client).to receive(:interceptors).and_call_original
+ end
+ subject
+ end
end
describe "#build_issue_protobuf", :aggregate_failures do
diff --git a/spec/lib/gitlab/changelog/ast_spec.rb b/spec/lib/gitlab/template_parser/ast_spec.rb
index fa15ac979fe..27361ea8632 100644
--- a/spec/lib/gitlab/changelog/ast_spec.rb
+++ b/spec/lib/gitlab/template_parser/ast_spec.rb
@@ -2,8 +2,8 @@
require 'spec_helper'
-RSpec.describe Gitlab::Changelog::AST::Identifier do
- let(:state) { Gitlab::Changelog::EvalState.new }
+RSpec.describe Gitlab::TemplateParser::AST::Identifier do
+ let(:state) { Gitlab::TemplateParser::EvalState.new }
describe '#evaluate' do
it 'evaluates a selector' do
@@ -26,8 +26,8 @@ RSpec.describe Gitlab::Changelog::AST::Identifier do
end
end
-RSpec.describe Gitlab::Changelog::AST::Integer do
- let(:state) { Gitlab::Changelog::EvalState.new }
+RSpec.describe Gitlab::TemplateParser::AST::Integer do
+ let(:state) { Gitlab::TemplateParser::EvalState.new }
describe '#evaluate' do
it 'evaluates a selector' do
@@ -44,33 +44,33 @@ RSpec.describe Gitlab::Changelog::AST::Integer do
end
end
-RSpec.describe Gitlab::Changelog::AST::Selector do
- let(:state) { Gitlab::Changelog::EvalState.new }
+RSpec.describe Gitlab::TemplateParser::AST::Selector do
+ let(:state) { Gitlab::TemplateParser::EvalState.new }
let(:data) { { 'numbers' => [10] } }
describe '#evaluate' do
it 'evaluates a selector' do
- ident = Gitlab::Changelog::AST::Identifier.new('numbers')
- int = Gitlab::Changelog::AST::Integer.new(0)
+ ident = Gitlab::TemplateParser::AST::Identifier.new('numbers')
+ int = Gitlab::TemplateParser::AST::Integer.new(0)
expect(described_class.new([ident, int]).evaluate(state, data)).to eq(10)
end
it 'evaluates a selector that returns nil' do
- int = Gitlab::Changelog::AST::Integer.new(0)
+ int = Gitlab::TemplateParser::AST::Integer.new(0)
expect(described_class.new([int]).evaluate(state, data)).to be_nil
end
end
end
-RSpec.describe Gitlab::Changelog::AST::Variable do
- let(:state) { Gitlab::Changelog::EvalState.new }
+RSpec.describe Gitlab::TemplateParser::AST::Variable do
+ let(:state) { Gitlab::TemplateParser::EvalState.new }
let(:data) { { 'numbers' => [10] } }
describe '#evaluate' do
it 'evaluates a variable' do
- node = Gitlab::Changelog::Parser
+ node = Gitlab::TemplateParser::Parser
.new
.parse_and_transform('{{numbers.0}}')
.nodes[0]
@@ -80,26 +80,26 @@ RSpec.describe Gitlab::Changelog::AST::Variable do
it 'evaluates an undefined variable' do
node =
- Gitlab::Changelog::Parser.new.parse_and_transform('{{foobar}}').nodes[0]
+ Gitlab::TemplateParser::Parser.new.parse_and_transform('{{foobar}}').nodes[0]
expect(node.evaluate(state, data)).to eq('')
end
it 'evaluates the special variable "it"' do
node =
- Gitlab::Changelog::Parser.new.parse_and_transform('{{it}}').nodes[0]
+ Gitlab::TemplateParser::Parser.new.parse_and_transform('{{it}}').nodes[0]
expect(node.evaluate(state, data)).to eq(data.to_s)
end
end
end
-RSpec.describe Gitlab::Changelog::AST::Expressions do
- let(:state) { Gitlab::Changelog::EvalState.new }
+RSpec.describe Gitlab::TemplateParser::AST::Expressions do
+ let(:state) { Gitlab::TemplateParser::EvalState.new }
describe '#evaluate' do
it 'evaluates all expressions' do
- node = Gitlab::Changelog::Parser
+ node = Gitlab::TemplateParser::Parser
.new
.parse_and_transform('{{number}}foo')
@@ -108,8 +108,8 @@ RSpec.describe Gitlab::Changelog::AST::Expressions do
end
end
-RSpec.describe Gitlab::Changelog::AST::Text do
- let(:state) { Gitlab::Changelog::EvalState.new }
+RSpec.describe Gitlab::TemplateParser::AST::Text do
+ let(:state) { Gitlab::TemplateParser::EvalState.new }
describe '#evaluate' do
it 'returns the text' do
@@ -118,12 +118,12 @@ RSpec.describe Gitlab::Changelog::AST::Text do
end
end
-RSpec.describe Gitlab::Changelog::AST::If do
- let(:state) { Gitlab::Changelog::EvalState.new }
+RSpec.describe Gitlab::TemplateParser::AST::If do
+ let(:state) { Gitlab::TemplateParser::EvalState.new }
describe '#evaluate' do
it 'evaluates a truthy if expression without an else clause' do
- node = Gitlab::Changelog::Parser
+ node = Gitlab::TemplateParser::Parser
.new
.parse_and_transform('{% if thing %}foo{% end %}')
.nodes[0]
@@ -132,7 +132,7 @@ RSpec.describe Gitlab::Changelog::AST::If do
end
it 'evaluates a falsy if expression without an else clause' do
- node = Gitlab::Changelog::Parser
+ node = Gitlab::TemplateParser::Parser
.new
.parse_and_transform('{% if thing %}foo{% end %}')
.nodes[0]
@@ -141,7 +141,7 @@ RSpec.describe Gitlab::Changelog::AST::If do
end
it 'evaluates a falsy if expression with an else clause' do
- node = Gitlab::Changelog::Parser
+ node = Gitlab::TemplateParser::Parser
.new
.parse_and_transform('{% if thing %}foo{% else %}bar{% end %}')
.nodes[0]
@@ -177,13 +177,13 @@ RSpec.describe Gitlab::Changelog::AST::If do
end
end
-RSpec.describe Gitlab::Changelog::AST::Each do
- let(:state) { Gitlab::Changelog::EvalState.new }
+RSpec.describe Gitlab::TemplateParser::AST::Each do
+ let(:state) { Gitlab::TemplateParser::EvalState.new }
describe '#evaluate' do
it 'evaluates the expression' do
data = { 'animals' => [{ 'name' => 'Cat' }, { 'name' => 'Dog' }] }
- node = Gitlab::Changelog::Parser
+ node = Gitlab::TemplateParser::Parser
.new
.parse_and_transform('{% each animals %}{{name}}{% end %}')
.nodes[0]
@@ -193,7 +193,7 @@ RSpec.describe Gitlab::Changelog::AST::Each do
it 'returns an empty string when the input is not a collection' do
data = { 'animals' => 10 }
- node = Gitlab::Changelog::Parser
+ node = Gitlab::TemplateParser::Parser
.new
.parse_and_transform('{% each animals %}{{name}}{% end %}')
.nodes[0]
@@ -237,10 +237,10 @@ RSpec.describe Gitlab::Changelog::AST::Each do
TPL
node =
- Gitlab::Changelog::Parser.new.parse_and_transform(template).nodes[0]
+ Gitlab::TemplateParser::Parser.new.parse_and_transform(template).nodes[0]
expect { node.evaluate(state, data) }
- .to raise_error(Gitlab::Changelog::Error)
+ .to raise_error(Gitlab::TemplateParser::Error)
end
end
end
diff --git a/spec/lib/gitlab/changelog/parser_spec.rb b/spec/lib/gitlab/template_parser/parser_spec.rb
index 1d353f5eb35..22247cbb693 100644
--- a/spec/lib/gitlab/changelog/parser_spec.rb
+++ b/spec/lib/gitlab/template_parser/parser_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Changelog::Parser do
+RSpec.describe Gitlab::TemplateParser::Parser do
let(:parser) { described_class.new }
describe '#root' do
@@ -67,12 +67,12 @@ RSpec.describe Gitlab::Changelog::Parser do
it 'parses and transforms a template' do
node = parser.parse_and_transform('foo')
- expect(node).to be_instance_of(Gitlab::Changelog::AST::Expressions)
+ expect(node).to be_instance_of(Gitlab::TemplateParser::AST::Expressions)
end
it 'raises parsing errors using a custom error class' do
expect { parser.parse_and_transform('{% each') }
- .to raise_error(Gitlab::Changelog::Error)
+ .to raise_error(Gitlab::TemplateParser::Error)
end
end
end
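The rename keeps the template syntax the parser specs exercise. A brief usage sketch; the evaluated output in the comment is indicative, based on the node behaviour tested above:

state  = Gitlab::TemplateParser::EvalState.new
parser = Gitlab::TemplateParser::Parser.new

ast = parser.parse_and_transform('{% each animals %}{{name}} {% end %}')
ast.evaluate(state, { 'animals' => [{ 'name' => 'Cat' }, { 'name' => 'Dog' }] })
# => roughly "Cat Dog ": each entry is rendered with its own fields in scope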
diff --git a/spec/lib/gitlab/tracking/destinations/snowplow_spec.rb b/spec/lib/gitlab/tracking/destinations/snowplow_spec.rb
index 65597e6568d..f8e73a807c6 100644
--- a/spec/lib/gitlab/tracking/destinations/snowplow_spec.rb
+++ b/spec/lib/gitlab/tracking/destinations/snowplow_spec.rb
@@ -21,7 +21,10 @@ RSpec.describe Gitlab::Tracking::Destinations::Snowplow do
expect(SnowplowTracker::AsyncEmitter)
.to receive(:new)
- .with('gitfoo.com', { protocol: 'https' })
+ .with('gitfoo.com',
+ { protocol: 'https',
+ on_success: subject.method(:increment_successful_events_emissions),
+ on_failure: subject.method(:failure_callback) })
.and_return(emitter)
expect(SnowplowTracker::Tracker)
@@ -40,6 +43,18 @@ RSpec.describe Gitlab::Tracking::Destinations::Snowplow do
.to have_received(:track_struct_event)
.with('category', 'action', 'label', 'property', 1.5, nil, (Time.now.to_f * 1000).to_i)
end
+
+      it 'increases total snowplow events counter' do
+ counter = double
+
+ expect(counter).to receive(:increment)
+ expect(Gitlab::Metrics).to receive(:counter)
+ .with(:gitlab_snowplow_events_total,
+ 'Number of Snowplow events')
+ .and_return(counter)
+
+ subject.event('category', 'action', label: 'label', property: 'property', value: 1.5)
+ end
end
end
@@ -52,4 +67,43 @@ RSpec.describe Gitlab::Tracking::Destinations::Snowplow do
end
end
end
+
+ context 'callbacks' do
+ describe 'on success' do
+      it 'increases gitlab_snowplow_successful_events_total counter' do
+ counter = double
+
+ expect(counter).to receive(:increment).with({}, 2)
+ expect(Gitlab::Metrics).to receive(:counter)
+ .with(:gitlab_snowplow_successful_events_total,
+ 'Number of successful Snowplow events emissions')
+ .and_return(counter)
+
+ subject.method(:increment_successful_events_emissions).call(2)
+ end
+ end
+
+ describe 'on failure' do
+      it 'increases gitlab_snowplow_failed_events_total counter and logs failures', :aggregate_failures do
+ counter = double
+ error_message = "Admin::AuditLogsController search_audit_event failed to be reported to collector at gitfoo.com"
+ failures = [{ "e" => "se",
+ "se_ca" => "Admin::AuditLogsController",
+ "se_ac" => "search_audit_event" }]
+ allow(Gitlab::Metrics).to receive(:counter)
+ .with(:gitlab_snowplow_successful_events_total,
+ 'Number of successful Snowplow events emissions')
+ .and_call_original
+
+ expect(Gitlab::AppLogger).to receive(:error).with(error_message)
+ expect(counter).to receive(:increment).with({}, 1)
+ expect(Gitlab::Metrics).to receive(:counter)
+ .with(:gitlab_snowplow_failed_events_total,
+ 'Number of failed Snowplow events emissions')
+ .and_return(counter)
+
+ subject.method(:failure_callback).call(2, failures)
+ end
+ end
+ end
end
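The emitter options asserted above mirror the snowplow-tracker gem's callback API: on_success receives the number of events delivered, on_failure receives the success count plus the failed event payloads. A sketch of the wiring, using the callback names the spec references on the destination object:

emitter = SnowplowTracker::AsyncEmitter.new(
  'gitfoo.com',
  protocol:   'https',
  on_success: method(:increment_successful_events_emissions), # called with the success count
  on_failure: method(:failure_callback)                       # called with (success_count, failures)
)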
diff --git a/spec/lib/gitlab/usage/docs/helper_spec.rb b/spec/lib/gitlab/usage/docs/helper_spec.rb
new file mode 100644
index 00000000000..e2bb1d8d818
--- /dev/null
+++ b/spec/lib/gitlab/usage/docs/helper_spec.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Docs::Helper do
+ subject(:helper) { klass.new }
+
+ let_it_be(:klass) do
+ Class.new do
+ include Gitlab::Usage::Docs::Helper
+ end
+ end
+
+ let(:metric_definition) do
+ {
+ data_category: 'Standard',
+ name: 'test_metric',
+ description: description,
+ product_group: 'group::product intelligence',
+ status: 'data_available',
+ tier: %w(free premium)
+ }
+ end
+
+ let(:description) { 'Metric description' }
+
+ describe '#render_name' do
+ it { expect(helper.render_name(metric_definition[:name])).to eq('### `test_metric`') }
+ end
+
+ describe '#render_description' do
+ context 'without description' do
+ let(:description) { nil }
+
+ it { expect(helper.render_description(metric_definition)).to eq('Missing description') }
+ end
+
+    context 'with description' do
+ it { expect(helper.render_description(metric_definition)).to eq('Metric description') }
+ end
+ end
+
+ describe '#render_yaml_link' do
+ let(:yaml_link) { 'config/metrics/license/test_metric.yml' }
+ let(:expected) { "[YAML definition](#{yaml_link})" }
+
+ it { expect(helper.render_yaml_link(yaml_link)).to eq(expected) }
+ end
+
+ describe '#render_status' do
+ let(:expected) { "Status: `data_available`" }
+
+ it { expect(helper.render_status(metric_definition)).to eq(expected) }
+ end
+
+ describe '#render_owner' do
+ let(:expected) { "Group: `group::product intelligence`" }
+
+ it { expect(helper.render_owner(metric_definition)).to eq(expected) }
+ end
+
+ describe '#render_tiers' do
+ let(:expected) { "Tiers: `free`, `premium`" }
+
+ it { expect(helper.render_tiers(metric_definition)).to eq(expected) }
+ end
+
+ describe '#render_data_category' do
+ let(:expected) { 'Data Category: `Standard`' }
+
+ it { expect(helper.render_data_category(metric_definition)).to eq(expected) }
+ end
+end
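A hypothetical sketch of helper methods that would satisfy the expectations in this new spec (an assumption for illustration; the real Gitlab::Usage::Docs::Helper may be implemented differently):

module DocsHelperSketch
  def render_name(name)
    "### `#{name}`"
  end

  def render_description(definition)
    definition[:description] || 'Missing description'
  end

  def render_tiers(definition)
    "Tiers: #{definition[:tier].map { |tier| "`#{tier}`" }.join(', ')}"
  end

  def render_data_category(definition)
    "Data Category: `#{definition[:data_category]}`"
  end
end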
diff --git a/spec/lib/gitlab/usage/metric_definition_spec.rb b/spec/lib/gitlab/usage/metric_definition_spec.rb
index 1ed639b2f7d..f3c3e5fc550 100644
--- a/spec/lib/gitlab/usage/metric_definition_spec.rb
+++ b/spec/lib/gitlab/usage/metric_definition_spec.rb
@@ -17,7 +17,8 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
data_source: 'database',
distribution: %w(ee ce),
tier: %w(free starter premium ultimate bronze silver gold),
- name: 'count_boards'
+ name: 'uuid',
+ data_category: 'Standard'
}
end
@@ -63,6 +64,7 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
:value_type | nil
:value_type | 'test'
:status | nil
+ :data_category | nil
:key_path | nil
:product_group | nil
:time_frame | nil
@@ -196,7 +198,8 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
time_frame: 'none',
data_source: 'database',
distribution: %w(ee ce),
- tier: %w(free starter premium ultimate bronze silver gold)
+ tier: %w(free starter premium ultimate bronze silver gold),
+ data_category: 'Optional'
}
end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/collected_data_categories_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/collected_data_categories_metric_spec.rb
new file mode 100644
index 00000000000..8f52d550e5c
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/collected_data_categories_metric_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CollectedDataCategoriesMetric do
+ it_behaves_like 'a correct instrumented metric value', {} do
+ let(:expected_value) { %w[Standard Subscription Operational Optional] }
+
+ before do
+ allow_next_instance_of(ServicePing::PermitDataCategoriesService) do |instance|
+ expect(instance).to receive(:execute).and_return(expected_value)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb
new file mode 100644
index 00000000000..5e36820df5e
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::DatabaseMetric do
+ subject do
+ described_class.tap do |m|
+ m.relation { Issue }
+ m.operation :count
+ m.start { m.relation.minimum(:id) }
+ m.finish { m.relation.maximum(:id) }
+ end.new(time_frame: 'all')
+ end
+
+ describe '#value' do
+ let_it_be(:issue_1) { create(:issue) }
+ let_it_be(:issue_2) { create(:issue) }
+ let_it_be(:issue_3) { create(:issue) }
+ let_it_be(:issues) { Issue.all }
+
+ before do
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
+ end
+
+ it 'calculates a correct result' do
+ expect(subject.value).to eq(3)
+ end
+
+ it 'does not cache the result of start and finish', :request_store, :use_clean_rails_redis_caching do
+ expect(Gitlab::Cache).not_to receive(:fetch_once)
+ expect(subject).to receive(:count).with(any_args, hash_including(start: issues.min_by(&:id).id, finish: issues.max_by(&:id).id)).and_call_original
+
+ subject.value
+
+ expect(Rails.cache.read('metric_instrumentation/special_issue_count_minimum_id')).to eq(nil)
+ expect(Rails.cache.read('metric_instrumentation/special_issue_count_maximum_id')).to eq(nil)
+ end
+
+ context 'with start and finish not called' do
+ subject do
+ described_class.tap do |m|
+ m.relation { Issue }
+ m.operation :count
+ end.new(time_frame: 'all')
+ end
+
+ it 'calculates a correct result' do
+ expect(subject.value).to eq(3)
+ end
+ end
+
+ context 'with cache_start_and_finish_as called' do
+ subject do
+ described_class.tap do |m|
+ m.relation { Issue }
+ m.operation :count
+ m.start { m.relation.minimum(:id) }
+ m.finish { m.relation.maximum(:id) }
+ m.cache_start_and_finish_as :special_issue_count
+ end.new(time_frame: 'all')
+ end
+
+ it 'caches using the key name passed', :request_store, :use_clean_rails_redis_caching do
+ expect(Gitlab::Cache).to receive(:fetch_once).with('metric_instrumentation/special_issue_count_minimum_id', any_args).and_call_original
+ expect(Gitlab::Cache).to receive(:fetch_once).with('metric_instrumentation/special_issue_count_maximum_id', any_args).and_call_original
+ expect(subject).to receive(:count).with(any_args, hash_including(start: issues.min_by(&:id).id, finish: issues.max_by(&:id).id)).and_call_original
+
+ subject.value
+
+ expect(Rails.cache.read('metric_instrumentation/special_issue_count_minimum_id')).to eq(issues.min_by(&:id).id)
+ expect(Rails.cache.read('metric_instrumentation/special_issue_count_maximum_id')).to eq(issues.max_by(&:id).id)
+ end
+ end
+ end
+end
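In isolation, the class-level DSL those subjects rely on (a usage sketch; Issue is the model used in the spec, and the batch-count bounds come from start/finish):

metric_class = Gitlab::Usage::Metrics::Instrumentations::DatabaseMetric.tap do |m|
  m.relation { Issue }
  m.operation :count
  m.start  { m.relation.minimum(:id) } # lower bound for the batched count
  m.finish { m.relation.maximum(:id) } # upper bound for the batched count
end

metric_class.new(time_frame: 'all').value # => number of issues between the two bounds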
diff --git a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
index 4efacae0a48..d89202ae7fe 100644
--- a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
@@ -46,7 +46,8 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
'pipeline_authoring',
'epics_usage',
'epic_boards_usage',
- 'secure'
+ 'secure',
+ 'network_policies'
)
end
end
diff --git a/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb
index 78cc27c8569..6f201b43390 100644
--- a/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Gitlab::UsageDataCounters::PackageEventCounter, :clean_gitlab_red
end
it 'includes the right events' do
- expect(described_class::KNOWN_EVENTS.size).to eq 52
+ expect(described_class::KNOWN_EVENTS.size).to eq 63
end
described_class::KNOWN_EVENTS.each do |event|
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index ea82de186f5..d84974e562a 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -435,8 +435,8 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
create(:issue, project: project, author: User.support_bot)
create(:note, project: project, noteable: issue, author: user)
create(:todo, project: project, target: issue, author: user)
- create(:jira_service, :jira_cloud_service, active: true, project: create(:project, :jira_dvcs_cloud, creator: user))
- create(:jira_service, active: true, project: create(:project, :jira_dvcs_server, creator: user))
+ create(:jira_integration, :jira_cloud_service, active: true, project: create(:project, :jira_dvcs_cloud, creator: user))
+ create(:jira_integration, active: true, project: create(:project, :jira_dvcs_server, creator: user))
end
expect(described_class.usage_activity_by_stage_plan({})).to include(
@@ -1078,6 +1078,16 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
it 'gathers gitaly apdex', :aggregate_failures do
expect(subject[:settings][:gitaly_apdex]).to be_within(0.001).of(0.95)
end
+
+ it 'reports collected data categories' do
+ expected_value = %w[Standard Subscription Operational Optional]
+
+ allow_next_instance_of(ServicePing::PermitDataCategoriesService) do |instance|
+ expect(instance).to receive(:execute).and_return(expected_value)
+ end
+
+ expect(subject[:settings][:collected_data_categories]).to eq(expected_value)
+ end
end
end
@@ -1269,7 +1279,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
let(:categories) { ::Gitlab::UsageDataCounters::HLLRedisCounter.categories }
let(:ineligible_total_categories) do
- %w[source_code ci_secrets_management incident_management_alerts snippets terraform incident_management_oncall secure]
+ %w[source_code ci_secrets_management incident_management_alerts snippets terraform incident_management_oncall secure network_policies]
end
context 'with redis_hll_tracking feature enabled' do
diff --git a/spec/lib/gitlab/utils/usage_data_spec.rb b/spec/lib/gitlab/utils/usage_data_spec.rb
index 11b2a12f228..8f705d6a487 100644
--- a/spec/lib/gitlab/utils/usage_data_spec.rb
+++ b/spec/lib/gitlab/utils/usage_data_spec.rb
@@ -377,7 +377,7 @@ RSpec.describe Gitlab::Utils::UsageData do
shared_examples 'try to query Prometheus with given address' do
context 'Prometheus is ready' do
before do
- stub_request(:get, /\/-\/ready/)
+ stub_request(:get, %r{/-/ready})
.to_return(status: 200, body: 'Prometheus is Ready.\n')
end
@@ -387,7 +387,7 @@ RSpec.describe Gitlab::Utils::UsageData do
context 'Prometheus is not reachable through HTTPS' do
before do
- stub_request(:get, /https:\/\/.*/).to_raise(Errno::ECONNREFUSED)
+ stub_request(:get, %r{https://.*}).to_raise(Errno::ECONNREFUSED)
end
context 'Prometheus is reachable through HTTP' do
@@ -396,7 +396,7 @@ RSpec.describe Gitlab::Utils::UsageData do
context 'Prometheus is not reachable through HTTP' do
before do
- stub_request(:get, /http:\/\/.*/).to_raise(Errno::ECONNREFUSED)
+ stub_request(:get, %r{http://.*}).to_raise(Errno::ECONNREFUSED)
end
it_behaves_like 'does not query data from Prometheus'
@@ -406,7 +406,7 @@ RSpec.describe Gitlab::Utils::UsageData do
context 'Prometheus is not ready' do
before do
- stub_request(:get, /\/-\/ready/)
+ stub_request(:get, %r{/-/ready})
.to_return(status: 503, body: 'Service Unavailable')
end
diff --git a/spec/lib/gitlab/utils_spec.rb b/spec/lib/gitlab/utils_spec.rb
index a7ccce0aaab..f1601294c07 100644
--- a/spec/lib/gitlab/utils_spec.rb
+++ b/spec/lib/gitlab/utils_spec.rb
@@ -351,6 +351,22 @@ RSpec.describe Gitlab::Utils do
end
end
+ describe '.deep_symbolized_access' do
+ let(:hash) do
+ { "variables" => [{ "key" => "VAR1", "value" => "VALUE2" }] }
+ end
+
+ subject { described_class.deep_symbolized_access(hash) }
+
+    it 'allows access to hash keys with symbols' do
+ expect(subject[:variables]).to be_a(Array)
+ end
+
+    it 'allows access to array keys with symbols' do
+ expect(subject[:variables].first[:key]).to eq('VAR1')
+ end
+ end
+
describe '.try_megabytes_to_bytes' do
context 'when the size can be converted to megabytes' do
it 'returns the size in megabytes' do
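A hypothetical implementation sketch that would satisfy the new .deep_symbolized_access examples (an assumption, not GitLab's actual method body), using ActiveSupport's indifferent-access hashes, which also convert nested hashes and arrays:

require 'active_support/core_ext/hash/indifferent_access'

def deep_symbolized_access(data)
  data.is_a?(Hash) ? data.with_indifferent_access : data
end

result = deep_symbolized_access('variables' => [{ 'key' => 'VAR1', 'value' => 'VALUE2' }])
result[:variables]             # => the array, reachable with a symbol key
result[:variables].first[:key] # => "VAR1"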
diff --git a/spec/lib/gitlab/wiki_file_finder_spec.rb b/spec/lib/gitlab/wiki_file_finder_spec.rb
index 7abe92a5a2b..3102f628de9 100644
--- a/spec/lib/gitlab/wiki_file_finder_spec.rb
+++ b/spec/lib/gitlab/wiki_file_finder_spec.rb
@@ -4,12 +4,11 @@ require 'spec_helper'
RSpec.describe Gitlab::WikiFileFinder do
describe '#find' do
- let(:project) { create(:project, :public, :wiki_repo) }
- let(:wiki) { build(:project_wiki, project: project) }
-
- before do
- wiki.create_page('Files/Title', 'Content')
- wiki.create_page('CHANGELOG', 'Files example')
+ let_it_be(:project) do
+ create(:project, :public, :wiki_repo).tap do |project|
+ project.wiki.create_page('Files/Title', 'Content')
+ project.wiki.create_page('CHANGELOG', 'Files example')
+ end
end
it_behaves_like 'file finder' do
diff --git a/spec/lib/marginalia_spec.rb b/spec/lib/marginalia_spec.rb
index 040f70236c6..dd57cd7980e 100644
--- a/spec/lib/marginalia_spec.rb
+++ b/spec/lib/marginalia_spec.rb
@@ -89,21 +89,7 @@ RSpec.describe 'Marginalia spec' do
end
end
- describe 'for ActionMailer delivery jobs' do
- # We need to ensure that this runs through Sidekiq to take
- # advantage of the middleware. There is a Rails bug that means we
- # have to do some extra steps to make this happen:
- # https://github.com/rails/rails/issues/37270#issuecomment-553927324
- around do |example|
- descendants = ActiveJob::Base.descendants + [ActiveJob::Base]
- descendants.each(&:disable_test_adapter)
- ActiveJob::Base.queue_adapter = :sidekiq
-
- example.run
-
- descendants.each { |a| a.queue_adapter = :test }
- end
-
+ describe 'for ActionMailer delivery jobs', :sidekiq_mailers do
let(:delivery_job) { MarginaliaTestMailer.first_user.deliver_later }
let(:recorded) do
diff --git a/spec/lib/object_storage/direct_upload_spec.rb b/spec/lib/object_storage/direct_upload_spec.rb
index 4b374452c0a..006f4f603b6 100644
--- a/spec/lib/object_storage/direct_upload_spec.rb
+++ b/spec/lib/object_storage/direct_upload_spec.rb
@@ -136,16 +136,6 @@ RSpec.describe ObjectStorage::DirectUpload do
end
end
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(use_workhorse_s3_client: false)
- end
-
- it 'does not enable Workhorse client' do
- expect(subject[:UseWorkhorseClient]).to be false
- end
- end
-
context 'when V2 signatures are used' do
before do
credentials[:aws_signature_version] = 2
diff --git a/spec/lib/security/ci_configuration/sast_build_action_spec.rb b/spec/lib/security/ci_configuration/sast_build_action_spec.rb
index 5337e8d9c39..d93175249f5 100644
--- a/spec/lib/security/ci_configuration/sast_build_action_spec.rb
+++ b/spec/lib/security/ci_configuration/sast_build_action_spec.rb
@@ -323,6 +323,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
+ # Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
stages:
@@ -342,6 +343,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
+ # Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
stages:
@@ -358,6 +360,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
+ # Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
stages:
@@ -380,6 +383,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
+ # Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
stages:
@@ -415,6 +419,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
+ # Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
stages:
@@ -439,6 +444,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
+ # Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
stages:
@@ -461,6 +467,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
+ # Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
stages:
@@ -484,6 +491,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
+ # Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
stages:
@@ -507,6 +515,7 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
+ # Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
stages:
diff --git a/spec/lib/security/ci_configuration/secret_detection_build_action_spec.rb b/spec/lib/security/ci_configuration/secret_detection_build_action_spec.rb
index f6181c6ef7a..146c60ffb6e 100644
--- a/spec/lib/security/ci_configuration/secret_detection_build_action_spec.rb
+++ b/spec/lib/security/ci_configuration/secret_detection_build_action_spec.rb
@@ -16,6 +16,7 @@ RSpec.describe Security::CiConfiguration::SecretDetectionBuildAction do
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
+ # Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
stages:
@@ -62,6 +63,7 @@ RSpec.describe Security::CiConfiguration::SecretDetectionBuildAction do
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
+ # Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
stages:
@@ -111,6 +113,7 @@ RSpec.describe Security::CiConfiguration::SecretDetectionBuildAction do
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
+ # Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
include:
@@ -131,6 +134,7 @@ RSpec.describe Security::CiConfiguration::SecretDetectionBuildAction do
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
+ # Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
include:
diff --git a/spec/lib/serializers/symbolized_json_spec.rb b/spec/lib/serializers/symbolized_json_spec.rb
new file mode 100644
index 00000000000..b30fb074ddd
--- /dev/null
+++ b/spec/lib/serializers/symbolized_json_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Serializers::SymbolizedJson do
+ describe '.dump' do
+ let(:obj) { { key: "value" } }
+
+ subject { described_class.dump(obj) }
+
+ it 'returns a hash' do
+ is_expected.to eq(obj)
+ end
+ end
+
+ describe '.load' do
+ let(:data_string) { '{"key":"value","variables":[{"key":"VAR1","value":"VALUE1"}]}' }
+ let(:data_hash) { Gitlab::Json.parse(data_string) }
+
+ context 'when loading a hash' do
+ subject { described_class.load(data_hash) }
+
+ it 'decodes a string' do
+ is_expected.to be_a(Hash)
+ end
+
+      it 'allows access with symbols' do
+ expect(subject[:key]).to eq('value')
+ expect(subject[:variables].first[:key]).to eq('VAR1')
+ end
+ end
+
+ context 'when loading a nil' do
+ subject { described_class.load(nil) }
+
+ it 'returns nil' do
+ is_expected.to be_nil
+ end
+ end
+ end
+end
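A hypothetical serializer satisfying the spec above (an assumption, not the real class): dump passes the hash through unchanged, while load wraps already-parsed JSON so it can be read with symbol keys, here reusing the deep_symbolized_access helper covered earlier:

module SymbolizedJsonSketch
  def self.dump(obj)
    obj
  end

  def self.load(data)
    return if data.nil?

    Gitlab::Utils.deep_symbolized_access(data)
  end
end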
diff --git a/spec/lib/sidebars/projects/menus/deployments_menu_spec.rb b/spec/lib/sidebars/projects/menus/deployments_menu_spec.rb
index 4a60dfde674..3149c316c63 100644
--- a/spec/lib/sidebars/projects/menus/deployments_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/deployments_menu_spec.rb
@@ -39,33 +39,22 @@ RSpec.describe Sidebars::Projects::Menus::DeploymentsMenu do
end
end
- shared_examples 'feature flag :sidebar_refactor disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- specify { is_expected.to be_nil }
- end
-
describe 'Feature Flags' do
let(:item_id) { :feature_flags }
it_behaves_like 'access rights checks'
- it_behaves_like 'feature flag :sidebar_refactor disabled'
end
describe 'Environments' do
let(:item_id) { :environments }
it_behaves_like 'access rights checks'
- it_behaves_like 'feature flag :sidebar_refactor disabled'
end
describe 'Releases' do
let(:item_id) { :releases }
it_behaves_like 'access rights checks'
- it_behaves_like 'feature flag :sidebar_refactor disabled'
end
end
end
diff --git a/spec/lib/sidebars/projects/menus/infrastructure_menu_spec.rb b/spec/lib/sidebars/projects/menus/infrastructure_menu_spec.rb
new file mode 100644
index 00000000000..2415598da9c
--- /dev/null
+++ b/spec/lib/sidebars/projects/menus/infrastructure_menu_spec.rb
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Projects::Menus::InfrastructureMenu do
+ let(:project) { build(:project) }
+ let(:user) { project.owner }
+ let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project, show_cluster_hint: false) }
+
+ describe '#render?' do
+ subject { described_class.new(context) }
+
+ context 'when menu does not have any menu items' do
+ it 'returns false' do
+ allow(subject).to receive(:has_renderable_items?).and_return(false)
+
+ expect(subject.render?).to be false
+ end
+ end
+
+ context 'when menu has menu items' do
+ it 'returns true' do
+ expect(subject.render?).to be true
+ end
+ end
+ end
+
+ describe '#link' do
+ subject { described_class.new(context) }
+
+ context 'when Kubernetes menu item is visible' do
+ it 'menu link points to Kubernetes page' do
+ expect(subject.link).to eq find_menu_item(:kubernetes).link
+ end
+ end
+
+ context 'when Kubernetes menu item is not visible' do
+ before do
+ subject.renderable_items.delete(find_menu_item(:kubernetes))
+ end
+
+ it 'menu link points to Serverless page' do
+ expect(subject.link).to eq find_menu_item(:serverless).link
+ end
+
+ context 'when Serverless menu is not visible' do
+ before do
+ subject.renderable_items.delete(find_menu_item(:serverless))
+ end
+
+ it 'menu link points to Terraform page' do
+ expect(subject.link).to eq find_menu_item(:terraform).link
+ end
+ end
+ end
+
+ def find_menu_item(menu_item)
+ subject.renderable_items.find { |i| i.item_id == menu_item }
+ end
+ end
+
+ describe 'Menu Items' do
+ subject { described_class.new(context).renderable_items.index { |e| e.item_id == item_id } }
+
+ shared_examples 'access rights checks' do
+ specify { is_expected.not_to be_nil }
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ specify { is_expected.to be_nil }
+ end
+ end
+
+ describe 'Kubernetes' do
+ let(:item_id) { :kubernetes }
+
+ it_behaves_like 'access rights checks'
+ end
+
+ describe 'Serverless' do
+ let(:item_id) { :serverless }
+
+ it_behaves_like 'access rights checks'
+ end
+
+ describe 'Terraform' do
+ let(:item_id) { :terraform }
+
+ it_behaves_like 'access rights checks'
+ end
+ end
+end
diff --git a/spec/lib/sidebars/projects/menus/issues_menu_spec.rb b/spec/lib/sidebars/projects/menus/issues_menu_spec.rb
index ac62cd7594a..e5d486bbe8f 100644
--- a/spec/lib/sidebars/projects/menus/issues_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/issues_menu_spec.rb
@@ -65,22 +65,4 @@ RSpec.describe Sidebars::Projects::Menus::IssuesMenu do
end
end
end
-
- describe 'Menu Items' do
- subject { described_class.new(context).renderable_items.index { |e| e.item_id == item_id } }
-
- describe 'Labels' do
- let(:item_id) { :labels }
-
- specify { is_expected.to be_nil }
-
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- specify { is_expected.not_to be_nil }
- end
- end
- end
end
diff --git a/spec/lib/sidebars/projects/menus/labels_menu_spec.rb b/spec/lib/sidebars/projects/menus/labels_menu_spec.rb
deleted file mode 100644
index e1420f9e61b..00000000000
--- a/spec/lib/sidebars/projects/menus/labels_menu_spec.rb
+++ /dev/null
@@ -1,61 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Sidebars::Projects::Menus::LabelsMenu do
- let(:project) { build(:project) }
- let(:user) { project.owner }
- let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project) }
-
- subject { described_class.new(context) }
-
- it 'does not contain any sub menu' do
- expect(subject.has_items?).to eq false
- end
-
- describe '#render?' do
- let(:issues_enabled) { true }
-
- before do
- allow(project).to receive(:issues_enabled?).and_return(issues_enabled)
- end
-
- context 'when feature flag :sidebar_refactor is enabled' do
- let(:issues_enabled) { false }
-
- it 'returns false' do
- expect(subject.render?).to be_falsey
- end
- end
-
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- context 'when user can read labels' do
- context 'when issues feature is enabled' do
- it 'returns false' do
- expect(subject.render?).to be_falsey
- end
- end
-
- context 'when issues feature is disabled' do
- let(:issues_enabled) { false }
-
- it 'returns true' do
- expect(subject.render?).to be_truthy
- end
- end
- end
-
- context 'when user cannot read labels' do
- let(:user) { nil }
-
- it 'returns false' do
- expect(subject.render?).to be_falsey
- end
- end
- end
- end
-end
diff --git a/spec/lib/sidebars/projects/menus/members_menu_spec.rb b/spec/lib/sidebars/projects/menus/members_menu_spec.rb
deleted file mode 100644
index dcc085c2957..00000000000
--- a/spec/lib/sidebars/projects/menus/members_menu_spec.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Sidebars::Projects::Menus::MembersMenu do
- let(:project) { build(:project) }
- let(:user) { project.owner }
- let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project) }
-
- subject { described_class.new(context) }
-
- describe '#render?' do
- it 'returns false' do
- expect(subject.render?).to eq false
- end
-
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- it 'returns true' do
- expect(subject.render?).to eq true
- end
-
- context 'when user cannot access members' do
- let(:user) { nil }
-
- it 'returns false' do
- expect(subject.render?).to eq false
- end
- end
- end
- end
-end
diff --git a/spec/lib/sidebars/projects/menus/monitor_menu_spec.rb b/spec/lib/sidebars/projects/menus/monitor_menu_spec.rb
index 93618fa3321..381842be5ab 100644
--- a/spec/lib/sidebars/projects/menus/monitor_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/monitor_menu_spec.rb
@@ -41,43 +41,30 @@ RSpec.describe Sidebars::Projects::Menus::MonitorMenu do
it 'returns "Monitor"' do
expect(subject.title).to eq 'Monitor'
end
-
- context 'when feature flag :sidebar_refactor is disabled' do
- it 'returns "Operations"' do
- stub_feature_flags(sidebar_refactor: false)
-
- expect(subject.title).to eq 'Operations'
- end
- end
end
describe '#extra_container_html_options' do
it 'returns "shortcuts-monitor"' do
expect(subject.extra_container_html_options).to eq(class: 'shortcuts-monitor')
end
-
- context 'when feature flag :sidebar_refactor is disabled' do
- it 'returns "shortcuts-operations"' do
- stub_feature_flags(sidebar_refactor: false)
-
- expect(subject.extra_container_html_options).to eq(class: 'shortcuts-operations')
- end
- end
end
describe '#link' do
- context 'when metrics dashboard is visible' do
- it 'returns link to the metrics dashboard page' do
- expect(subject.link).to include('/-/environments/metrics')
- end
+    let(:foo_path) { '/foo_path' }
+
+ let(:foo_menu) do
+ ::Sidebars::MenuItem.new(
+ title: 'foo',
+ link: foo_path,
+ active_routes: {},
+ item_id: :foo
+ )
end
- context 'when metrics dashboard is not visible' do
- it 'returns link to the feature flags page' do
- project.project_feature.update!(operations_access_level: Featurable::DISABLED)
+ it 'returns first visible item link' do
+ subject.insert_element_before(subject.renderable_items, subject.renderable_items.first.item_id, foo_menu)
- expect(subject.link).to include('/-/feature_flags')
- end
+ expect(subject.link).to eq foo_path
end
end
@@ -130,76 +117,6 @@ RSpec.describe Sidebars::Projects::Menus::MonitorMenu do
it_behaves_like 'access rights checks'
end
- describe 'Serverless' do
- let(:item_id) { :serverless }
-
- specify { is_expected.to be_nil }
-
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- it_behaves_like 'access rights checks'
- end
- end
-
- describe 'Terraform' do
- let(:item_id) { :terraform }
-
- specify { is_expected.to be_nil }
-
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- it_behaves_like 'access rights checks'
- end
- end
-
- describe 'Kubernetes' do
- let(:item_id) { :kubernetes }
-
- specify { is_expected.to be_nil }
-
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- it_behaves_like 'access rights checks'
- end
- end
-
- describe 'Environments' do
- let(:item_id) { :environments }
-
- specify { is_expected.to be_nil }
-
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- it_behaves_like 'access rights checks'
- end
- end
-
- describe 'Feature Flags' do
- let(:item_id) { :feature_flags }
-
- specify { is_expected.to be_nil }
-
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- it_behaves_like 'access rights checks'
- end
- end
-
describe 'Product Analytics' do
let(:item_id) { :product_analytics }
diff --git a/spec/lib/sidebars/projects/menus/project_information_menu_spec.rb b/spec/lib/sidebars/projects/menus/project_information_menu_spec.rb
index 748796bc7ee..7e8d0ab0518 100644
--- a/spec/lib/sidebars/projects/menus/project_information_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/project_information_menu_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Sidebars::Projects::Menus::ProjectInformationMenu do
- let_it_be(:project) { create(:project, :repository) }
+ let_it_be_with_reload(:project) { create(:project, :repository) }
let(:user) { project.owner }
let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project) }
@@ -12,59 +12,36 @@ RSpec.describe Sidebars::Projects::Menus::ProjectInformationMenu do
subject { described_class.new(context).container_html_options }
specify { is_expected.to match(hash_including(class: 'shortcuts-project-information has-sub-items')) }
-
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- specify { is_expected.to match(hash_including(class: 'shortcuts-project rspec-project-link has-sub-items')) }
- end
end
describe 'Menu Items' do
subject { described_class.new(context).renderable_items.index { |e| e.item_id == item_id } }
- describe 'Releases' do
- let(:item_id) { :releases }
+ describe 'Labels' do
+ let(:item_id) { :labels }
- specify { is_expected.to be_nil }
+ specify { is_expected.not_to be_nil }
- context 'when feature flag :sidebar_refactor is disabled' do
+ context 'when merge requests are disabled' do
before do
- stub_feature_flags(sidebar_refactor: false)
+ project.project_feature.update_attribute(:merge_requests_access_level, Featurable::DISABLED)
end
- context 'when project repository is empty' do
- it 'does not include releases menu item' do
- allow(project).to receive(:empty_repo?).and_return(true)
+ specify { is_expected.not_to be_nil }
+ end
- is_expected.to be_nil
- end
+ context 'when issues are disabled' do
+ before do
+ project.project_feature.update_attribute(:issues_access_level, Featurable::DISABLED)
end
- context 'when project repository is not empty' do
- context 'when user can download code' do
- specify { is_expected.not_to be_nil }
- end
-
- context 'when user cannot download code' do
- let(:user) { nil }
-
- specify { is_expected.to be_nil }
- end
- end
+ specify { is_expected.not_to be_nil }
end
- end
-
- describe 'Labels' do
- let(:item_id) { :labels }
-
- specify { is_expected.not_to be_nil }
- context 'when feature flag :sidebar_refactor is disabled' do
+ context 'when merge requests and issues are disabled' do
before do
- stub_feature_flags(sidebar_refactor: false)
+ project.project_feature.update_attribute(:merge_requests_access_level, Featurable::DISABLED)
+ project.project_feature.update_attribute(:issues_access_level, Featurable::DISABLED)
end
specify { is_expected.to be_nil }
@@ -76,10 +53,8 @@ RSpec.describe Sidebars::Projects::Menus::ProjectInformationMenu do
specify { is_expected.not_to be_nil }
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
+ describe 'when the user does not have access' do
+ let(:user) { nil }
specify { is_expected.to be_nil }
end
diff --git a/spec/lib/sidebars/projects/menus/scope_menu_spec.rb b/spec/lib/sidebars/projects/menus/scope_menu_spec.rb
index f84d458a2e1..5040ef9b0ff 100644
--- a/spec/lib/sidebars/projects/menus/scope_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/scope_menu_spec.rb
@@ -11,13 +11,5 @@ RSpec.describe Sidebars::Projects::Menus::ScopeMenu do
subject { described_class.new(context).container_html_options }
specify { is_expected.to match(hash_including(class: 'shortcuts-project rspec-project-link')) }
-
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- specify { is_expected.to eq(aria: { label: project.name }) }
- end
end
end
diff --git a/spec/lib/sidebars/projects/menus/settings_menu_spec.rb b/spec/lib/sidebars/projects/menus/settings_menu_spec.rb
index 6817f0e6ed6..9b79614db20 100644
--- a/spec/lib/sidebars/projects/menus/settings_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/settings_menu_spec.rb
@@ -99,14 +99,6 @@ RSpec.describe Sidebars::Projects::Menus::SettingsMenu do
specify { expect(subject.title).to eq 'Monitor' }
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- specify { expect(subject.title).to eq 'Operations' }
- end
-
describe 'when the user does not have access' do
let(:user) { nil }
@@ -159,14 +151,6 @@ RSpec.describe Sidebars::Projects::Menus::SettingsMenu do
specify { is_expected.not_to be_nil }
- context 'when feature flag :sidebar_refactor is disabled' do
- before do
- stub_feature_flags(sidebar_refactor: false)
- end
-
- specify { is_expected.to be_nil }
- end
-
describe 'when the user does not have access' do
let(:user) { nil }