author    GitLab Bot <gitlab-bot@gitlab.com>  2021-05-19 15:44:42 +0000
committer GitLab Bot <gitlab-bot@gitlab.com>  2021-05-19 15:44:42 +0000
commit    4555e1b21c365ed8303ffb7a3325d773c9b8bf31 (patch)
tree      5423a1c7516cffe36384133ade12572cf709398d /spec/lib/gitlab
parent    e570267f2f6b326480d284e0164a6464ba4081bc (diff)
download  gitlab-ce-4555e1b21c365ed8303ffb7a3325d773c9b8bf31.tar.gz
Add latest changes from gitlab-org/gitlab@13-12-stable-ee (v13.12.0-rc42)
Diffstat (limited to 'spec/lib/gitlab')
-rw-r--r--  spec/lib/gitlab/alert_management/payload/base_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/analytics/cycle_analytics/base_query_builder_spec.rb | 25
-rw-r--r--  spec/lib/gitlab/analytics/cycle_analytics/sorting_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/api_authentication/token_locator_spec.rb | 109
-rw-r--r--  spec/lib/gitlab/api_authentication/token_resolver_spec.rb | 49
-rw-r--r--  spec/lib/gitlab/auth_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_children_spec.rb | 21
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_roots_spec.rb | 21
-rw-r--r--  spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb | 73
-rw-r--r--  spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb | 126
-rw-r--r--  spec/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics_spec.rb | 29
-rw-r--r--  spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/background_migration/recalculate_project_authorizations_spec.rb | 241
-rw-r--r--  spec/lib/gitlab/background_migration/update_timelogs_project_id_spec.rb | 52
-rw-r--r--  spec/lib/gitlab/cache_spec.rb | 29
-rw-r--r--  spec/lib/gitlab/chat/responder_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/ci/build/cache_spec.rb | 98
-rw-r--r--  spec/lib/gitlab/ci/build/policy/changes_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/ci/build/releaser_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/ci/build/step_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/ci/config/entry/cache_spec.rb | 352
-rw-r--r--  spec/lib/gitlab/ci/config/entry/caches_spec.rb | 70
-rw-r--r--  spec/lib/gitlab/ci/config/entry/default_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/ci/config/entry/hidden_spec.rb | 3
-rw-r--r--  spec/lib/gitlab/ci/config/entry/job_spec.rb | 36
-rw-r--r--  spec/lib/gitlab/ci/config/entry/kubernetes_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/ci/config/entry/root_spec.rb | 127
-rw-r--r--  spec/lib/gitlab/ci/config/external/file/local_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/ci/config/external/file/project_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/ci/config/external/file/template_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/ci/config/external/mapper_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/ci/config/external/processor_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/build_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/limit/deployments_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb | 22
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/skip_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/template_usage_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/validate/repository_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb | 247
-rw-r--r--  spec/lib/gitlab/ci/pipeline/seed/build_spec.rb | 86
-rw-r--r--  spec/lib/gitlab/ci/pipeline/seed/deployment_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/ci/pipeline/seed/processable/resource_group_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/ci/reports/codequality_mr_diff_spec.rb | 15
-rw-r--r--  spec/lib/gitlab/ci/reports/codequality_reports_comparer_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/ci/reports/test_failure_history_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/ci/status/core_spec.rb | 17
-rw-r--r--  spec/lib/gitlab/ci/syntax_templates_spec.rb | 25
-rw-r--r--  spec/lib/gitlab/ci/templates/Jobs/build_gitlab_ci_yaml_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/ci/templates/Verify/load_performance_testing_gitlab_ci_yaml_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/ci/templates/templates_spec.rb | 34
-rw-r--r--  spec/lib/gitlab/ci/trace/chunked_io_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/ci/trace_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/ci/yaml_processor_spec.rb | 149
-rw-r--r--  spec/lib/gitlab/class_attributes_spec.rb | 62
-rw-r--r--  spec/lib/gitlab/cluster/mixins/puma_cluster_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/cluster/mixins/unicorn_http_server_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/conan_token_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/conflict/file_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/content_security_policy/config_loader_spec.rb | 29
-rw-r--r--  spec/lib/gitlab/data_builder/build_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/data_builder/deployment_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/database/background_migration/batch_optimizer_spec.rb | 102
-rw-r--r--  spec/lib/gitlab/database/background_migration/batched_job_spec.rb | 87
-rw-r--r--  spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb | 110
-rw-r--r--  spec/lib/gitlab/database/background_migration/batched_migration_spec.rb | 120
-rw-r--r--  spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb | 106
-rw-r--r--  spec/lib/gitlab/database/migration_helpers/cascading_namespace_settings_spec.rb | 50
-rw-r--r--  spec/lib/gitlab/database/migration_helpers_spec.rb | 232
-rw-r--r--  spec/lib/gitlab/database/migrations/instrumentation_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/database/migrations/observers/query_log_spec.rb | 38
-rw-r--r--  spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/database/partitioning_migration_helpers/index_helpers_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/database/reindexing/concurrent_reindex_spec.rb | 36
-rw-r--r--  spec/lib/gitlab/database/schema_cache_with_renamed_table_spec.rb | 83
-rw-r--r--  spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb | 244
-rw-r--r--  spec/lib/gitlab/database/with_lock_retries_spec.rb | 18
-rw-r--r--  spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/database_spec.rb | 30
-rw-r--r--  spec/lib/gitlab/default_branch_spec.rb | 24
-rw-r--r--  spec/lib/gitlab/diff/highlight_spec.rb | 32
-rw-r--r--  spec/lib/gitlab/email/handler/service_desk_handler_spec.rb | 5
-rw-r--r--  spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb | 84
-rw-r--r--  spec/lib/gitlab/email/message/in_product_marketing/create_spec.rb | 27
-rw-r--r--  spec/lib/gitlab/email/message/in_product_marketing/team_spec.rb | 41
-rw-r--r--  spec/lib/gitlab/email/message/in_product_marketing/trial_spec.rb | 27
-rw-r--r--  spec/lib/gitlab/email/message/in_product_marketing/verify_spec.rb | 53
-rw-r--r--  spec/lib/gitlab/email/message/in_product_marketing_spec.rb | 32
-rw-r--r--  spec/lib/gitlab/email/receiver_spec.rb | 15
-rw-r--r--  spec/lib/gitlab/encoding_helper_spec.rb | 59
-rw-r--r--  spec/lib/gitlab/error_tracking/processor/context_payload_processor_spec.rb | 31
-rw-r--r--  spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb | 31
-rw-r--r--  spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb | 31
-rw-r--r--  spec/lib/gitlab/error_tracking_spec.rb | 18
-rw-r--r--  spec/lib/gitlab/etag_caching/router_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/experimentation/controller_concern_spec.rb | 27
-rw-r--r--  spec/lib/gitlab/experimentation_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/external_authorization/access_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/external_authorization/client_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/git/blame_spec.rb | 5
-rw-r--r--  spec/lib/gitlab/git/branch_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/git/lfs_changes_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/git/merge_base_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/git/push_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/git/repository_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/git/tree_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/git/wiki_spec.rb | 16
-rw-r--r--  spec/lib/gitlab/git/wraps_gitaly_errors_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/git_access_design_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/git_access_project_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/git_access_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/git_access_wiki_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/gitaly_client/remote_service_spec.rb | 52
-rw-r--r--  spec/lib/gitlab/github_import/client_spec.rb | 5
-rw-r--r--  spec/lib/gitlab/github_import/importer/note_importer_spec.rb | 21
-rw-r--r--  spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb | 26
-rw-r--r--  spec/lib/gitlab/github_import/importer/pull_requests_merged_by_importer_spec.rb | 13
-rw-r--r--  spec/lib/gitlab/github_import/importer/pull_requests_reviews_importer_spec.rb | 18
-rw-r--r--  spec/lib/gitlab/github_import/markdown_text_spec.rb | 14
-rw-r--r--  spec/lib/gitlab/github_import/user_finder_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/graphql/deprecation_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/graphql/docs/renderer_spec.rb | 407
-rw-r--r--  spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb | 5
-rw-r--r--  spec/lib/gitlab/graphql/present/field_extension_spec.rb | 42
-rw-r--r--  spec/lib/gitlab/graphql/queries_spec.rb | 17
-rw-r--r--  spec/lib/gitlab/highlight_spec.rb | 58
-rw-r--r--  spec/lib/gitlab/hook_data/key_builder_spec.rb | 73
-rw-r--r--  spec/lib/gitlab/hook_data/project_builder_spec.rb | 83
-rw-r--r--  spec/lib/gitlab/i18n_spec.rb | 15
-rw-r--r--  spec/lib/gitlab/import_export/all_models.yml | 16
-rw-r--r--  spec/lib/gitlab/import_export/attribute_configuration_spec.rb | 5
-rw-r--r--  spec/lib/gitlab/import_export/base/relation_factory_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/import_export/command_line_util_spec.rb | 15
-rw-r--r--  spec/lib/gitlab/import_export/config_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/import_export/file_importer_spec.rb | 16
-rw-r--r--  spec/lib/gitlab/import_export/group/relation_factory_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/import_export/json/legacy_reader/file_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/import_export/json/legacy_reader/hash_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb | 58
-rw-r--r--  spec/lib/gitlab/import_export/project/relation_factory_spec.rb | 72
-rw-r--r--  spec/lib/gitlab/import_export/project/sample/relation_factory_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/import_export/references_configuration_spec.rb | 5
-rw-r--r--  spec/lib/gitlab/import_export/safe_model_attributes.yml | 3
-rw-r--r--  spec/lib/gitlab/integrations/sti_type_spec.rb | 116
-rw-r--r--  spec/lib/gitlab/jwt_token_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/kas_spec.rb | 40
-rw-r--r--  spec/lib/gitlab/lfs/client_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/memory/instrumentation_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/middleware/rack_multipart_tempfile_factory_spec.rb | 42
-rw-r--r--  spec/lib/gitlab/middleware/speedscope_spec.rb | 61
-rw-r--r--  spec/lib/gitlab/nav/top_nav_menu_item_spec.rb | 23
-rw-r--r--  spec/lib/gitlab/object_hierarchy_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/pages/settings_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/pages/stores/local_store_spec.rb | 25
-rw-r--r--  spec/lib/gitlab/pagination/keyset/iterator_spec.rb | 127
-rw-r--r--  spec/lib/gitlab/pagination/keyset/order_spec.rb | 743
-rw-r--r--  spec/lib/gitlab/pagination/keyset/simple_order_builder_spec.rb | 91
-rw-r--r--  spec/lib/gitlab/performance_bar_spec.rb | 26
-rw-r--r--  spec/lib/gitlab/prometheus/adapter_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/prometheus/additional_metrics_parser_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/quick_actions/spend_time_and_date_separator_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/rack_attack/instrumented_cache_store_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/regex_spec.rb | 187
-rw-r--r--  spec/lib/gitlab/relative_positioning/item_context_spec.rb | 16
-rw-r--r--  spec/lib/gitlab/repository_cache_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/repository_hash_cache_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/repository_set_cache_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/repository_size_checker_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/repository_size_error_message_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/runtime_spec.rb | 14
-rw-r--r--  spec/lib/gitlab/sidekiq_config/worker_matcher_spec.rb | 82
-rw-r--r--  spec/lib/gitlab/sidekiq_config/worker_router_spec.rb | 212
-rw-r--r--  spec/lib/gitlab/sidekiq_config/worker_spec.rb | 3
-rw-r--r--  spec/lib/gitlab/sidekiq_daemon/monitor_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb | 14
-rw-r--r--  spec/lib/gitlab/sidekiq_middleware/size_limiter/exceed_limit_error_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb | 215
-rw-r--r--  spec/lib/gitlab/slash_commands/presenters/issue_move_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/spamcheck/client_spec.rb | 121
-rw-r--r--  spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb | 7
-rw-r--r--  spec/lib/gitlab/subscription_portal_spec.rb | 53
-rw-r--r--  spec/lib/gitlab/template/gitlab_ci_syntax_yml_template_spec.rb | 17
-rw-r--r--  spec/lib/gitlab/terraform_registry_token_spec.rb | 41
-rw-r--r--  spec/lib/gitlab/tracking/docs/helper_spec.rb | 91
-rw-r--r--  spec/lib/gitlab/tracking/docs/renderer_spec.rb | 23
-rw-r--r--  spec/lib/gitlab/tracking/event_definition_spec.rb | 101
-rw-r--r--  spec/lib/gitlab/tracking/standard_context_spec.rb | 16
-rw-r--r--  spec/lib/gitlab/tracking_spec.rb | 3
-rw-r--r--  spec/lib/gitlab/tree_summary_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/usage/metric_definition_spec.rb | 16
-rw-r--r--  spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb | 105
-rw-r--r--  spec/lib/gitlab/usage/metrics/aggregates/sources/calculations/intersection_spec.rb | 89
-rw-r--r--  spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb | 60
-rw-r--r--  spec/lib/gitlab/usage/metrics/aggregates/sources/redis_hll_spec.rb | 35
-rw-r--r--  spec/lib/gitlab/usage/metrics/instrumentations/count_boards_metric_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/usage/metrics/instrumentations/count_issues_metric_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/usage/metrics/instrumentations/count_users_creating_issues_metric_spec.rb | 17
-rw-r--r--  spec/lib/gitlab/usage/metrics/instrumentations/count_users_using_approve_quick_action_metric_spec.rb | 15
-rw-r--r--  spec/lib/gitlab/usage/metrics/instrumentations/hostname_metric_spec.rb | 7
-rw-r--r--  spec/lib/gitlab/usage/metrics/instrumentations/uuid_metric_spec.rb | 7
-rw-r--r--  spec/lib/gitlab/usage/metrics/key_path_processor_spec.rb | 23
-rw-r--r--  spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb | 56
-rw-r--r--  spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/usage_data_metrics_spec.rb | 47
-rw-r--r--  spec/lib/gitlab/usage_data_queries_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/usage_data_spec.rb | 129
214 files changed, 6088 insertions, 2330 deletions
diff --git a/spec/lib/gitlab/alert_management/payload/base_spec.rb b/spec/lib/gitlab/alert_management/payload/base_spec.rb
index e093b3587c2..d3c1a96253c 100644
--- a/spec/lib/gitlab/alert_management/payload/base_spec.rb
+++ b/spec/lib/gitlab/alert_management/payload/base_spec.rb
@@ -89,6 +89,12 @@ RSpec.describe Gitlab::AlertManagement::Payload::Base do
it { is_expected.to be_nil }
end
+
+ context 'with time in seconds' do
+ let(:raw_payload) { { 'test' => 1618877936 } }
+
+ it { is_expected.to be_nil }
+ end
end
context 'with an integer type provided' do
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/base_query_builder_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/base_query_builder_spec.rb
index 0a333965f68..24f8fb40445 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/base_query_builder_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/base_query_builder_spec.rb
@@ -62,4 +62,29 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::BaseQueryBuilder do
expect(records.size).to eq(2)
end
+
+ describe 'in progress filter' do
+ let_it_be(:mr3) { create(:merge_request, :opened, target_project: project, source_project: project, allow_broken: true, created_at: 3.months.ago) }
+ let_it_be(:mr4) { create(:merge_request, :closed, target_project: project, source_project: project, allow_broken: true, created_at: 1.month.ago) }
+
+ before do
+ params[:from] = 5.months.ago
+ end
+
+ context 'when the filter is present' do
+ before do
+ params[:end_event_filter] = :in_progress
+ end
+
+ it 'returns only open items' do
+ expect(records).to eq([mr3])
+ end
+ end
+
+ context 'when the filter is absent' do
+ it 'returns finished items' do
+ expect(records).to match_array([mr1, mr2])
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/sorting_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/sorting_spec.rb
index 8f5be709a11..daf85ea379a 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/sorting_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/sorting_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Analytics::CycleAnalytics::Sorting do
let(:stage) { build(:cycle_analytics_project_stage, start_event_identifier: :merge_request_created, end_event_identifier: :merge_request_merged) }
- subject(:order_values) { described_class.apply(MergeRequest.joins(:metrics), stage, sort, direction).order_values }
+ subject(:order_values) { described_class.new(query: MergeRequest.joins(:metrics), stage: stage).apply(sort, direction).order_values }
context 'when invalid sorting params are given' do
let(:sort) { :unknown_sort }
diff --git a/spec/lib/gitlab/api_authentication/token_locator_spec.rb b/spec/lib/gitlab/api_authentication/token_locator_spec.rb
index e933fd8352e..4b19a3d5846 100644
--- a/spec/lib/gitlab/api_authentication/token_locator_spec.rb
+++ b/spec/lib/gitlab/api_authentication/token_locator_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe Gitlab::APIAuthentication::TokenLocator do
let(:request) { double(authorization: nil) }
it 'returns nil' do
- expect(subject).to be(nil)
+ expect(subject).to be_nil
end
end
@@ -59,7 +59,7 @@ RSpec.describe Gitlab::APIAuthentication::TokenLocator do
let(:request) { double(headers: {}) }
it 'returns nil' do
- expect(subject).to be(nil)
+ expect(subject).to be_nil
end
end
@@ -72,5 +72,110 @@ RSpec.describe Gitlab::APIAuthentication::TokenLocator do
end
end
end
+
+ context 'with :http_bearer_token' do
+ let(:type) { :http_bearer_token }
+
+ context 'without credentials' do
+ let(:request) { double(headers: {}) }
+
+ it 'returns nil' do
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'with credentials' do
+ let(:password) { 'bar' }
+ let(:request) { double(headers: { "Authorization" => "Bearer #{password}" }) }
+
+ it 'returns the credentials' do
+ expect(subject.password).to eq(password)
+ end
+ end
+ end
+
+ context 'with :http_deploy_token_header' do
+ let(:type) { :http_deploy_token_header }
+
+ context 'without credentials' do
+ let(:request) { double(headers: {}) }
+
+ it 'returns nil' do
+ expect(subject).to be(nil)
+ end
+ end
+
+ context 'with credentials' do
+ let(:password) { 'bar' }
+ let(:request) { double(headers: { 'Deploy-Token' => password }) }
+
+ it 'returns the credentials' do
+ expect(subject.password).to eq(password)
+ end
+ end
+ end
+
+ context 'with :http_job_token_header' do
+ let(:type) { :http_job_token_header }
+
+ context 'without credentials' do
+ let(:request) { double(headers: {}) }
+
+ it 'returns nil' do
+ expect(subject).to be(nil)
+ end
+ end
+
+ context 'with credentials' do
+ let(:password) { 'bar' }
+ let(:request) { double(headers: { 'Job-Token' => password }) }
+
+ it 'returns the credentials' do
+ expect(subject.password).to eq(password)
+ end
+ end
+ end
+
+ context 'with :http_private_token_header' do
+ let(:type) { :http_private_token_header }
+
+ context 'without credentials' do
+ let(:request) { double(headers: {}) }
+
+ it 'returns nil' do
+ expect(subject).to be(nil)
+ end
+ end
+
+ context 'with credentials' do
+ let(:password) { 'bar' }
+ let(:request) { double(headers: { 'Private-Token' => password }) }
+
+ it 'returns the credentials' do
+ expect(subject.password).to eq(password)
+ end
+ end
+ end
+
+ context 'with :token_param' do
+ let(:type) { :token_param }
+
+ context 'without credentials' do
+ let(:request) { double(query_parameters: {}) }
+
+ it 'returns nil' do
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'with credentials' do
+ let(:password) { 'bar' }
+ let(:request) { double(query_parameters: { 'token' => password }) }
+
+ it 'returns the credentials' do
+ expect(subject.password).to eq(password)
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/api_authentication/token_resolver_spec.rb b/spec/lib/gitlab/api_authentication/token_resolver_spec.rb
index 97a7c8ba7cf..bbc6bf0d481 100644
--- a/spec/lib/gitlab/api_authentication/token_resolver_spec.rb
+++ b/spec/lib/gitlab/api_authentication/token_resolver_spec.rb
@@ -160,9 +160,58 @@ RSpec.describe Gitlab::APIAuthentication::TokenResolver do
it_behaves_like 'an authorized request'
end
end
+
+ context 'with :personal_access_token_from_jwt' do
+ let(:type) { :personal_access_token_from_jwt }
+ let(:token) { personal_access_token }
+
+ context 'with valid credentials' do
+ let(:raw) { username_and_password_from_jwt(token.id) }
+
+ it_behaves_like 'an authorized request'
+ end
+ end
+
+ context 'with :deploy_token_from_jwt' do
+ let(:type) { :deploy_token_from_jwt }
+ let(:token) { deploy_token }
+
+ context 'with valid credentials' do
+ let(:raw) { username_and_password_from_jwt(token.token) }
+
+ it_behaves_like 'an authorized request'
+ end
+ end
+
+ context 'with :job_token_from_jwt' do
+ let(:type) { :job_token_from_jwt }
+ let(:token) { ci_job }
+
+ context 'with valid credentials' do
+ let(:raw) { username_and_password_from_jwt(token.token) }
+
+ it_behaves_like 'an authorized request'
+ end
+
+ context 'when the job is not running' do
+ let(:raw) { username_and_password_from_jwt(ci_job_done.token) }
+
+ it_behaves_like 'an unauthorized request'
+ end
+
+ context 'with an invalid job token' do
+ let(:raw) { username_and_password_from_jwt('not a valid CI job token') }
+
+ it_behaves_like 'an unauthorized request'
+ end
+ end
end
def username_and_password(username, password)
::Gitlab::APIAuthentication::TokenLocator::UsernameAndPassword.new(username, password)
end
+
+ def username_and_password_from_jwt(token)
+ username_and_password(nil, ::Gitlab::JWTToken.new.tap { |jwt| jwt['token'] = token }.encoded)
+ end
end
diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb
index 7a578ad3c90..7f06e66ad50 100644
--- a/spec/lib/gitlab/auth_spec.rb
+++ b/spec/lib/gitlab/auth_spec.rb
@@ -779,7 +779,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
end.not_to change(user, :failed_attempts)
end
- context 'when the database is read only' do
+ context 'when the database is read-only' do
before do
allow(Gitlab::Database).to receive(:read_only?).and_return(true)
end
diff --git a/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_children_spec.rb b/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_children_spec.rb
new file mode 100644
index 00000000000..35928deff82
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_children_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceTraversalIdsChildren, :migration, schema: 20210506065000 do
+ let(:namespaces_table) { table(:namespaces) }
+
+ let!(:user_namespace) { namespaces_table.create!(id: 1, name: 'user', path: 'user', type: nil) }
+ let!(:root_group) { namespaces_table.create!(id: 2, name: 'group', path: 'group', type: 'Group', parent_id: nil) }
+ let!(:sub_group) { namespaces_table.create!(id: 3, name: 'subgroup', path: 'subgroup', type: 'Group', parent_id: 2) }
+
+ describe '#perform' do
+ it 'backfills traversal_ids for child namespaces' do
+ described_class.new.perform(1, 3, 5)
+
+ expect(user_namespace.reload.traversal_ids).to eq([])
+ expect(root_group.reload.traversal_ids).to eq([])
+ expect(sub_group.reload.traversal_ids).to eq([root_group.id, sub_group.id])
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_roots_spec.rb b/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_roots_spec.rb
new file mode 100644
index 00000000000..96e43275972
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_roots_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceTraversalIdsRoots, :migration, schema: 20210506065000 do
+ let(:namespaces_table) { table(:namespaces) }
+
+ let!(:user_namespace) { namespaces_table.create!(id: 1, name: 'user', path: 'user', type: nil) }
+ let!(:root_group) { namespaces_table.create!(id: 2, name: 'group', path: 'group', type: 'Group', parent_id: nil) }
+ let!(:sub_group) { namespaces_table.create!(id: 3, name: 'subgroup', path: 'subgroup', type: 'Group', parent_id: 2) }
+
+ describe '#perform' do
+ it 'backfills traversal_ids for root namespaces' do
+ described_class.new.perform(1, 3, 5)
+
+ expect(user_namespace.reload.traversal_ids).to eq([user_namespace.id])
+ expect(root_group.reload.traversal_ids).to eq([root_group.id])
+ expect(sub_group.reload.traversal_ids).to eq([])
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb b/spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb
index c4c0247ad3e..3e378db04d4 100644
--- a/spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb
+++ b/spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb
@@ -6,6 +6,11 @@ RSpec.describe Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJo
let(:table_name) { :copy_primary_key_test }
let(:test_table) { table(table_name) }
let(:sub_batch_size) { 1000 }
+ let(:pause_ms) { 0 }
+
+ let(:helpers) do
+ ActiveRecord::Migration.new.extend(Gitlab::Database::MigrationHelpers)
+ end
before do
ActiveRecord::Base.connection.execute(<<~SQL)
@@ -14,8 +19,8 @@ RSpec.describe Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJo
id integer NOT NULL,
name character varying,
fk integer NOT NULL,
- id_convert_to_bigint bigint DEFAULT 0 NOT NULL,
- fk_convert_to_bigint bigint DEFAULT 0 NOT NULL,
+ #{helpers.convert_to_bigint_column(:id)} bigint DEFAULT 0 NOT NULL,
+ #{helpers.convert_to_bigint_column(:fk)} bigint DEFAULT 0 NOT NULL,
name_convert_to_text text DEFAULT 'no name'
);
SQL
@@ -34,43 +39,85 @@ RSpec.describe Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJo
SQL
end
- subject { described_class.new }
+ subject(:copy_columns) { described_class.new }
describe '#perform' do
let(:migration_class) { described_class.name }
it 'copies all primary keys in range' do
- subject.perform(12, 15, table_name, 'id', sub_batch_size, 'id', 'id_convert_to_bigint')
+ temporary_column = helpers.convert_to_bigint_column(:id)
+ copy_columns.perform(12, 15, table_name, 'id', sub_batch_size, pause_ms, 'id', temporary_column)
- expect(test_table.where('id = id_convert_to_bigint').pluck(:id)).to contain_exactly(12, 15)
- expect(test_table.where(id_convert_to_bigint: 0).pluck(:id)).to contain_exactly(11, 19)
+ expect(test_table.where("id = #{temporary_column}").pluck(:id)).to contain_exactly(12, 15)
+ expect(test_table.where(temporary_column => 0).pluck(:id)).to contain_exactly(11, 19)
expect(test_table.all.count).to eq(4)
end
it 'copies all foreign keys in range' do
- subject.perform(10, 14, table_name, 'id', sub_batch_size, 'fk', 'fk_convert_to_bigint')
+ temporary_column = helpers.convert_to_bigint_column(:fk)
+ copy_columns.perform(10, 14, table_name, 'id', sub_batch_size, pause_ms, 'fk', temporary_column)
- expect(test_table.where('fk = fk_convert_to_bigint').pluck(:id)).to contain_exactly(11, 12)
- expect(test_table.where(fk_convert_to_bigint: 0).pluck(:id)).to contain_exactly(15, 19)
+ expect(test_table.where("fk = #{temporary_column}").pluck(:id)).to contain_exactly(11, 12)
+ expect(test_table.where(temporary_column => 0).pluck(:id)).to contain_exactly(15, 19)
expect(test_table.all.count).to eq(4)
end
it 'copies columns with NULLs' do
expect(test_table.where("name_convert_to_text = 'no name'").count).to eq(4)
- subject.perform(10, 20, table_name, 'id', sub_batch_size, 'name', 'name_convert_to_text')
+ copy_columns.perform(10, 20, table_name, 'id', sub_batch_size, pause_ms, 'name', 'name_convert_to_text')
expect(test_table.where('name = name_convert_to_text').pluck(:id)).to contain_exactly(11, 12, 19)
expect(test_table.where('name is NULL and name_convert_to_text is NULL').pluck(:id)).to contain_exactly(15)
expect(test_table.where("name_convert_to_text = 'no name'").count).to eq(0)
end
+ it 'copies multiple columns when given' do
+ columns_to_copy_from = %w[id fk]
+ id_tmp_column = helpers.convert_to_bigint_column('id')
+ fk_tmp_column = helpers.convert_to_bigint_column('fk')
+ columns_to_copy_to = [id_tmp_column, fk_tmp_column]
+
+ subject.perform(10, 15, table_name, 'id', sub_batch_size, pause_ms, columns_to_copy_from, columns_to_copy_to)
+
+ expect(test_table.where("id = #{id_tmp_column} AND fk = #{fk_tmp_column}").pluck(:id)).to contain_exactly(11, 12, 15)
+ expect(test_table.where(id_tmp_column => 0).where(fk_tmp_column => 0).pluck(:id)).to contain_exactly(19)
+ expect(test_table.all.count).to eq(4)
+ end
+
+ it 'raises error when number of source and target columns does not match' do
+ columns_to_copy_from = %w[id fk]
+ columns_to_copy_to = [helpers.convert_to_bigint_column(:id)]
+
+ expect do
+ subject.perform(10, 15, table_name, 'id', sub_batch_size, pause_ms, columns_to_copy_from, columns_to_copy_to)
+ end.to raise_error(ArgumentError, 'number of source and destination columns must match')
+ end
+
it 'tracks timings of queries' do
- expect(subject.batch_metrics.timings).to be_empty
+ expect(copy_columns.batch_metrics.timings).to be_empty
+
+ copy_columns.perform(10, 20, table_name, 'id', sub_batch_size, pause_ms, 'name', 'name_convert_to_text')
+
+ expect(copy_columns.batch_metrics.timings[:update_all]).not_to be_empty
+ end
+
+ context 'pause interval between sub-batches' do
+ it 'sleeps for the specified time between sub-batches' do
+ sub_batch_size = 2
+
+ expect(copy_columns).to receive(:sleep).with(0.005)
+
+ copy_columns.perform(10, 12, table_name, 'id', sub_batch_size, 5, 'name', 'name_convert_to_text')
+ end
+
+ it 'treats negative values as 0' do
+ sub_batch_size = 2
- subject.perform(10, 20, table_name, 'id', sub_batch_size, 'name', 'name_convert_to_text')
+ expect(copy_columns).to receive(:sleep).with(0)
- expect(subject.batch_metrics.timings[:update_all]).not_to be_empty
+ copy_columns.perform(10, 12, table_name, 'id', sub_batch_size, -5, 'name', 'name_convert_to_text')
+ end
end
end
end
diff --git a/spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb b/spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb
new file mode 100644
index 00000000000..c4beb719e1e
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb
@@ -0,0 +1,126 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::DropInvalidVulnerabilities, schema: 20201110110454 do
+ let_it_be(:background_migration_jobs) { table(:background_migration_jobs) }
+ let_it_be(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
+ let_it_be(:users) { table(:users) }
+ let_it_be(:user) { create_user! }
+ let_it_be(:project) { table(:projects).create!(id: 123, namespace_id: namespace.id) }
+
+ let_it_be(:scanners) { table(:vulnerability_scanners) }
+ let_it_be(:scanner) { scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') }
+ let_it_be(:different_scanner) { scanners.create!(project_id: project.id, external_id: 'test 2', name: 'test scanner 2') }
+
+ let_it_be(:vulnerabilities) { table(:vulnerabilities) }
+ let_it_be(:vulnerability_with_finding) do
+ create_vulnerability!(
+ project_id: project.id,
+ author_id: user.id
+ )
+ end
+
+ let_it_be(:vulnerability_without_finding) do
+ create_vulnerability!(
+ project_id: project.id,
+ author_id: user.id
+ )
+ end
+
+ let_it_be(:vulnerability_identifiers) { table(:vulnerability_identifiers) }
+ let_it_be(:primary_identifier) do
+ vulnerability_identifiers.create!(
+ project_id: project.id,
+ external_type: 'uuid-v5',
+ external_id: 'uuid-v5',
+ fingerprint: '7e394d1b1eb461a7406d7b1e08f057a1cf11287a',
+ name: 'Identifier for UUIDv5')
+ end
+
+ let_it_be(:vulnerabilities_findings) { table(:vulnerability_occurrences) }
+ let_it_be(:finding) do
+ create_finding!(
+ vulnerability_id: vulnerability_with_finding.id,
+ project_id: project.id,
+ scanner_id: scanner.id,
+ primary_identifier_id: primary_identifier.id
+ )
+ end
+
+ let(:succeeded_status) { 1 }
+ let(:pending_status) { 0 }
+
+ it 'drops Vulnerabilities without any Findings' do
+ expect(vulnerabilities.pluck(:id)).to eq([vulnerability_with_finding.id, vulnerability_without_finding.id])
+
+ expect { subject.perform(vulnerability_with_finding.id, vulnerability_without_finding.id) }.to change(vulnerabilities, :count).by(-1)
+
+ expect(vulnerabilities.pluck(:id)).to eq([vulnerability_with_finding.id])
+ end
+
+ it 'marks jobs as done' do
+ background_migration_jobs.create!(
+ class_name: 'DropInvalidVulnerabilities',
+ arguments: [vulnerability_with_finding.id, vulnerability_with_finding.id]
+ )
+
+ background_migration_jobs.create!(
+ class_name: 'DropInvalidVulnerabilities',
+ arguments: [vulnerability_without_finding.id, vulnerability_without_finding.id]
+ )
+
+ subject.perform(vulnerability_with_finding.id, vulnerability_with_finding.id)
+
+ expect(background_migration_jobs.first.status).to eq(succeeded_status)
+ expect(background_migration_jobs.second.status).to eq(pending_status)
+ end
+
+ private
+
+ def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0)
+ vulnerabilities.create!(
+ project_id: project_id,
+ author_id: author_id,
+ title: title,
+ severity: severity,
+ confidence: confidence,
+ report_type: report_type
+ )
+ end
+
+ # rubocop:disable Metrics/ParameterLists
+ def create_finding!(
+ vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:,
+ name: "test", severity: 7, confidence: 7, report_type: 0,
+ project_fingerprint: '123qweasdzxc', location_fingerprint: 'test',
+ metadata_version: 'test', raw_metadata: 'test', uuid: 'test')
+ vulnerabilities_findings.create!(
+ vulnerability_id: vulnerability_id,
+ project_id: project_id,
+ name: name,
+ severity: severity,
+ confidence: confidence,
+ report_type: report_type,
+ project_fingerprint: project_fingerprint,
+ scanner_id: scanner_id,
+ primary_identifier_id: primary_identifier_id,
+ location_fingerprint: location_fingerprint,
+ metadata_version: metadata_version,
+ raw_metadata: raw_metadata,
+ uuid: uuid
+ )
+ end
+ # rubocop:enable Metrics/ParameterLists
+
+ def create_user!(name: "Example User", email: "user@example.com", user_type: nil)
+ users.create!(
+ name: name,
+ email: email,
+ username: name,
+ projects_limit: 0,
+ user_type: user_type,
+ confirmed_at: Time.current
+ )
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics_spec.rb b/spec/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics_spec.rb
new file mode 100644
index 00000000000..5e2f32c54be
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::MigrateProjectTaggingsContextFromTagsToTopics, schema: 20210511095658 do
+ it 'correctly migrates project taggings context from tags to topics' do
+ taggings = table(:taggings)
+
+ project_old_tagging_1 = taggings.create!(taggable_type: 'Project', context: 'tags')
+ project_new_tagging_1 = taggings.create!(taggable_type: 'Project', context: 'topics')
+ project_other_context_tagging_1 = taggings.create!(taggable_type: 'Project', context: 'other')
+ project_old_tagging_2 = taggings.create!(taggable_type: 'Project', context: 'tags')
+ project_old_tagging_3 = taggings.create!(taggable_type: 'Project', context: 'tags')
+
+ subject.perform(project_old_tagging_1.id, project_old_tagging_2.id)
+
+ project_old_tagging_1.reload
+ project_new_tagging_1.reload
+ project_other_context_tagging_1.reload
+ project_old_tagging_2.reload
+ project_old_tagging_3.reload
+
+ expect(project_old_tagging_1.context).to eq('topics')
+ expect(project_new_tagging_1.context).to eq('topics')
+ expect(project_other_context_tagging_1.context).to eq('other')
+ expect(project_old_tagging_2.context).to eq('topics')
+ expect(project_old_tagging_3.context).to eq('tags')
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb b/spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb
index 1c62d703a34..b34a57f51f1 100644
--- a/spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb
+++ b/spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb
@@ -31,6 +31,15 @@ RSpec.describe Gitlab::BackgroundMigration::MoveContainerRegistryEnabledToProjec
end
it 'copies values to project_features' do
+ table(:background_migration_jobs).create!(
+ class_name: 'MoveContainerRegistryEnabledToProjectFeature',
+ arguments: [project1.id, project4.id]
+ )
+ table(:background_migration_jobs).create!(
+ class_name: 'MoveContainerRegistryEnabledToProjectFeature',
+ arguments: [-1, -3]
+ )
+
expect(project1.container_registry_enabled).to eq(true)
expect(project2.container_registry_enabled).to eq(false)
expect(project3.container_registry_enabled).to eq(nil)
@@ -57,6 +66,9 @@ RSpec.describe Gitlab::BackgroundMigration::MoveContainerRegistryEnabledToProjec
expect(project_feature1.reload.container_registry_access_level).to eq(enabled)
expect(project_feature2.reload.container_registry_access_level).to eq(disabled)
expect(project_feature3.reload.container_registry_access_level).to eq(disabled)
+
+ expect(table(:background_migration_jobs).first.status).to eq(1) # succeeded
+ expect(table(:background_migration_jobs).second.status).to eq(0) # pending
end
context 'when no projects exist in range' do
diff --git a/spec/lib/gitlab/background_migration/recalculate_project_authorizations_spec.rb b/spec/lib/gitlab/background_migration/recalculate_project_authorizations_spec.rb
deleted file mode 100644
index 1c55b50ea3f..00000000000
--- a/spec/lib/gitlab/background_migration/recalculate_project_authorizations_spec.rb
+++ /dev/null
@@ -1,241 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::RecalculateProjectAuthorizations, schema: 20200204113223 do
- let(:users_table) { table(:users) }
- let(:namespaces_table) { table(:namespaces) }
- let(:projects_table) { table(:projects) }
- let(:project_authorizations_table) { table(:project_authorizations) }
- let(:members_table) { table(:members) }
- let(:group_group_links) { table(:group_group_links) }
- let(:project_group_links) { table(:project_group_links) }
-
- let(:user) { users_table.create!(id: 1, email: 'user@example.com', projects_limit: 10) }
- let(:group) { namespaces_table.create!(type: 'Group', name: 'group', path: 'group') }
-
- subject { described_class.new.perform([user.id]) }
-
- context 'missing authorization' do
- context 'personal project' do
- before do
- user_namespace = namespaces_table.create!(owner_id: user.id, name: 'User', path: 'user')
- projects_table.create!(id: 1,
- name: 'personal-project',
- path: 'personal-project',
- visibility_level: 0,
- namespace_id: user_namespace.id)
- end
-
- it 'creates correct authorization' do
- expect { subject }.to change { project_authorizations_table.count }.from(0).to(1)
- expect(project_authorizations_table.all).to(
- match_array([have_attributes(user_id: 1, project_id: 1, access_level: 40)]))
- end
- end
-
- context 'group membership' do
- before do
- projects_table.create!(id: 1, name: 'group-project', path: 'group-project',
- visibility_level: 0, namespace_id: group.id)
- members_table.create!(user_id: user.id, source_id: group.id, source_type: 'Namespace',
- type: 'GroupMember', access_level: 20, notification_level: 3)
- end
-
- it 'creates correct authorization' do
- expect { subject }.to change { project_authorizations_table.count }.from(0).to(1)
- expect(project_authorizations_table.all).to(
- match_array([have_attributes(user_id: 1, project_id: 1, access_level: 20)]))
- end
- end
-
- context 'inherited group membership' do
- before do
- sub_group = namespaces_table.create!(type: 'Group', name: 'subgroup',
- path: 'subgroup', parent_id: group.id)
- projects_table.create!(id: 1, name: 'group-project', path: 'group-project',
- visibility_level: 0, namespace_id: sub_group.id)
- members_table.create!(user_id: user.id, source_id: group.id, source_type: 'Namespace',
- type: 'GroupMember', access_level: 20, notification_level: 3)
- end
-
- it 'creates correct authorization' do
- expect { subject }.to change { project_authorizations_table.count }.from(0).to(1)
- expect(project_authorizations_table.all).to(
- match_array([have_attributes(user_id: 1, project_id: 1, access_level: 20)]))
- end
- end
-
- context 'project membership' do
- before do
- project = projects_table.create!(id: 1, name: 'group-project', path: 'group-project',
- visibility_level: 0, namespace_id: group.id)
- members_table.create!(user_id: user.id, source_id: project.id, source_type: 'Project',
- type: 'ProjectMember', access_level: 20, notification_level: 3)
- end
-
- it 'creates correct authorization' do
- expect { subject }.to change { project_authorizations_table.count }.from(0).to(1)
- expect(project_authorizations_table.all).to(
- match_array([have_attributes(user_id: 1, project_id: 1, access_level: 20)]))
- end
- end
-
- context 'shared group' do
- before do
- members_table.create!(user_id: user.id, source_id: group.id, source_type: 'Namespace',
- type: 'GroupMember', access_level: 30, notification_level: 3)
-
- shared_group = namespaces_table.create!(type: 'Group', name: 'shared group',
- path: 'shared-group')
- projects_table.create!(id: 1, name: 'project', path: 'project', visibility_level: 0,
- namespace_id: shared_group.id)
-
- group_group_links.create!(shared_group_id: shared_group.id, shared_with_group_id: group.id,
- group_access: 20)
- end
-
- it 'creates correct authorization' do
- expect { subject }.to change { project_authorizations_table.count }.from(0).to(1)
- expect(project_authorizations_table.all).to(
- match_array([have_attributes(user_id: 1, project_id: 1, access_level: 20)]))
- end
- end
-
- context 'shared project' do
- before do
- members_table.create!(user_id: user.id, source_id: group.id, source_type: 'Namespace',
- type: 'GroupMember', access_level: 30, notification_level: 3)
-
- another_group = namespaces_table.create!(type: 'Group', name: 'another group', path: 'another-group')
- shared_project = projects_table.create!(id: 1, name: 'shared project', path: 'shared-project',
- visibility_level: 0, namespace_id: another_group.id)
-
- project_group_links.create!(project_id: shared_project.id, group_id: group.id, group_access: 20)
- end
-
- it 'creates correct authorization' do
- expect { subject }.to change { project_authorizations_table.count }.from(0).to(1)
- expect(project_authorizations_table.all).to(
- match_array([have_attributes(user_id: 1, project_id: 1, access_level: 20)]))
- end
- end
- end
-
- context 'unapproved access requests' do
- context 'group membership' do
- before do
- projects_table.create!(id: 1, name: 'group-project', path: 'group-project',
- visibility_level: 0, namespace_id: group.id)
- members_table.create!(user_id: user.id, source_id: group.id, source_type: 'Namespace',
- type: 'GroupMember', access_level: 20, requested_at: Time.now, notification_level: 3)
- end
-
- it 'does not create authorization' do
- expect { subject }.not_to change { project_authorizations_table.count }.from(0)
- end
- end
-
- context 'inherited group membership' do
- before do
- sub_group = namespaces_table.create!(type: 'Group', name: 'subgroup', path: 'subgroup',
- parent_id: group.id)
- projects_table.create!(id: 1, name: 'group-project', path: 'group-project',
- visibility_level: 0, namespace_id: sub_group.id)
- members_table.create!(user_id: user.id, source_id: group.id, source_type: 'Namespace',
- type: 'GroupMember', access_level: 20, requested_at: Time.now, notification_level: 3)
- end
-
- it 'does not create authorization' do
- expect { subject }.not_to change { project_authorizations_table.count }.from(0)
- end
- end
-
- context 'project membership' do
- before do
- project = projects_table.create!(id: 1, name: 'group-project', path: 'group-project',
- visibility_level: 0, namespace_id: group.id)
- members_table.create!(user_id: user.id, source_id: project.id, source_type: 'Project',
- type: 'ProjectMember', access_level: 20, requested_at: Time.now, notification_level: 3)
- end
-
- it 'does not create authorization' do
- expect { subject }.not_to change { project_authorizations_table.count }.from(0)
- end
- end
-
- context 'shared group' do
- before do
- members_table.create!(user_id: user.id, source_id: group.id, source_type: 'Namespace',
- type: 'GroupMember', access_level: 30, requested_at: Time.now, notification_level: 3)
-
- shared_group = namespaces_table.create!(type: 'Group', name: 'shared group',
- path: 'shared-group')
- projects_table.create!(id: 1, name: 'project', path: 'project', visibility_level: 0,
- namespace_id: shared_group.id)
-
- group_group_links.create!(shared_group_id: shared_group.id, shared_with_group_id: group.id,
- group_access: 20)
- end
-
- it 'does not create authorization' do
- expect { subject }.not_to change { project_authorizations_table.count }.from(0)
- end
- end
-
- context 'shared project' do
- before do
- members_table.create!(user_id: user.id, source_id: group.id, source_type: 'Namespace',
- type: 'GroupMember', access_level: 30, requested_at: Time.now, notification_level: 3)
-
- another_group = namespaces_table.create!(type: 'Group', name: 'another group', path: 'another-group')
- shared_project = projects_table.create!(id: 1, name: 'shared project', path: 'shared-project',
- visibility_level: 0, namespace_id: another_group.id)
-
- project_group_links.create!(project_id: shared_project.id, group_id: group.id, group_access: 20)
- end
-
- it 'does not create authorization' do
- expect { subject }.not_to change { project_authorizations_table.count }.from(0)
- end
- end
- end
-
- context 'incorrect authorization' do
- before do
- project = projects_table.create!(id: 1, name: 'group-project', path: 'group-project',
- visibility_level: 0, namespace_id: group.id)
- members_table.create!(user_id: user.id, source_id: group.id, source_type: 'Namespace',
- type: 'GroupMember', access_level: 30, notification_level: 3)
-
- project_authorizations_table.create!(user_id: user.id, project_id: project.id,
- access_level: 10)
- end
-
- it 'fixes authorization' do
- expect { subject }.not_to change { project_authorizations_table.count }.from(1)
- expect(project_authorizations_table.all).to(
- match_array([have_attributes(user_id: 1, project_id: 1, access_level: 30)]))
- end
- end
-
- context 'unwanted authorization' do
- before do
- project = projects_table.create!(name: 'group-project', path: 'group-project',
- visibility_level: 0, namespace_id: group.id)
-
- project_authorizations_table.create!(user_id: user.id, project_id: project.id,
- access_level: 10)
- end
-
- it 'deletes authorization' do
- expect { subject }.to change { project_authorizations_table.count }.from(1).to(0)
- end
- end
-
- context 'deleted user' do
- it 'does not fail' do
- expect { described_class.new.perform([non_existing_record_id]) }.not_to raise_error
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/update_timelogs_project_id_spec.rb b/spec/lib/gitlab/background_migration/update_timelogs_project_id_spec.rb
new file mode 100644
index 00000000000..fc4d776b8be
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/update_timelogs_project_id_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::UpdateTimelogsProjectId, schema: 20210427212034 do
+ let!(:namespace) { table(:namespaces).create!(name: 'namespace', path: 'namespace') }
+ let!(:project1) { table(:projects).create!(namespace_id: namespace.id) }
+ let!(:project2) { table(:projects).create!(namespace_id: namespace.id) }
+ let!(:issue1) { table(:issues).create!(project_id: project1.id) }
+ let!(:issue2) { table(:issues).create!(project_id: project2.id) }
+ let!(:merge_request1) { table(:merge_requests).create!(target_project_id: project1.id, source_branch: 'master', target_branch: 'feature') }
+ let!(:merge_request2) { table(:merge_requests).create!(target_project_id: project2.id, source_branch: 'master', target_branch: 'feature') }
+ let!(:timelog1) { table(:timelogs).create!(issue_id: issue1.id, time_spent: 60) }
+ let!(:timelog2) { table(:timelogs).create!(issue_id: issue1.id, time_spent: 60) }
+ let!(:timelog3) { table(:timelogs).create!(issue_id: issue2.id, time_spent: 60) }
+ let!(:timelog4) { table(:timelogs).create!(merge_request_id: merge_request1.id, time_spent: 600) }
+ let!(:timelog5) { table(:timelogs).create!(merge_request_id: merge_request1.id, time_spent: 600) }
+ let!(:timelog6) { table(:timelogs).create!(merge_request_id: merge_request2.id, time_spent: 600) }
+ let!(:timelog7) { table(:timelogs).create!(issue_id: issue2.id, time_spent: 60, project_id: project1.id) }
+ let!(:timelog8) { table(:timelogs).create!(merge_request_id: merge_request2.id, time_spent: 600, project_id: project1.id) }
+
+ describe '#perform' do
+ context 'when timelogs belong to issues' do
+ it 'sets correct project_id' do
+ subject.perform(timelog1.id, timelog3.id)
+
+ expect(timelog1.reload.project_id).to eq(issue1.project_id)
+ expect(timelog2.reload.project_id).to eq(issue1.project_id)
+ expect(timelog3.reload.project_id).to eq(issue2.project_id)
+ end
+ end
+
+ context 'when timelogs belong to merge requests' do
+ it 'sets correct project ids' do
+ subject.perform(timelog4.id, timelog6.id)
+
+ expect(timelog4.reload.project_id).to eq(merge_request1.target_project_id)
+ expect(timelog5.reload.project_id).to eq(merge_request1.target_project_id)
+ expect(timelog6.reload.project_id).to eq(merge_request2.target_project_id)
+ end
+ end
+
+ context 'when timelogs already belong to projects' do
+ it 'does not update the project id' do
+ subject.perform(timelog7.id, timelog8.id)
+
+ expect(timelog7.reload.project_id).to eq(project1.id)
+ expect(timelog8.reload.project_id).to eq(project1.id)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/cache_spec.rb b/spec/lib/gitlab/cache_spec.rb
new file mode 100644
index 00000000000..5b1034a77a3
--- /dev/null
+++ b/spec/lib/gitlab/cache_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Gitlab::Cache, :request_store do
+ describe "#fetch_once" do
+ subject do
+ proc do
+ described_class.fetch_once([:test, "key"], expires_in: 10.minutes) do
+ "return value"
+ end
+ end
+ end
+
+ it "fetches from the cache once" do
+ expect(Rails.cache).to receive(:fetch).once.with([:test, "key"], expires_in: 10.minutes).and_call_original
+
+ expect(subject.call).to eq("return value")
+ expect(subject.call).to eq("return value")
+ end
+
+ it "always returns from the request store" do
+ expect(Gitlab::SafeRequestStore).to receive(:fetch).twice.with([:test, "key"]).and_call_original
+
+ expect(subject.call).to eq("return value")
+ expect(subject.call).to eq("return value")
+ end
+ end
+end
diff --git a/spec/lib/gitlab/chat/responder_spec.rb b/spec/lib/gitlab/chat/responder_spec.rb
index 6603dbe8d52..803f30da9e7 100644
--- a/spec/lib/gitlab/chat/responder_spec.rb
+++ b/spec/lib/gitlab/chat/responder_spec.rb
@@ -16,8 +16,8 @@ RSpec.describe Gitlab::Chat::Responder do
it 'returns the responder for the build' do
pipeline = create(:ci_pipeline)
build = create(:ci_build, pipeline: pipeline)
- service = double(:service, chat_responder: Gitlab::Chat::Responder::Slack)
- chat_name = double(:chat_name, service: service)
+ integration = double(:integration, chat_responder: Gitlab::Chat::Responder::Slack)
+ chat_name = double(:chat_name, integration: integration)
chat_data = double(:chat_data, chat_name: chat_name)
allow(pipeline)
diff --git a/spec/lib/gitlab/ci/build/cache_spec.rb b/spec/lib/gitlab/ci/build/cache_spec.rb
index 9188045988b..7477aedb994 100644
--- a/spec/lib/gitlab/ci/build/cache_spec.rb
+++ b/spec/lib/gitlab/ci/build/cache_spec.rb
@@ -4,11 +4,23 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Build::Cache do
describe '.initialize' do
- context 'when the multiple cache feature flag is disabled' do
- before do
- stub_feature_flags(multiple_cache_per_job: false)
+ context 'when the cache is an array' do
+ it 'instantiates an array of cache seeds' do
+ cache_config = [{ key: 'key-a' }, { key: 'key-b' }]
+ pipeline = double(::Ci::Pipeline)
+ cache_seed_a = double(Gitlab::Ci::Pipeline::Seed::Build::Cache)
+ cache_seed_b = double(Gitlab::Ci::Pipeline::Seed::Build::Cache)
+ allow(Gitlab::Ci::Pipeline::Seed::Build::Cache).to receive(:new).and_return(cache_seed_a, cache_seed_b)
+
+ cache = described_class.new(cache_config, pipeline)
+
+ expect(Gitlab::Ci::Pipeline::Seed::Build::Cache).to have_received(:new).with(pipeline, { key: 'key-a' })
+ expect(Gitlab::Ci::Pipeline::Seed::Build::Cache).to have_received(:new).with(pipeline, { key: 'key-b' })
+ expect(cache.instance_variable_get(:@cache)).to eq([cache_seed_a, cache_seed_b])
end
+ end
+ context 'when the cache is a hash' do
it 'instantiates a cache seed' do
cache_config = { key: 'key-a' }
pipeline = double(::Ci::Pipeline)
@@ -18,87 +30,35 @@ RSpec.describe Gitlab::Ci::Build::Cache do
cache = described_class.new(cache_config, pipeline)
expect(Gitlab::Ci::Pipeline::Seed::Build::Cache).to have_received(:new).with(pipeline, cache_config)
- expect(cache.instance_variable_get(:@cache)).to eq(cache_seed)
- end
- end
-
- context 'when the multiple cache feature flag is enabled' do
- context 'when the cache is an array' do
- it 'instantiates an array of cache seeds' do
- cache_config = [{ key: 'key-a' }, { key: 'key-b' }]
- pipeline = double(::Ci::Pipeline)
- cache_seed_a = double(Gitlab::Ci::Pipeline::Seed::Build::Cache)
- cache_seed_b = double(Gitlab::Ci::Pipeline::Seed::Build::Cache)
- allow(Gitlab::Ci::Pipeline::Seed::Build::Cache).to receive(:new).and_return(cache_seed_a, cache_seed_b)
-
- cache = described_class.new(cache_config, pipeline)
-
- expect(Gitlab::Ci::Pipeline::Seed::Build::Cache).to have_received(:new).with(pipeline, { key: 'key-a' })
- expect(Gitlab::Ci::Pipeline::Seed::Build::Cache).to have_received(:new).with(pipeline, { key: 'key-b' })
- expect(cache.instance_variable_get(:@cache)).to eq([cache_seed_a, cache_seed_b])
- end
- end
-
- context 'when the cache is a hash' do
- it 'instantiates a cache seed' do
- cache_config = { key: 'key-a' }
- pipeline = double(::Ci::Pipeline)
- cache_seed = double(Gitlab::Ci::Pipeline::Seed::Build::Cache)
- allow(Gitlab::Ci::Pipeline::Seed::Build::Cache).to receive(:new).and_return(cache_seed)
-
- cache = described_class.new(cache_config, pipeline)
-
- expect(Gitlab::Ci::Pipeline::Seed::Build::Cache).to have_received(:new).with(pipeline, cache_config)
- expect(cache.instance_variable_get(:@cache)).to eq([cache_seed])
- end
+ expect(cache.instance_variable_get(:@cache)).to eq([cache_seed])
end
end
end
describe '#cache_attributes' do
- context 'when the multiple cache feature flag is disabled' do
- before do
- stub_feature_flags(multiple_cache_per_job: false)
- end
-
- it "returns the cache seed's build attributes" do
- cache_config = { key: 'key-a' }
+ context 'when there are no caches' do
+ it 'returns an empty hash' do
+ cache_config = []
pipeline = double(::Ci::Pipeline)
cache = described_class.new(cache_config, pipeline)
attributes = cache.cache_attributes
- expect(attributes).to eq({
- options: { cache: { key: 'key-a' } }
- })
+ expect(attributes).to eq({})
end
end
- context 'when the multiple cache feature flag is enabled' do
- context 'when there are no caches' do
- it 'returns an empty hash' do
- cache_config = []
- pipeline = double(::Ci::Pipeline)
- cache = described_class.new(cache_config, pipeline)
-
- attributes = cache.cache_attributes
-
- expect(attributes).to eq({})
- end
- end
-
- context 'when there are caches' do
- it 'returns the structured attributes for the caches' do
- cache_config = [{ key: 'key-a' }, { key: 'key-b' }]
- pipeline = double(::Ci::Pipeline)
- cache = described_class.new(cache_config, pipeline)
+ context 'when there are caches' do
+ it 'returns the structured attributes for the caches' do
+ cache_config = [{ key: 'key-a' }, { key: 'key-b' }]
+ pipeline = double(::Ci::Pipeline)
+ cache = described_class.new(cache_config, pipeline)
- attributes = cache.cache_attributes
+ attributes = cache.cache_attributes
- expect(attributes).to eq({
- options: { cache: cache_config }
- })
- end
+ expect(attributes).to eq({
+ options: { cache: cache_config }
+ })
end
end
end
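Context for the Gitlab::Ci::Build::Cache changes above: with the feature flag gone, both a single cache hash and an array of cache hashes are normalised into an array of seeds, and cache_attributes always wraps their values under options[:cache], or returns an empty hash when there are no caches. A minimal standalone sketch of that shape in plain Ruby; the class and seed names here are hypothetical stand-ins, not the real GitLab classes:

# Hypothetical stand-in for the seed object: it just keeps the config it was given.
class CacheSeedSketch
  attr_reader :config

  def initialize(_pipeline, config)
    @config = config
  end
end

class BuildCacheSketch
  def initialize(cache_config, pipeline)
    # A Hash and an Array are treated uniformly: everything becomes an array of seeds.
    configs = cache_config.is_a?(Array) ? cache_config : [cache_config]
    @cache = configs.map { |config| CacheSeedSketch.new(pipeline, config) }
  end

  def cache_attributes
    return {} if @cache.empty?

    { options: { cache: @cache.map(&:config) } }
  end
end

BuildCacheSketch.new([{ key: 'key-a' }, { key: 'key-b' }], nil).cache_attributes
# => { options: { cache: [{ key: 'key-a' }, { key: 'key-b' }] } }
BuildCacheSketch.new([], nil).cache_attributes
# => {}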
diff --git a/spec/lib/gitlab/ci/build/policy/changes_spec.rb b/spec/lib/gitlab/ci/build/policy/changes_spec.rb
index 016730e01cd..5d5a212b9a5 100644
--- a/spec/lib/gitlab/ci/build/policy/changes_spec.rb
+++ b/spec/lib/gitlab/ci/build/policy/changes_spec.rb
@@ -120,6 +120,7 @@ RSpec.describe Gitlab::Ci::Build::Policy::Changes do
context 'when branch is created' do
let_it_be(:project) { create(:project, :repository) }
+
let(:pipeline) do
create(:ci_empty_pipeline, project: project,
ref: 'feature',
diff --git a/spec/lib/gitlab/ci/build/releaser_spec.rb b/spec/lib/gitlab/ci/build/releaser_spec.rb
index fa5e90674a6..435f70e9ac5 100644
--- a/spec/lib/gitlab/ci/build/releaser_spec.rb
+++ b/spec/lib/gitlab/ci/build/releaser_spec.rb
@@ -15,18 +15,25 @@ RSpec.describe Gitlab::Ci::Build::Releaser do
tag_name: 'release-$CI_COMMIT_SHA',
ref: '$CI_COMMIT_SHA',
milestones: %w[m1 m2 m3],
- released_at: '2020-07-15T08:00:00Z'
+ released_at: '2020-07-15T08:00:00Z',
+ assets: {
+ links: [
+ { name: 'asset1', url: 'https://example.com/assets/1', link_type: 'other', filepath: '/pretty/asset/1' },
+ { name: 'asset2', url: 'https://example.com/assets/2' }
+ ]
+ }
}
}
end
it 'generates the script' do
- expect(subject).to eq(['release-cli create --name "Release $CI_COMMIT_SHA" --description "Created using the release-cli $EXTRA_DESCRIPTION" --tag-name "release-$CI_COMMIT_SHA" --ref "$CI_COMMIT_SHA" --released-at "2020-07-15T08:00:00Z" --milestone "m1" --milestone "m2" --milestone "m3"'])
+ expect(subject).to eq(['release-cli create --name "Release $CI_COMMIT_SHA" --description "Created using the release-cli $EXTRA_DESCRIPTION" --tag-name "release-$CI_COMMIT_SHA" --ref "$CI_COMMIT_SHA" --released-at "2020-07-15T08:00:00Z" --milestone "m1" --milestone "m2" --milestone "m3" --assets-link "{\"name\":\"asset1\",\"url\":\"https://example.com/assets/1\",\"link_type\":\"other\",\"filepath\":\"/pretty/asset/1\"}" --assets-link "{\"name\":\"asset2\",\"url\":\"https://example.com/assets/2\"}"'])
end
end
context 'individual nodes' do
using RSpec::Parameterized::TableSyntax
+ links = { links: [{ name: 'asset1', url: 'https://example.com/assets/1', link_type: 'other', filepath: '/pretty/asset/1' }] }
where(:node_name, :node_value, :result) do
:name | 'Release $CI_COMMIT_SHA' | 'release-cli create --name "Release $CI_COMMIT_SHA"'
@@ -35,6 +42,7 @@ RSpec.describe Gitlab::Ci::Build::Releaser do
:ref | '$CI_COMMIT_SHA' | 'release-cli create --ref "$CI_COMMIT_SHA"'
:milestones | %w[m1 m2 m3] | 'release-cli create --milestone "m1" --milestone "m2" --milestone "m3"'
:released_at | '2020-07-15T08:00:00Z' | 'release-cli create --released-at "2020-07-15T08:00:00Z"'
+ :assets | links | "release-cli create --assets-link #{links[:links][0].to_json.to_json}"
end
with_them do
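The expected command lines above encode each assets:links entry as JSON and append it as a quoted --assets-link flag (note the double to_json in the table row: once to serialise the hash, once to quote it for the command line). A small standalone sketch of that flag construction, assuming only Ruby's json library; the helper name is hypothetical and only mirrors the quoting shape asserted in the spec:

require 'json'

# Build "--assets-link" flags the way the expected command lines above are shaped:
# each link hash is JSON-encoded, then JSON-encoded again to get a quoted argument.
def assets_link_flags(assets)
  Array(assets[:links]).map do |link|
    "--assets-link #{link.to_json.to_json}"
  end.join(' ')
end

links = { links: [{ name: 'asset1', url: 'https://example.com/assets/1' }] }
puts assets_link_flags(links)
# --assets-link "{\"name\":\"asset1\",\"url\":\"https://example.com/assets/1\"}"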
diff --git a/spec/lib/gitlab/ci/build/step_spec.rb b/spec/lib/gitlab/ci/build/step_spec.rb
index 4b8f68b9fa8..938b52c496c 100644
--- a/spec/lib/gitlab/ci/build/step_spec.rb
+++ b/spec/lib/gitlab/ci/build/step_spec.rb
@@ -62,7 +62,7 @@ RSpec.describe Gitlab::Ci::Build::Step do
let(:job) { create(:ci_build, :release_options) }
it 'returns the release-cli command line' do
- expect(subject.script).to eq(["release-cli create --name \"Release $CI_COMMIT_SHA\" --description \"Created using the release-cli $EXTRA_DESCRIPTION\" --tag-name \"release-$CI_COMMIT_SHA\" --ref \"$CI_COMMIT_SHA\""])
+ expect(subject.script).to eq(["release-cli create --name \"Release $CI_COMMIT_SHA\" --description \"Created using the release-cli $EXTRA_DESCRIPTION\" --tag-name \"release-$CI_COMMIT_SHA\" --ref \"$CI_COMMIT_SHA\" --assets-link \"{\\\"name\\\":\\\"asset1\\\",\\\"url\\\":\\\"https://example.com/assets/1\\\"}\""])
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/cache_spec.rb b/spec/lib/gitlab/ci/config/entry/cache_spec.rb
index cec1c97085b..247f4b63910 100644
--- a/spec/lib/gitlab/ci/config/entry/cache_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/cache_spec.rb
@@ -7,295 +7,227 @@ RSpec.describe Gitlab::Ci::Config::Entry::Cache do
subject(:entry) { described_class.new(config) }
- context 'with multiple caches' do
+ describe 'validations' do
before do
entry.compose!
end
- describe '#valid?' do
- context 'with an empty hash as cache' do
- let(:config) { {} }
-
- it 'is valid' do
- expect(entry).to be_valid
- end
- end
-
- context 'when configuration is valid with a single cache' do
- let(:config) { { key: 'key', paths: ["logs/"], untracked: true } }
-
- it 'is valid' do
- expect(entry).to be_valid
+ context 'when entry config value is correct' do
+ let(:policy) { nil }
+ let(:key) { 'some key' }
+ let(:when_config) { nil }
+
+ let(:config) do
+ {
+ key: key,
+ untracked: true,
+ paths: ['some/path/']
+ }.tap do |config|
+ config[:policy] = policy if policy
+ config[:when] = when_config if when_config
end
end
- context 'when configuration is valid with multiple caches' do
- let(:config) do
- [
- { key: 'key', paths: ["logs/"], untracked: true },
- { key: 'key2', paths: ["logs/"], untracked: true },
- { key: 'key3', paths: ["logs/"], untracked: true }
- ]
+ describe '#value' do
+ shared_examples 'hash key value' do
+ it 'returns hash value' do
+ expect(entry.value).to eq(key: key, untracked: true, paths: ['some/path/'], policy: 'pull-push', when: 'on_success')
+ end
end
- it 'is valid' do
- expect(entry).to be_valid
- end
- end
+ it_behaves_like 'hash key value'
- context 'when configuration is not a Hash or Array' do
- let(:config) { 'invalid' }
+ context 'with files' do
+ let(:key) { { files: %w[a-file other-file] } }
- it 'is invalid' do
- expect(entry).not_to be_valid
+ it_behaves_like 'hash key value'
end
- end
- context 'when entry values contain more than four caches' do
- let(:config) do
- [
- { key: 'key', paths: ["logs/"], untracked: true },
- { key: 'key2', paths: ["logs/"], untracked: true },
- { key: 'key3', paths: ["logs/"], untracked: true },
- { key: 'key4', paths: ["logs/"], untracked: true },
- { key: 'key5', paths: ["logs/"], untracked: true }
- ]
- end
+ context 'with files and prefix' do
+ let(:key) { { files: %w[a-file other-file], prefix: 'prefix-value' } }
- it 'is invalid' do
- expect(entry.errors).to eq(["caches config no more than 4 caches can be created"])
- expect(entry).not_to be_valid
+ it_behaves_like 'hash key value'
end
- end
- end
- end
- context 'with a single cache' do
- before do
- stub_feature_flags(multiple_cache_per_job: false)
- end
- describe 'validations' do
- before do
- entry.compose!
- end
-
- context 'when entry config value is correct' do
- let(:policy) { nil }
- let(:key) { 'some key' }
- let(:when_config) { nil }
+ context 'with prefix' do
+ let(:key) { { prefix: 'prefix-value' } }
- let(:config) do
- {
- key: key,
- untracked: true,
- paths: ['some/path/']
- }.tap do |config|
- config[:policy] = policy if policy
- config[:when] = when_config if when_config
+ it 'key is nil' do
+ expect(entry.value).to match(a_hash_including(key: nil))
end
end
- describe '#value' do
- shared_examples 'hash key value' do
- it 'returns hash value' do
- expect(entry.value).to eq(key: key, untracked: true, paths: ['some/path/'], policy: 'pull-push', when: 'on_success')
- end
- end
-
- it_behaves_like 'hash key value'
-
- context 'with files' do
- let(:key) { { files: %w[a-file other-file] } }
-
- it_behaves_like 'hash key value'
- end
-
- context 'with files and prefix' do
- let(:key) { { files: %w[a-file other-file], prefix: 'prefix-value' } }
-
- it_behaves_like 'hash key value'
+ context 'with `policy`' do
+ where(:policy, :result) do
+ 'pull-push' | 'pull-push'
+ 'push' | 'push'
+ 'pull' | 'pull'
+ 'unknown' | 'unknown' # invalid
end
- context 'with prefix' do
- let(:key) { { prefix: 'prefix-value' } }
-
- it 'key is nil' do
- expect(entry.value).to match(a_hash_including(key: nil))
- end
+ with_them do
+ it { expect(entry.value).to include(policy: result) }
end
+ end
- context 'with `policy`' do
- where(:policy, :result) do
- 'pull-push' | 'pull-push'
- 'push' | 'push'
- 'pull' | 'pull'
- 'unknown' | 'unknown' # invalid
- end
-
- with_them do
- it { expect(entry.value).to include(policy: result) }
- end
+ context 'without `policy`' do
+ it 'assigns policy to default' do
+ expect(entry.value).to include(policy: 'pull-push')
end
+ end
- context 'without `policy`' do
- it 'assigns policy to default' do
- expect(entry.value).to include(policy: 'pull-push')
- end
+ context 'with `when`' do
+ where(:when_config, :result) do
+ 'on_success' | 'on_success'
+ 'on_failure' | 'on_failure'
+ 'always' | 'always'
+ 'unknown' | 'unknown' # invalid
end
- context 'with `when`' do
- where(:when_config, :result) do
- 'on_success' | 'on_success'
- 'on_failure' | 'on_failure'
- 'always' | 'always'
- 'unknown' | 'unknown' # invalid
- end
-
- with_them do
- it { expect(entry.value).to include(when: result) }
- end
+ with_them do
+ it { expect(entry.value).to include(when: result) }
end
+ end
- context 'without `when`' do
- it 'assigns when to default' do
- expect(entry.value).to include(when: 'on_success')
- end
+ context 'without `when`' do
+ it 'assigns when to default' do
+ expect(entry.value).to include(when: 'on_success')
end
end
+ end
- describe '#valid?' do
- it { is_expected.to be_valid }
+ describe '#valid?' do
+ it { is_expected.to be_valid }
- context 'with files' do
- let(:key) { { files: %w[a-file other-file] } }
+ context 'with files' do
+ let(:key) { { files: %w[a-file other-file] } }
- it { is_expected.to be_valid }
- end
+ it { is_expected.to be_valid }
end
+ end
- context 'with `policy`' do
- where(:policy, :valid) do
- 'pull-push' | true
- 'push' | true
- 'pull' | true
- 'unknown' | false
- end
+ context 'with `policy`' do
+ where(:policy, :valid) do
+ 'pull-push' | true
+ 'push' | true
+ 'pull' | true
+ 'unknown' | false
+ end
- with_them do
- it 'returns expected validity' do
- expect(entry.valid?).to eq(valid)
- end
+ with_them do
+ it 'returns expected validity' do
+ expect(entry.valid?).to eq(valid)
end
end
+ end
- context 'with `when`' do
- where(:when_config, :valid) do
- 'on_success' | true
- 'on_failure' | true
- 'always' | true
- 'unknown' | false
- end
+ context 'with `when`' do
+ where(:when_config, :valid) do
+ 'on_success' | true
+ 'on_failure' | true
+ 'always' | true
+ 'unknown' | false
+ end
- with_them do
- it 'returns expected validity' do
- expect(entry.valid?).to eq(valid)
- end
+ with_them do
+ it 'returns expected validity' do
+ expect(entry.valid?).to eq(valid)
end
end
+ end
- context 'with key missing' do
- let(:config) do
- { untracked: true,
- paths: ['some/path/'] }
- end
+ context 'with key missing' do
+ let(:config) do
+ { untracked: true,
+ paths: ['some/path/'] }
+ end
- describe '#value' do
- it 'sets key with the default' do
- expect(entry.value[:key])
- .to eq(Gitlab::Ci::Config::Entry::Key.default)
- end
+ describe '#value' do
+ it 'sets key with the default' do
+ expect(entry.value[:key])
+ .to eq(Gitlab::Ci::Config::Entry::Key.default)
end
end
end
+ end
- context 'when entry value is not correct' do
- describe '#errors' do
- subject { entry.errors }
+ context 'when entry value is not correct' do
+ describe '#errors' do
+ subject { entry.errors }
- context 'when is not a hash' do
- let(:config) { 'ls' }
+ context 'when is not a hash' do
+ let(:config) { 'ls' }
- it 'reports errors with config value' do
- is_expected.to include 'cache config should be a hash'
- end
+ it 'reports errors with config value' do
+ is_expected.to include 'cache config should be a hash'
end
+ end
- context 'when policy is unknown' do
- let(:config) { { policy: 'unknown' } }
+ context 'when policy is unknown' do
+ let(:config) { { policy: 'unknown' } }
- it 'reports error' do
- is_expected.to include('cache policy should be pull-push, push, or pull')
- end
+ it 'reports error' do
+ is_expected.to include('cache policy should be pull-push, push, or pull')
end
+ end
- context 'when `when` is unknown' do
- let(:config) { { when: 'unknown' } }
+ context 'when `when` is unknown' do
+ let(:config) { { when: 'unknown' } }
- it 'reports error' do
- is_expected.to include('cache when should be on_success, on_failure or always')
- end
+ it 'reports error' do
+ is_expected.to include('cache when should be on_success, on_failure or always')
end
+ end
- context 'when descendants are invalid' do
- context 'with invalid keys' do
- let(:config) { { key: 1 } }
-
- it 'reports error with descendants' do
- is_expected.to include 'key should be a hash, a string or a symbol'
- end
- end
-
- context 'with empty key' do
- let(:config) { { key: {} } }
+ context 'when descendants are invalid' do
+ context 'with invalid keys' do
+ let(:config) { { key: 1 } }
- it 'reports error with descendants' do
- is_expected.to include 'key config missing required keys: files'
- end
+ it 'reports error with descendants' do
+ is_expected.to include 'key should be a hash, a string or a symbol'
end
+ end
- context 'with invalid files' do
- let(:config) { { key: { files: 'a-file' } } }
+ context 'with empty key' do
+ let(:config) { { key: {} } }
- it 'reports error with descendants' do
- is_expected.to include 'key:files config should be an array of strings'
- end
+ it 'reports error with descendants' do
+ is_expected.to include 'key config missing required keys: files'
end
+ end
- context 'with prefix without files' do
- let(:config) { { key: { prefix: 'a-prefix' } } }
+ context 'with invalid files' do
+ let(:config) { { key: { files: 'a-file' } } }
- it 'reports error with descendants' do
- is_expected.to include 'key config missing required keys: files'
- end
+ it 'reports error with descendants' do
+ is_expected.to include 'key:files config should be an array of strings'
end
+ end
- context 'when there is an unknown key present' do
- let(:config) { { key: { unknown: 'a-file' } } }
+ context 'with prefix without files' do
+ let(:config) { { key: { prefix: 'a-prefix' } } }
- it 'reports error with descendants' do
- is_expected.to include 'key config contains unknown keys: unknown'
- end
+ it 'reports error with descendants' do
+ is_expected.to include 'key config missing required keys: files'
end
end
context 'when there is an unknown key present' do
- let(:config) { { invalid: true } }
+ let(:config) { { key: { unknown: 'a-file' } } }
it 'reports error with descendants' do
- is_expected.to include 'cache config contains unknown keys: invalid'
+ is_expected.to include 'key config contains unknown keys: unknown'
end
end
end
+
+ context 'when there is an unknown key present' do
+ let(:config) { { invalid: true } }
+
+ it 'reports error with descendants' do
+ is_expected.to include 'cache config contains unknown keys: invalid'
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/caches_spec.rb b/spec/lib/gitlab/ci/config/entry/caches_spec.rb
new file mode 100644
index 00000000000..047cef53b96
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/caches_spec.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Entry::Caches do
+ using RSpec::Parameterized::TableSyntax
+
+ subject(:entry) { described_class.new(config) }
+
+ before do
+ entry.compose!
+ end
+
+ describe '#valid?' do
+ context 'with an empty hash as cache' do
+ let(:config) { {} }
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+
+ context 'when configuration is valid with a single cache' do
+ let(:config) { { key: 'key', paths: ["logs/"], untracked: true } }
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+
+ context 'when configuration is valid with multiple caches' do
+ let(:config) do
+ [
+ { key: 'key', paths: ["logs/"], untracked: true },
+ { key: 'key2', paths: ["logs/"], untracked: true },
+ { key: 'key3', paths: ["logs/"], untracked: true }
+ ]
+ end
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+
+ context 'when configuration is not a Hash or Array' do
+ let(:config) { 'invalid' }
+
+ it 'is invalid' do
+ expect(entry).not_to be_valid
+ end
+ end
+
+ context 'when entry values contain more than four caches' do
+ let(:config) do
+ [
+ { key: 'key', paths: ["logs/"], untracked: true },
+ { key: 'key2', paths: ["logs/"], untracked: true },
+ { key: 'key3', paths: ["logs/"], untracked: true },
+ { key: 'key4', paths: ["logs/"], untracked: true },
+ { key: 'key5', paths: ["logs/"], untracked: true }
+ ]
+ end
+
+ it 'is invalid' do
+ expect(entry.errors).to eq(["caches config no more than 4 caches can be created"])
+ expect(entry).not_to be_valid
+ end
+ end
+ end
+end
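The new Caches entry accepts either a single cache hash or an array of them, capped at four. A rough sketch of the validation shape those examples pin down; the four-cache message matches the spec above, while the non-hash message is illustrative wording only:

MAX_CACHES = 4 # the limit asserted in the spec above

def caches_config_errors(config)
  configs = config.is_a?(Array) ? config : [config]

  # Illustrative wording: the spec only asserts that a non-hash config is invalid.
  return ['caches config should be a hash or an array of hashes'] unless configs.all? { |cache| cache.is_a?(Hash) }
  return ["caches config no more than #{MAX_CACHES} caches can be created"] if configs.size > MAX_CACHES

  []
end

caches_config_errors({ key: 'key', paths: ['logs/'], untracked: true })
# => []
caches_config_errors(Array.new(5) { |i| { key: "key#{i}", paths: ['logs/'] } })
# => ["caches config no more than 4 caches can be created"]
caches_config_errors('invalid')
# => ["caches config should be a hash or an array of hashes"]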
diff --git a/spec/lib/gitlab/ci/config/entry/default_spec.rb b/spec/lib/gitlab/ci/config/entry/default_spec.rb
index 6e46d02a96e..5613b0f09d1 100644
--- a/spec/lib/gitlab/ci/config/entry/default_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/default_spec.rb
@@ -3,6 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Config::Entry::Default do
+ let(:config) { {} }
let(:entry) { described_class.new(config) }
it_behaves_like 'with inheritable CI config' do
diff --git a/spec/lib/gitlab/ci/config/entry/hidden_spec.rb b/spec/lib/gitlab/ci/config/entry/hidden_spec.rb
index 090ef67f39d..7a2ecee0dae 100644
--- a/spec/lib/gitlab/ci/config/entry/hidden_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/hidden_spec.rb
@@ -20,6 +20,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Hidden do
end
describe '.new' do
+ let(:config) { {} }
let(:entry) { described_class.new(config) }
describe 'validations' do
@@ -41,8 +42,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Hidden do
context 'when entry value is not correct' do
context 'when config is empty' do
- let(:config) { {} }
-
describe '#valid' do
it 'is invalid' do
expect(entry).not_to be_valid
diff --git a/spec/lib/gitlab/ci/config/entry/job_spec.rb b/spec/lib/gitlab/ci/config/entry/job_spec.rb
index ffcd029172a..1d23ab0c2c7 100644
--- a/spec/lib/gitlab/ci/config/entry/job_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/job_spec.rb
@@ -556,42 +556,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
end
end
- context 'with multiple_cache_per_job FF disabled' do
- before do
- stub_feature_flags(multiple_cache_per_job: false)
- end
-
- context 'when job config overrides default config' do
- before do
- entry.compose!(deps)
- end
-
- let(:config) do
- { script: 'rspec', image: 'some_image', cache: { key: 'test' } }
- end
-
- it 'overrides default config' do
- expect(entry[:image].value).to eq(name: 'some_image')
- expect(entry[:cache].value).to eq(key: 'test', policy: 'pull-push', when: 'on_success')
- end
- end
-
- context 'when job config does not override default config' do
- before do
- allow(default).to receive('[]').with(:image).and_return(specified)
-
- entry.compose!(deps)
- end
-
- let(:config) { { script: 'ls', cache: { key: 'test' } } }
-
- it 'uses config from default entry' do
- expect(entry[:image].value).to eq 'specified'
- expect(entry[:cache].value).to eq(key: 'test', policy: 'pull-push', when: 'on_success')
- end
- end
- end
-
context 'with workflow rules' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/lib/gitlab/ci/config/entry/kubernetes_spec.rb b/spec/lib/gitlab/ci/config/entry/kubernetes_spec.rb
index 53809d2d549..0ac8d01b8e4 100644
--- a/spec/lib/gitlab/ci/config/entry/kubernetes_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/kubernetes_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Config::Entry::Kubernetes do
+ let(:config) { Hash(namespace: 'namespace') }
+
subject { described_class.new(config) }
describe 'attributes' do
diff --git a/spec/lib/gitlab/ci/config/entry/root_spec.rb b/spec/lib/gitlab/ci/config/entry/root_spec.rb
index 041eb748fc9..31e3545e8d8 100644
--- a/spec/lib/gitlab/ci/config/entry/root_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/root_spec.rb
@@ -175,68 +175,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
)
end
end
-
- context 'with multuple_cache_per_job FF disabled' do
- before do
- stub_feature_flags(multiple_cache_per_job: false)
- root.compose!
- end
-
- describe '#jobs_value' do
- it 'returns jobs configuration' do
- expect(root.jobs_value.keys).to eq([:rspec, :spinach, :release])
- expect(root.jobs_value[:rspec]).to eq(
- { name: :rspec,
- script: %w[rspec ls],
- before_script: %w(ls pwd),
- image: { name: 'ruby:2.7' },
- services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
- stage: 'test',
- cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' },
- variables: { 'VAR' => 'root', 'VAR2' => 'val 2' },
- job_variables: {},
- root_variables_inheritance: true,
- ignore: false,
- after_script: ['make clean'],
- only: { refs: %w[branches tags] },
- scheduling_type: :stage }
- )
- expect(root.jobs_value[:spinach]).to eq(
- { name: :spinach,
- before_script: [],
- script: %w[spinach],
- image: { name: 'ruby:2.7' },
- services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
- stage: 'test',
- cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' },
- variables: { 'VAR' => 'root', 'VAR2' => 'val 2' },
- job_variables: {},
- root_variables_inheritance: true,
- ignore: false,
- after_script: ['make clean'],
- only: { refs: %w[branches tags] },
- scheduling_type: :stage }
- )
- expect(root.jobs_value[:release]).to eq(
- { name: :release,
- stage: 'release',
- before_script: [],
- script: ["make changelog | tee release_changelog.txt"],
- release: { name: "Release $CI_TAG_NAME", tag_name: 'v0.06', description: "./release_changelog.txt" },
- image: { name: "ruby:2.7" },
- services: [{ name: "postgres:9.1" }, { name: "mysql:5.5" }],
- cache: { key: "k", untracked: true, paths: ["public/"], policy: "pull-push", when: 'on_success' },
- only: { refs: %w(branches tags) },
- variables: { 'VAR' => 'job', 'VAR2' => 'val 2' },
- job_variables: { 'VAR' => 'job' },
- root_variables_inheritance: true,
- after_script: [],
- ignore: false,
- scheduling_type: :stage }
- )
- end
- end
- end
end
end
@@ -255,56 +193,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
spinach: { before_script: [], variables: { VAR: 'job' }, script: 'spinach' } }
end
- context 'with multiple_cache_per_job FF disabled' do
- context 'when composed' do
- before do
- stub_feature_flags(multiple_cache_per_job: false)
- root.compose!
- end
-
- describe '#errors' do
- it 'has no errors' do
- expect(root.errors).to be_empty
- end
- end
-
- describe '#jobs_value' do
- it 'returns jobs configuration' do
- expect(root.jobs_value).to eq(
- rspec: { name: :rspec,
- script: %w[rspec ls],
- before_script: %w(ls pwd),
- image: { name: 'ruby:2.7' },
- services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
- stage: 'test',
- cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' },
- variables: { 'VAR' => 'root' },
- job_variables: {},
- root_variables_inheritance: true,
- ignore: false,
- after_script: ['make clean'],
- only: { refs: %w[branches tags] },
- scheduling_type: :stage },
- spinach: { name: :spinach,
- before_script: [],
- script: %w[spinach],
- image: { name: 'ruby:2.7' },
- services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
- stage: 'test',
- cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' },
- variables: { 'VAR' => 'job' },
- job_variables: { 'VAR' => 'job' },
- root_variables_inheritance: true,
- ignore: false,
- after_script: ['make clean'],
- only: { refs: %w[branches tags] },
- scheduling_type: :stage }
- )
- end
- end
- end
- end
-
context 'when composed' do
before do
root.compose!
@@ -390,19 +278,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
expect(root.cache_value).to eq([key: 'a', policy: 'pull-push', when: 'on_success'])
end
end
-
- context 'with multiple_cache_per_job FF disabled' do
- before do
- stub_feature_flags(multiple_cache_per_job: false)
- root.compose!
- end
-
- describe '#cache_value' do
- it 'returns correct cache definition' do
- expect(root.cache_value).to eq(key: 'a', policy: 'pull-push', when: 'on_success')
- end
- end
- end
end
context 'when variables resembles script-type job' do
@@ -525,7 +400,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
context 'when entry exists' do
it 'returns correct entry' do
expect(root[:cache])
- .to be_an_instance_of Gitlab::Ci::Config::Entry::Cache
+ .to be_an_instance_of Gitlab::Ci::Config::Entry::Caches
expect(root[:jobs][:rspec][:script].value).to eq ['ls']
end
end
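With the root cache entry now backed by Entry::Caches, cache_value always comes back as an array, even for a single global cache, with policy and when filled in from their defaults. A minimal sketch of that defaulting in plain Ruby (hypothetical helper, not the real entry code):

CACHE_DEFAULTS = { policy: 'pull-push', when: 'on_success' }.freeze

def root_cache_value(config)
  caches = config.is_a?(Array) ? config : [config]
  # Explicit values win over the defaults, mirroring the value asserted above.
  caches.map { |cache| CACHE_DEFAULTS.merge(cache) }
end

root_cache_value({ key: 'a' })
# => [{ policy: 'pull-push', when: 'on_success', key: 'a' }]
root_cache_value([{ key: 'a' }, { key: 'b', policy: 'pull' }])
# => [{ policy: 'pull-push', when: 'on_success', key: 'a' },
#     { policy: 'pull', when: 'on_success', key: 'b' }]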
diff --git a/spec/lib/gitlab/ci/config/external/file/local_spec.rb b/spec/lib/gitlab/ci/config/external/file/local_spec.rb
index 7e39fae7b9b..3d1fc32a62d 100644
--- a/spec/lib/gitlab/ci/config/external/file/local_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/local_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Config::External::File::Local do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
+
let(:sha) { '12345' }
let(:context) { Gitlab::Ci::Config::External::Context.new(**context_params) }
let(:params) { { local: location } }
diff --git a/spec/lib/gitlab/ci/config/external/file/project_spec.rb b/spec/lib/gitlab/ci/config/external/file/project_spec.rb
index 0e8851ba915..c53914c5772 100644
--- a/spec/lib/gitlab/ci/config/external/file/project_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/project_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do
let_it_be(:context_project) { create(:project) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
+
let(:context_user) { user }
let(:parent_pipeline) { double(:parent_pipeline) }
let(:context) { Gitlab::Ci::Config::External::Context.new(**context_params) }
diff --git a/spec/lib/gitlab/ci/config/external/file/template_spec.rb b/spec/lib/gitlab/ci/config/external/file/template_spec.rb
index ad1d93a64a1..75b22c1516c 100644
--- a/spec/lib/gitlab/ci/config/external/file/template_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/template_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Config::External::File::Template do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
+
let(:context_params) { { project: project, sha: '12345', user: user } }
let(:context) { Gitlab::Ci::Config::External::Context.new(**context_params) }
let(:template) { 'Auto-DevOps.gitlab-ci.yml' }
diff --git a/spec/lib/gitlab/ci/config/external/mapper_spec.rb b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
index e5b008a482e..88097f3f56a 100644
--- a/spec/lib/gitlab/ci/config/external/mapper_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
+
let(:local_file) { '/lib/gitlab/ci/templates/non-existent-file.yml' }
let(:remote_url) { 'https://gitlab.com/gitlab-org/gitlab-foss/blob/1234/.gitlab-ci-1.yml' }
let(:template_file) { 'Auto-DevOps.gitlab-ci.yml' }
diff --git a/spec/lib/gitlab/ci/config/external/processor_spec.rb b/spec/lib/gitlab/ci/config/external/processor_spec.rb
index d657c3e943f..e032d372ecb 100644
--- a/spec/lib/gitlab/ci/config/external/processor_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/processor_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:another_project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
+
let(:sha) { '12345' }
let(:context_params) { { project: project, sha: sha, user: user } }
let(:context) { Gitlab::Ci::Config::External::Context.new(**context_params) }
diff --git a/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb
index 53dea1d0d19..6019318a401 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Chain::Build do
let_it_be(:project, reload: true) { create(:project, :repository) }
let_it_be(:user) { create(:user, developer_projects: [project]) }
+
let(:pipeline) { Ci::Pipeline.new }
let(:variables_attributes) do
diff --git a/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb
index 1d17244e519..2727f2603cd 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Chain::CancelPendingPipelines do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
+
let(:prev_pipeline) { create(:ci_pipeline, project: project) }
let(:new_commit) { create(:commit, project: project) }
let(:pipeline) { create(:ci_pipeline, project: project, sha: new_commit.sha) }
diff --git a/spec/lib/gitlab/ci/pipeline/chain/limit/deployments_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/limit/deployments_spec.rb
index 23cdec61bb3..499dc3554a3 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/limit/deployments_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/limit/deployments_spec.rb
@@ -74,7 +74,7 @@ RSpec.describe ::Gitlab::Ci::Pipeline::Chain::Limit::Deployments do
it 'adds an informative error to the pipeline' do
perform
- expect(pipeline.errors.messages).to include(base: ['Pipeline has too many deployments! Requested 2, but the limit is 1.'])
+ expect(pipeline.errors.added?(:base, 'Pipeline has too many deployments! Requested 2, but the limit is 1.')).to be true
end
it 'increments the error metric' do
diff --git a/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb
index 264076859cb..2e537f40692 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb
@@ -218,15 +218,18 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
end
context 'N+1 queries' do
- it 'avoids N+1 queries when calculating variables of jobs' do
+ it 'avoids N+1 queries when calculating variables of jobs', :use_sql_query_cache do
+ warm_up_pipeline, warm_up_command = prepare_pipeline1
+ perform_seed(warm_up_pipeline, warm_up_command)
+
pipeline1, command1 = prepare_pipeline1
pipeline2, command2 = prepare_pipeline2
- control = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
perform_seed(pipeline1, command1)
end
- expect { perform_seed(pipeline2, command2) }.not_to exceed_query_limit(
+ expect { perform_seed(pipeline2, command2) }.not_to exceed_all_query_limit(
control.count + expected_extra_queries
)
end
@@ -259,15 +262,10 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
def expected_extra_queries
extra_jobs = 2
- non_handled_sql_queries = 3
-
- # 1. Ci::Build Load () SELECT "ci_builds".* FROM "ci_builds"
- # WHERE "ci_builds"."type" = 'Ci::Build'
- # AND "ci_builds"."commit_id" IS NULL
- # AND ("ci_builds"."retried" = FALSE OR "ci_builds"."retried" IS NULL)
- # AND (stage_idx < 1)
- # 2. Ci::InstanceVariable Load => `Ci::InstanceVariable#cached_data` => already cached with `fetch_memory_cache`
- # 3. Ci::Variable Load => `Project#ci_variables_for` => already cached with `Gitlab::SafeRequestStore`
+ non_handled_sql_queries = 2
+
+ # 1. Ci::InstanceVariable Load => `Ci::InstanceVariable#cached_data` => already cached with `fetch_memory_cache`
+ # 2. Ci::Variable Load => `Project#ci_variables_for` => already cached with `Gitlab::SafeRequestStore`
extra_jobs * non_handled_sql_queries
end
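The reworked N+1 guard above follows a pattern worth calling out: warm any memoised caches with a throwaway run, record the baseline with cached queries included (skip_cached: false), then compare a second run against exceed_all_query_limit. A schematic of that pattern; prepare_record, run_the_operation and allowed_extra_queries are placeholders, and ActiveRecord::QueryRecorder, :use_sql_query_cache and the query-limit matcher come from GitLab's spec support, so this shows the shape of the guard rather than code that runs on its own:

it 'avoids N+1 queries', :use_sql_query_cache do
  run_the_operation(prepare_record)              # warm-up: populate memoised/request-store caches

  control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
    run_the_operation(prepare_record)            # baseline, counting cached queries too
  end

  expect { run_the_operation(prepare_record) }
    .not_to exceed_all_query_limit(control.count + allowed_extra_queries)
end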
diff --git a/spec/lib/gitlab/ci/pipeline/chain/skip_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/skip_spec.rb
index e4768f2ef0d..27af8d379ef 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/skip_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/skip_spec.rb
@@ -21,17 +21,25 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Skip do
before do
allow(pipeline).to receive(:git_commit_message)
.and_return('commit message [ci skip]')
-
- step.perform!
end
it 'breaks the chain' do
+ step.perform!
+
expect(step.break?).to be true
end
it 'skips the pipeline' do
+ step.perform!
+
expect(pipeline.reload).to be_skipped
end
+
+ it 'calls ensure_project_iid explicitly' do
+ expect(pipeline).to receive(:ensure_project_iid!)
+
+ step.perform!
+ end
end
context 'when pipeline has not been skipped' do
diff --git a/spec/lib/gitlab/ci/pipeline/chain/template_usage_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/template_usage_spec.rb
index cd868a57bbc..8e0b032e68c 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/template_usage_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/template_usage_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Chain::TemplateUsage do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
+
let(:pipeline) { create(:ci_pipeline, project: project) }
let(:command) do
diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb
index caf3a053c4e..e3061f8095b 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
+
let(:pipeline) { build(:ci_empty_pipeline, user: user, project: project) }
let!(:step) { described_class.new(pipeline, command) }
diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/repository_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/repository_spec.rb
index 7eefb4d7876..feedef18dcd 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/validate/repository_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/validate/repository_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::Repository do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
+
let(:pipeline) { build_stubbed(:ci_pipeline) }
let!(:step) { described_class.new(pipeline, command) }
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb
index 773cb61b946..910c12389c3 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb
@@ -9,253 +9,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build::Cache do
let(:processor) { described_class.new(pipeline, config) }
- context 'with multiple_cache_per_job ff disabled' do
- before do
- stub_feature_flags(multiple_cache_per_job: false)
- end
-
- describe '#build_attributes' do
- subject { processor.build_attributes }
-
- context 'with cache:key' do
- let(:config) do
- {
- key: 'a-key',
- paths: ['vendor/ruby']
- }
- end
-
- it { is_expected.to include(options: { cache: config }) }
- end
-
- context 'with cache:key as a symbol' do
- let(:config) do
- {
- key: :a_key,
- paths: ['vendor/ruby']
- }
- end
-
- it { is_expected.to include(options: { cache: config.merge(key: "a_key") }) }
- end
-
- context 'with cache:key:files' do
- shared_examples 'default key' do
- let(:config) do
- { key: { files: files } }
- end
-
- it 'uses default key' do
- expected = { options: { cache: { key: 'default' } } }
-
- is_expected.to include(expected)
- end
- end
-
- shared_examples 'version and gemfile files' do
- let(:config) do
- {
- key: {
- files: files
- },
- paths: ['vendor/ruby']
- }
- end
-
- it 'builds a string key' do
- expected = {
- options: {
- cache: {
- key: '703ecc8fef1635427a1f86a8a1a308831c122392',
- paths: ['vendor/ruby']
- }
- }
- }
-
- is_expected.to include(expected)
- end
- end
-
- context 'with existing files' do
- let(:files) { ['VERSION', 'Gemfile.zip'] }
-
- it_behaves_like 'version and gemfile files'
- end
-
- context 'with files starting with ./' do
- let(:files) { ['Gemfile.zip', './VERSION'] }
-
- it_behaves_like 'version and gemfile files'
- end
-
- context 'with files ending with /' do
- let(:files) { ['Gemfile.zip/'] }
-
- it_behaves_like 'default key'
- end
-
- context 'with new line in filenames' do
- let(:files) { ["Gemfile.zip\nVERSION"] }
-
- it_behaves_like 'default key'
- end
-
- context 'with missing files' do
- let(:files) { ['project-gemfile.lock', ''] }
-
- it_behaves_like 'default key'
- end
-
- context 'with directories' do
- shared_examples 'foo/bar directory key' do
- let(:config) do
- {
- key: {
- files: files
- }
- }
- end
-
- it 'builds a string key' do
- expected = {
- options: {
- cache: { key: '74bf43fb1090f161bdd4e265802775dbda2f03d1' }
- }
- }
-
- is_expected.to include(expected)
- end
- end
-
- context 'with directory' do
- let(:files) { ['foo/bar'] }
-
- it_behaves_like 'foo/bar directory key'
- end
-
- context 'with directory ending in slash' do
- let(:files) { ['foo/bar/'] }
-
- it_behaves_like 'foo/bar directory key'
- end
-
- context 'with directories ending in slash star' do
- let(:files) { ['foo/bar/*'] }
-
- it_behaves_like 'foo/bar directory key'
- end
- end
- end
-
- context 'with cache:key:prefix' do
- context 'without files' do
- let(:config) do
- {
- key: {
- prefix: 'a-prefix'
- },
- paths: ['vendor/ruby']
- }
- end
-
- it 'adds prefix to default key' do
- expected = {
- options: {
- cache: {
- key: 'a-prefix-default',
- paths: ['vendor/ruby']
- }
- }
- }
-
- is_expected.to include(expected)
- end
- end
-
- context 'with existing files' do
- let(:config) do
- {
- key: {
- files: ['VERSION', 'Gemfile.zip'],
- prefix: 'a-prefix'
- },
- paths: ['vendor/ruby']
- }
- end
-
- it 'adds prefix key' do
- expected = {
- options: {
- cache: {
- key: 'a-prefix-703ecc8fef1635427a1f86a8a1a308831c122392',
- paths: ['vendor/ruby']
- }
- }
- }
-
- is_expected.to include(expected)
- end
- end
-
- context 'with missing files' do
- let(:config) do
- {
- key: {
- files: ['project-gemfile.lock', ''],
- prefix: 'a-prefix'
- },
- paths: ['vendor/ruby']
- }
- end
-
- it 'adds prefix to default key' do
- expected = {
- options: {
- cache: {
- key: 'a-prefix-default',
- paths: ['vendor/ruby']
- }
- }
- }
-
- is_expected.to include(expected)
- end
- end
- end
-
- context 'with all cache option keys' do
- let(:config) do
- {
- key: 'a-key',
- paths: ['vendor/ruby'],
- untracked: true,
- policy: 'push',
- when: 'on_success'
- }
- end
-
- it { is_expected.to include(options: { cache: config }) }
- end
-
- context 'with unknown cache option keys' do
- let(:config) do
- {
- key: 'a-key',
- unknown_key: true
- }
- end
-
- it { expect { subject }.to raise_error(ArgumentError, /unknown_key/) }
- end
-
- context 'with empty config' do
- let(:config) { {} }
-
- it { is_expected.to include(options: {}) }
- end
- end
- end
-
describe '#attributes' do
subject { processor.attributes }
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
index f97935feb86..058fb25807d 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:head_sha) { project.repository.head_commit.id }
+
let(:pipeline) { build(:ci_empty_pipeline, project: project, sha: head_sha) }
let(:root_variables) { [] }
let(:seed_context) { double(pipeline: pipeline, root_variables: root_variables) }
@@ -89,91 +90,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
end
end
- context 'with multiple_cache_per_job FF disabled' do
- before do
- stub_feature_flags(multiple_cache_per_job: false)
- end
-
- context 'with cache:key' do
- let(:attributes) do
- {
- name: 'rspec',
- ref: 'master',
- cache: {
- key: 'a-value'
- }
- }
- end
-
- it { is_expected.to include(options: { cache: { key: 'a-value' } }) }
- end
-
- context 'with cache:key:files' do
- let(:attributes) do
- {
- name: 'rspec',
- ref: 'master',
- cache: {
- key: {
- files: ['VERSION']
- }
- }
- }
- end
-
- it 'includes cache options' do
- cache_options = {
- options: {
- cache: { key: 'f155568ad0933d8358f66b846133614f76dd0ca4' }
- }
- }
-
- is_expected.to include(cache_options)
- end
- end
-
- context 'with cache:key:prefix' do
- let(:attributes) do
- {
- name: 'rspec',
- ref: 'master',
- cache: {
- key: {
- prefix: 'something'
- }
- }
- }
- end
-
- it { is_expected.to include(options: { cache: { key: 'something-default' } }) }
- end
-
- context 'with cache:key:files and prefix' do
- let(:attributes) do
- {
- name: 'rspec',
- ref: 'master',
- cache: {
- key: {
- files: ['VERSION'],
- prefix: 'something'
- }
- }
- }
- end
-
- it 'includes cache options' do
- cache_options = {
- options: {
- cache: { key: 'something-f155568ad0933d8358f66b846133614f76dd0ca4' }
- }
- }
-
- is_expected.to include(cache_options)
- end
- end
- end
-
context 'with cache:key' do
let(:attributes) do
{
diff --git a/spec/lib/gitlab/ci/pipeline/seed/deployment_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/deployment_spec.rb
index 1f38c7aec63..9f7281fb714 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/deployment_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/deployment_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Seed::Deployment do
let_it_be(:project, refind: true) { create(:project, :repository) }
+
let(:pipeline) do
create(:ci_pipeline, project: project,
sha: 'b83d6e391c22777fca1ed3012fce84f633d7fed0')
diff --git a/spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb
index 99196d393c6..175b12637e6 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Seed::Environment do
let_it_be(:project) { create(:project) }
+
let(:job) { build(:ci_build, project: project) }
let(:seed) { described_class.new(job) }
let(:attributes) { {} }
diff --git a/spec/lib/gitlab/ci/pipeline/seed/processable/resource_group_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/processable/resource_group_spec.rb
index b7260599de2..9bd0e014873 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/processable/resource_group_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/processable/resource_group_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Seed::Processable::ResourceGroup do
let_it_be(:project) { create(:project) }
+
let(:job) { build(:ci_build, project: project) }
let(:seed) { described_class.new(job, resource_group_key) }
diff --git a/spec/lib/gitlab/ci/reports/codequality_mr_diff_spec.rb b/spec/lib/gitlab/ci/reports/codequality_mr_diff_spec.rb
index 8b177fa7fc1..73b916da2e9 100644
--- a/spec/lib/gitlab/ci/reports/codequality_mr_diff_spec.rb
+++ b/spec/lib/gitlab/ci/reports/codequality_mr_diff_spec.rb
@@ -9,14 +9,11 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityMrDiff do
let(:degradation_3) { build(:codequality_degradation_3) }
describe '#initialize!' do
- subject(:report) { described_class.new(codequality_report) }
+ subject(:report) { described_class.new(new_degradations) }
context 'when quality has degradations' do
context 'with several degradations on the same line' do
- before do
- codequality_report.add_degradation(degradation_1)
- codequality_report.add_degradation(degradation_2)
- end
+ let(:new_degradations) { [degradation_1, degradation_2] }
it 'generates quality report for mr diff' do
expect(report.files).to match(
@@ -29,11 +26,7 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityMrDiff do
end
context 'with several degradations on several files' do
- before do
- codequality_report.add_degradation(degradation_1)
- codequality_report.add_degradation(degradation_2)
- codequality_report.add_degradation(degradation_3)
- end
+ let(:new_degradations) { [degradation_1, degradation_2, degradation_3] }
it 'returns quality report for mr diff' do
expect(report.files).to match(
@@ -50,6 +43,8 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityMrDiff do
end
context 'when quality has no degradation' do
+ let(:new_degradations) { [] }
+
it 'returns an empty hash' do
expect(report.files).to match({})
end
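The constructor change above means callers now hand the degradations straight to CodequalityMrDiff instead of building a CodequalityReports object first. An illustrative before/after call-site sketch; the degradations variable is a placeholder:

# Before (illustrative call site): degradations were funnelled through a
# CodequalityReports object first.
report = Gitlab::Ci::Reports::CodequalityReports.new
degradations.each { |degradation| report.add_degradation(degradation) }
mr_diff = Gitlab::Ci::Reports::CodequalityMrDiff.new(report)

# After: the new degradations are handed over directly as an array.
mr_diff = Gitlab::Ci::Reports::CodequalityMrDiff.new(degradations)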
diff --git a/spec/lib/gitlab/ci/reports/codequality_reports_comparer_spec.rb b/spec/lib/gitlab/ci/reports/codequality_reports_comparer_spec.rb
index 8378d096fcf..e289e59b281 100644
--- a/spec/lib/gitlab/ci/reports/codequality_reports_comparer_spec.rb
+++ b/spec/lib/gitlab/ci/reports/codequality_reports_comparer_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReportsComparer do
let(:base_report) { Gitlab::Ci::Reports::CodequalityReports.new }
let(:head_report) { Gitlab::Ci::Reports::CodequalityReports.new }
let(:major_degradation) { build(:codequality_degradation, :major) }
- let(:minor_degradation) { build(:codequality_degradation, :major) }
+ let(:minor_degradation) { build(:codequality_degradation, :minor) }
let(:critical_degradation) { build(:codequality_degradation, :critical) }
let(:blocker_degradation) { build(:codequality_degradation, :blocker) }
diff --git a/spec/lib/gitlab/ci/reports/test_failure_history_spec.rb b/spec/lib/gitlab/ci/reports/test_failure_history_spec.rb
index 9ee55177ca0..21216241cfb 100644
--- a/spec/lib/gitlab/ci/reports/test_failure_history_spec.rb
+++ b/spec/lib/gitlab/ci/reports/test_failure_history_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Gitlab::Ci::Reports::TestFailureHistory, :aggregate_failures do
describe '#load!' do
let_it_be(:project) { create(:project) }
+
let(:failed_rspec) { create_test_case_rspec_failed }
let(:failed_java) { create_test_case_java_failed }
diff --git a/spec/lib/gitlab/ci/status/core_spec.rb b/spec/lib/gitlab/ci/status/core_spec.rb
new file mode 100644
index 00000000000..b68e4f03433
--- /dev/null
+++ b/spec/lib/gitlab/ci/status/core_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Gitlab::Ci::Status::Core do
+ let(:subj) { double("subject", cache_key: "foo") }
+
+ subject(:status) do
+ described_class.new(subj, double("user"))
+ end
+
+ describe "#cache_key" do
+ it "uses the subject's cache key" do
+ expect(status.cache_key).to eq(subj.cache_key)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/syntax_templates_spec.rb b/spec/lib/gitlab/ci/syntax_templates_spec.rb
deleted file mode 100644
index ce3169e17ec..00000000000
--- a/spec/lib/gitlab/ci/syntax_templates_spec.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'ci/syntax_templates' do
- let_it_be(:project) { create(:project, :repository) }
- let_it_be(:user) { create(:user) }
- let(:lint) { Gitlab::Ci::Lint.new(project: project, current_user: user) }
-
- before do
- project.add_developer(user)
- end
-
- subject(:lint_result) { lint.validate(content) }
-
- Dir.glob('lib/gitlab/ci/syntax_templates/**/*.yml').each do |template|
- describe template do
- let(:content) { File.read(template) }
-
- it 'validates the template' do
- expect(lint_result).to be_valid, "got errors: #{lint_result.errors.join(', ')}"
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/ci/templates/Jobs/build_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/build_gitlab_ci_yaml_spec.rb
index 1f278048ad5..053499344e1 100644
--- a/spec/lib/gitlab/ci/templates/Jobs/build_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Jobs/build_gitlab_ci_yaml_spec.rb
@@ -45,7 +45,7 @@ RSpec.describe 'Jobs/Build.gitlab-ci.yml' do
end
context 'on merge request' do
- let(:service) { MergeRequests::CreatePipelineService.new(project, user) }
+ let(:service) { MergeRequests::CreatePipelineService.new(project: project, current_user: user) }
let(:merge_request) { create(:merge_request, :simple, source_project: project) }
let(:pipeline) { service.execute(merge_request) }
diff --git a/spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb
index 0a76de82421..b23457315cc 100644
--- a/spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb
@@ -45,7 +45,7 @@ RSpec.describe 'Jobs/Code-Quality.gitlab-ci.yml' do
end
context 'on merge request' do
- let(:service) { MergeRequests::CreatePipelineService.new(project, user) }
+ let(:service) { MergeRequests::CreatePipelineService.new(project: project, current_user: user) }
let(:merge_request) { create(:merge_request, :simple, source_project: project) }
let(:pipeline) { service.execute(merge_request) }
diff --git a/spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb
index 25c88c161ea..1d137ef89e1 100644
--- a/spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb
@@ -208,7 +208,7 @@ RSpec.describe 'Jobs/Deploy.gitlab-ci.yml' do
end
context 'on merge request' do
- let(:service) { MergeRequests::CreatePipelineService.new(project, user) }
+ let(:service) { MergeRequests::CreatePipelineService.new(project: project, current_user: user) }
let(:merge_request) { create(:merge_request, :simple, source_project: project) }
let(:pipeline) { service.execute(merge_request) }
diff --git a/spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb
index b64959a9917..7fa8d906d07 100644
--- a/spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb
@@ -45,7 +45,7 @@ RSpec.describe 'Jobs/Test.gitlab-ci.yml' do
end
context 'on merge request' do
- let(:service) { MergeRequests::CreatePipelineService.new(project, user) }
+ let(:service) { MergeRequests::CreatePipelineService.new(project: project, current_user: user) }
let(:merge_request) { create(:merge_request, :simple, source_project: project) }
let(:pipeline) { service.execute(merge_request) }
diff --git a/spec/lib/gitlab/ci/templates/Verify/load_performance_testing_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Verify/load_performance_testing_gitlab_ci_yaml_spec.rb
index 03fa45fe0a1..e53d2f4f975 100644
--- a/spec/lib/gitlab/ci/templates/Verify/load_performance_testing_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Verify/load_performance_testing_gitlab_ci_yaml_spec.rb
@@ -62,7 +62,7 @@ RSpec.describe 'Verify/Load-Performance-Testing.gitlab-ci.yml' do
end
context 'on merge request' do
- let(:service) { MergeRequests::CreatePipelineService.new(project, user) }
+ let(:service) { MergeRequests::CreatePipelineService.new(project: project, current_user: user) }
let(:merge_request) { create(:merge_request, :simple, source_project: project) }
let(:pipeline) { service.execute(merge_request) }
diff --git a/spec/lib/gitlab/ci/templates/templates_spec.rb b/spec/lib/gitlab/ci/templates/templates_spec.rb
index 768256ee6b3..56443e611e8 100644
--- a/spec/lib/gitlab/ci/templates/templates_spec.rb
+++ b/spec/lib/gitlab/ci/templates/templates_spec.rb
@@ -22,14 +22,34 @@ RSpec.describe 'CI YML Templates' do
with_them do
let(:content) do
- <<~EOS
- include:
- - template: #{template_name}
+ if template_name == 'Security/DAST-API.gitlab-ci.yml'
+ # The DAST-API template purposely excludes a stages
+ # definition.
- concrete_build_implemented_by_a_user:
- stage: test
- script: do something
- EOS
+ <<~EOS
+ include:
+ - template: #{template_name}
+
+ stages:
+ - build
+ - test
+ - deploy
+ - dast
+
+ concrete_build_implemented_by_a_user:
+ stage: test
+ script: do something
+ EOS
+ else
+ <<~EOS
+ include:
+ - template: #{template_name}
+
+ concrete_build_implemented_by_a_user:
+ stage: test
+ script: do something
+ EOS
+ end
end
it 'is valid' do
diff --git a/spec/lib/gitlab/ci/trace/chunked_io_spec.rb b/spec/lib/gitlab/ci/trace/chunked_io_spec.rb
index f09e03b4d55..f878d24fe4b 100644
--- a/spec/lib/gitlab/ci/trace/chunked_io_spec.rb
+++ b/spec/lib/gitlab/ci/trace/chunked_io_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
include ChunkedIOHelpers
let_it_be(:build) { create(:ci_build, :running) }
+
let(:chunked_io) { described_class.new(build) }
before do
diff --git a/spec/lib/gitlab/ci/trace_spec.rb b/spec/lib/gitlab/ci/trace_spec.rb
index 0fe7c731f27..69f56871740 100644
--- a/spec/lib/gitlab/ci/trace_spec.rb
+++ b/spec/lib/gitlab/ci/trace_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Trace, :clean_gitlab_redis_shared_state, factory_default: :keep do
let_it_be(:project) { create_default(:project).freeze }
let_it_be_with_reload(:build) { create(:ci_build, :success) }
+
let(:trace) { described_class.new(build) }
describe "associations" do
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index ad94dfc9160..94ab4819361 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -1419,155 +1419,6 @@ module Gitlab
end
end
- context 'with multiple_cache_per_job FF disabled' do
- before do
- stub_feature_flags(multiple_cache_per_job: false)
- end
- describe 'cache' do
- context 'when cache definition has unknown keys' do
- let(:config) do
- YAML.dump(
- { cache: { untracked: true, invalid: 'key' },
- rspec: { script: 'rspec' } })
- end
-
- it_behaves_like 'returns errors', 'cache config contains unknown keys: invalid'
- end
-
- it "returns cache when defined globally" do
- config = YAML.dump({
- cache: { paths: ["logs/", "binaries/"], untracked: true, key: 'key' },
- rspec: {
- script: "rspec"
- }
- })
-
- config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
-
- expect(config_processor.stage_builds_attributes("test").size).to eq(1)
- expect(config_processor.stage_builds_attributes("test").first[:cache]).to eq(
- paths: ["logs/", "binaries/"],
- untracked: true,
- key: 'key',
- policy: 'pull-push',
- when: 'on_success'
- )
- end
-
- it "returns cache when defined in default context" do
- config = YAML.dump(
- {
- default: {
- cache: { paths: ["logs/", "binaries/"], untracked: true, key: { files: ['file'] } }
- },
- rspec: {
- script: "rspec"
- }
- })
-
- config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
-
- expect(config_processor.stage_builds_attributes("test").size).to eq(1)
- expect(config_processor.stage_builds_attributes("test").first[:cache]).to eq(
- paths: ["logs/", "binaries/"],
- untracked: true,
- key: { files: ['file'] },
- policy: 'pull-push',
- when: 'on_success'
- )
- end
-
- it 'returns cache key when defined in a job' do
- config = YAML.dump({
- rspec: {
- cache: { paths: ['logs/', 'binaries/'], untracked: true, key: 'key' },
- script: 'rspec'
- }
- })
-
- config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
-
- expect(config_processor.stage_builds_attributes('test').size).to eq(1)
- expect(config_processor.stage_builds_attributes('test').first[:cache]).to eq(
- paths: ['logs/', 'binaries/'],
- untracked: true,
- key: 'key',
- policy: 'pull-push',
- when: 'on_success'
- )
- end
-
- it 'returns cache files' do
- config = YAML.dump(
- rspec: {
- cache: {
- paths: ['logs/', 'binaries/'],
- untracked: true,
- key: { files: ['file'] }
- },
- script: 'rspec'
- }
- )
-
- config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
-
- expect(config_processor.stage_builds_attributes('test').size).to eq(1)
- expect(config_processor.stage_builds_attributes('test').first[:cache]).to eq(
- paths: ['logs/', 'binaries/'],
- untracked: true,
- key: { files: ['file'] },
- policy: 'pull-push',
- when: 'on_success'
- )
- end
-
- it 'returns cache files with prefix' do
- config = YAML.dump(
- rspec: {
- cache: {
- paths: ['logs/', 'binaries/'],
- untracked: true,
- key: { files: ['file'], prefix: 'prefix' }
- },
- script: 'rspec'
- }
- )
-
- config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
-
- expect(config_processor.stage_builds_attributes('test').size).to eq(1)
- expect(config_processor.stage_builds_attributes('test').first[:cache]).to eq(
- paths: ['logs/', 'binaries/'],
- untracked: true,
- key: { files: ['file'], prefix: 'prefix' },
- policy: 'pull-push',
- when: 'on_success'
- )
- end
-
- it "overwrite cache when defined for a job and globally" do
- config = YAML.dump({
- cache: { paths: ["logs/", "binaries/"], untracked: true, key: 'global' },
- rspec: {
- script: "rspec",
- cache: { paths: ["test/"], untracked: false, key: 'local' }
- }
- })
-
- config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
-
- expect(config_processor.stage_builds_attributes("test").size).to eq(1)
- expect(config_processor.stage_builds_attributes("test").first[:cache]).to eq(
- paths: ["test/"],
- untracked: false,
- key: 'local',
- policy: 'pull-push',
- when: 'on_success'
- )
- end
- end
- end
-
describe 'cache' do
context 'when cache definition has unknown keys' do
let(:config) do
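The removed examples above document the pre-flag, single-cache behaviour: a job-level cache: entry replaces the global (or default:) definition outright rather than merging with it, and whichever definition wins is filled out with policy: 'pull-push' and when: 'on_success'. A minimal plain-Ruby sketch of that rule, outside YamlProcessor (the helper name and structure are assumptions for illustration):

CACHE_DEFAULTS = { policy: 'pull-push', when: 'on_success' }.freeze

def effective_cache(global_cache, job_cache)
  # The job-level definition, when present, wins wholesale; defaults fill the gaps.
  chosen = job_cache || global_cache
  chosen && CACHE_DEFAULTS.merge(chosen)
end

global = { paths: ['logs/', 'binaries/'], untracked: true, key: 'global' }
job    = { paths: ['test/'], untracked: false, key: 'local' }

effective_cache(global, job)
# => { policy: 'pull-push', when: 'on_success', paths: ['test/'], untracked: false, key: 'local' }
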
diff --git a/spec/lib/gitlab/class_attributes_spec.rb b/spec/lib/gitlab/class_attributes_spec.rb
index f8766f20495..ac2a18a1860 100644
--- a/spec/lib/gitlab/class_attributes_spec.rb
+++ b/spec/lib/gitlab/class_attributes_spec.rb
@@ -6,36 +6,62 @@ RSpec.describe Gitlab::ClassAttributes do
Class.new do
include Gitlab::ClassAttributes
- def self.get_attribute(name)
- get_class_attribute(name)
+ class << self
+ attr_reader :counter_1, :counter_2
+
+ # get_class_attribute and set_class_attribute are protected,
+      # hence these wrapper methods exist for testing purposes
+ def get_attribute(name)
+ get_class_attribute(name)
+ end
+
+ def set_attribute(name, value)
+ set_class_attribute(name, value)
+ end
+ end
+
+ after_set_class_attribute do
+ @counter_1 ||= 0
+ @counter_1 += 1
end
- def self.set_attribute(name, value)
- class_attributes[name] = value
+ after_set_class_attribute do
+ @counter_2 ||= 0
+ @counter_2 += 2
end
end
end
let(:subclass) { Class.new(klass) }
- describe ".get_class_attribute" do
- it "returns values set on the class" do
- klass.set_attribute(:foo, :bar)
+ it "returns values set on the class" do
+ klass.set_attribute(:foo, :bar)
- expect(klass.get_attribute(:foo)).to eq(:bar)
- end
+ expect(klass.get_attribute(:foo)).to eq(:bar)
+ end
- it "returns values set on a superclass" do
- klass.set_attribute(:foo, :bar)
+ it "returns values set on a superclass" do
+ klass.set_attribute(:foo, :bar)
- expect(subclass.get_attribute(:foo)).to eq(:bar)
- end
+ expect(subclass.get_attribute(:foo)).to eq(:bar)
+ end
- it "returns values from the subclass over attributes from a superclass" do
- klass.set_attribute(:foo, :baz)
- subclass.set_attribute(:foo, :bar)
+ it "returns values from the subclass over attributes from a superclass" do
+ klass.set_attribute(:foo, :baz)
+ subclass.set_attribute(:foo, :bar)
- expect(subclass.get_attribute(:foo)).to eq(:bar)
- end
+ expect(klass.get_attribute(:foo)).to eq(:baz)
+ expect(subclass.get_attribute(:foo)).to eq(:bar)
+ end
+
+ it "triggers after hooks after set class values" do
+ expect(klass.counter_1).to be(nil)
+ expect(klass.counter_2).to be(nil)
+
+ klass.set_attribute(:foo, :bar)
+ klass.set_attribute(:foo, :bar)
+
+ expect(klass.counter_1).to eq(2)
+ expect(klass.counter_2).to eq(4)
end
end
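
The rewritten class_attributes spec exercises two things: attribute lookup walks the ancestor chain (a subclass sees, and can shadow, values set on its superclass), and every after_set_class_attribute block runs on each write. Since get_class_attribute and set_class_attribute are protected, a consumer would wrap them, roughly as in this sketch (names other than the hook and the two protected methods are invented):

class TrackedSettings
  include Gitlab::ClassAttributes

  class << self
    attr_reader :writes

    def fetch_setting(name)
      get_class_attribute(name)      # protected, hence the public wrapper
    end

    def store_setting(name, value)
      set_class_attribute(name, value)
    end
  end

  after_set_class_attribute do
    @writes = (@writes || 0) + 1     # runs once per store_setting call
  end
end

TrackedSettings.store_setting(:foo, :bar)
Class.new(TrackedSettings).fetch_setting(:foo)   # => :bar, inherited from the superclass
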
diff --git a/spec/lib/gitlab/cluster/mixins/puma_cluster_spec.rb b/spec/lib/gitlab/cluster/mixins/puma_cluster_spec.rb
index 5b69b34d04b..05b67a8a93f 100644
--- a/spec/lib/gitlab/cluster/mixins/puma_cluster_spec.rb
+++ b/spec/lib/gitlab/cluster/mixins/puma_cluster_spec.rb
@@ -109,7 +109,7 @@ RSpec.describe Gitlab::Cluster::Mixins::PumaCluster do
line = process.readline
puts "PUMA_DEBUG: #{line}" if ENV['PUMA_DEBUG']
end
- rescue
+ rescue StandardError
end
end
end
diff --git a/spec/lib/gitlab/cluster/mixins/unicorn_http_server_spec.rb b/spec/lib/gitlab/cluster/mixins/unicorn_http_server_spec.rb
index 0aaca0a79c2..7f7c95b2527 100644
--- a/spec/lib/gitlab/cluster/mixins/unicorn_http_server_spec.rb
+++ b/spec/lib/gitlab/cluster/mixins/unicorn_http_server_spec.rb
@@ -111,7 +111,7 @@ RSpec.describe Gitlab::Cluster::Mixins::UnicornHttpServer do
line = process.readline
puts "UNICORN_DEBUG: #{line}" if ENV['UNICORN_DEBUG']
end
- rescue
+ rescue StandardError
end
end
end
diff --git a/spec/lib/gitlab/conan_token_spec.rb b/spec/lib/gitlab/conan_token_spec.rb
index 00683cf6e47..b6180f69044 100644
--- a/spec/lib/gitlab/conan_token_spec.rb
+++ b/spec/lib/gitlab/conan_token_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe Gitlab::ConanToken do
JSONWebToken::HMACToken.new(jwt_secret).tap do |jwt|
jwt['access_token'] = access_token_id
jwt['user_id'] = user_id || user_id
- jwt.expire_time = expire_time || jwt.issued_at + 1.hour
+ jwt.expire_time = expire_time || jwt.issued_at + ::Gitlab::ConanToken::CONAN_TOKEN_EXPIRE_TIME
end
end
@@ -75,7 +75,7 @@ RSpec.describe Gitlab::ConanToken do
it 'returns nil for expired JWT' do
jwt = build_jwt(access_token_id: 123,
user_id: 456,
- expire_time: Time.zone.now - 2.hours)
+ expire_time: Time.zone.now - (::Gitlab::ConanToken::CONAN_TOKEN_EXPIRE_TIME + 1.hour))
expect(described_class.decode(jwt.encoded)).to be_nil
end
diff --git a/spec/lib/gitlab/conflict/file_spec.rb b/spec/lib/gitlab/conflict/file_spec.rb
index 46e5334cd81..f8a007cdd75 100644
--- a/spec/lib/gitlab/conflict/file_spec.rb
+++ b/spec/lib/gitlab/conflict/file_spec.rb
@@ -298,12 +298,12 @@ RSpec.describe Gitlab::Conflict::File do
end
it 'creates context sections of the correct length' do
- expect(sections[0][:lines].reject(&:type).length).to eq(3)
- expect(sections[2][:lines].reject(&:type).length).to eq(3)
- expect(sections[3][:lines].reject(&:type).length).to eq(3)
- expect(sections[5][:lines].reject(&:type).length).to eq(3)
- expect(sections[6][:lines].reject(&:type).length).to eq(3)
- expect(sections[8][:lines].reject(&:type).length).to eq(1)
+ expect(sections[0][:lines].count { |line| line.type.nil? }).to eq(3)
+ expect(sections[2][:lines].count { |line| line.type.nil? }).to eq(3)
+ expect(sections[3][:lines].count { |line| line.type.nil? }).to eq(3)
+ expect(sections[5][:lines].count { |line| line.type.nil? }).to eq(3)
+ expect(sections[6][:lines].count { |line| line.type.nil? }).to eq(3)
+ expect(sections[8][:lines].count { |line| line.type.nil? }).to eq(1)
end
end
end
diff --git a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
index a94fd6acd32..41a6c06f9c9 100644
--- a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
+++ b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
@@ -20,15 +20,34 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
end
describe '.default_settings_hash' do
- it 'returns empty defaults' do
+ it 'returns defaults for all keys' do
settings = described_class.default_settings_hash
- expect(settings['enabled']).to be_falsey
+ expect(settings['enabled']).to be_truthy
expect(settings['report_only']).to be_falsey
- described_class::DIRECTIVES.each do |directive|
- expect(settings['directives'].has_key?(directive)).to be_truthy
- expect(settings['directives'][directive]).to be_nil
+ directives = settings['directives']
+ directive_names = (described_class::DIRECTIVES - ['report_uri'])
+ directive_names.each do |directive|
+ expect(directives.has_key?(directive)).to be_truthy
+ expect(directives[directive]).to be_truthy
+ end
+
+ expect(directives.has_key?('report_uri')).to be_truthy
+ expect(directives['report_uri']).to be_nil
+ end
+
+ context 'when GITLAB_CDN_HOST is set' do
+ before do
+ stub_env('GITLAB_CDN_HOST', 'https://example.com')
+ end
+
+ it 'adds GITLAB_CDN_HOST to CSP' do
+ settings = described_class.default_settings_hash
+ directives = settings['directives']
+
+ expect(directives['script_src']).to eq("'strict-dynamic' 'self' 'unsafe-inline' 'unsafe-eval' https://www.recaptcha.net https://apis.google.com https://example.com")
+ expect(directives['style_src']).to eq("'self' 'unsafe-inline' https://example.com")
end
end
end
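
The updated config_loader spec expects default_settings_hash to enable the policy, give every directive except report_uri a non-nil default, and append GITLAB_CDN_HOST to both script_src and style_src when that variable is present. The append step amounts to something like the following (the helper is hypothetical; the baseline strings come from the expectations above):

def append_cdn_host(directives, cdn_host)
  return directives if cdn_host.to_s.empty?

  %w[script_src style_src].each do |directive|
    directives[directive] = "#{directives[directive]} #{cdn_host}"
  end

  directives
end

directives = {
  'script_src' => "'strict-dynamic' 'self' 'unsafe-inline' 'unsafe-eval' https://www.recaptcha.net https://apis.google.com",
  'style_src'  => "'self' 'unsafe-inline'"
}

append_cdn_host(directives, 'https://example.com')
# script_src and style_src now end with https://example.com, as the spec asserts
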
diff --git a/spec/lib/gitlab/data_builder/build_spec.rb b/spec/lib/gitlab/data_builder/build_spec.rb
index 932238f281e..a31e5a1d1e2 100644
--- a/spec/lib/gitlab/data_builder/build_spec.rb
+++ b/spec/lib/gitlab/data_builder/build_spec.rb
@@ -9,6 +9,10 @@ RSpec.describe Gitlab::DataBuilder::Build do
let(:build) { create(:ci_build, :running, runner: runner, user: user) }
describe '.build' do
+ around do |example|
+ travel_to(Time.current) { example.run }
+ end
+
let(:data) do
described_class.build(build)
end
@@ -22,6 +26,8 @@ RSpec.describe Gitlab::DataBuilder::Build do
it { expect(data[:build_created_at]).to eq(build.created_at) }
it { expect(data[:build_started_at]).to eq(build.started_at) }
it { expect(data[:build_finished_at]).to eq(build.finished_at) }
+ it { expect(data[:build_duration]).to eq(build.duration) }
+ it { expect(data[:build_queued_duration]).to eq(build.queued_duration) }
it { expect(data[:build_allow_failure]).to eq(false) }
it { expect(data[:build_failure_reason]).to eq(build.failure_reason) }
it { expect(data[:project_id]).to eq(build.project.id) }
diff --git a/spec/lib/gitlab/data_builder/deployment_spec.rb b/spec/lib/gitlab/data_builder/deployment_spec.rb
index 8fb7ab25b17..d64dfc957ca 100644
--- a/spec/lib/gitlab/data_builder/deployment_spec.rb
+++ b/spec/lib/gitlab/data_builder/deployment_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Gitlab::DataBuilder::Deployment do
it 'returns the object kind for a deployment' do
deployment = build(:deployment, deployable: nil, environment: create(:environment))
- data = described_class.build(deployment)
+ data = described_class.build(deployment, Time.current)
expect(data[:object_kind]).to eq('deployment')
end
@@ -21,10 +21,12 @@ RSpec.describe Gitlab::DataBuilder::Deployment do
expected_deployable_url = Gitlab::Routing.url_helpers.project_job_url(deployable.project, deployable)
expected_user_url = Gitlab::Routing.url_helpers.user_url(deployment.deployed_by)
expected_commit_url = Gitlab::UrlBuilder.build(commit)
+ status_changed_at = Time.current
- data = described_class.build(deployment)
+ data = described_class.build(deployment, status_changed_at)
expect(data[:status]).to eq('failed')
+ expect(data[:status_changed_at]).to eq(status_changed_at)
expect(data[:deployable_id]).to eq(deployable.id)
expect(data[:deployable_url]).to eq(expected_deployable_url)
expect(data[:environment]).to eq("somewhere")
@@ -38,7 +40,7 @@ RSpec.describe Gitlab::DataBuilder::Deployment do
it 'does not include the deployable URL when there is no deployable' do
deployment = create(:deployment, status: :failed, deployable: nil)
- data = described_class.build(deployment)
+ data = described_class.build(deployment, Time.current)
expect(data[:deployable_url]).to be_nil
end
diff --git a/spec/lib/gitlab/database/background_migration/batch_optimizer_spec.rb b/spec/lib/gitlab/database/background_migration/batch_optimizer_spec.rb
new file mode 100644
index 00000000000..95863ce3765
--- /dev/null
+++ b/spec/lib/gitlab/database/background_migration/batch_optimizer_spec.rb
@@ -0,0 +1,102 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::BackgroundMigration::BatchOptimizer do
+ describe '#optimize' do
+ subject { described_class.new(migration, number_of_jobs: number_of_jobs, ema_alpha: ema_alpha).optimize! }
+
+ let(:migration) { create(:batched_background_migration, batch_size: batch_size, sub_batch_size: 100, interval: 120) }
+
+ let(:batch_size) { 10_000 }
+
+ let_it_be(:number_of_jobs) { 5 }
+ let_it_be(:ema_alpha) { 0.4 }
+
+ let_it_be(:target_efficiency) { described_class::TARGET_EFFICIENCY.max }
+
+ def mock_efficiency(eff)
+ expect(migration).to receive(:smoothed_time_efficiency).with(number_of_jobs: number_of_jobs, alpha: ema_alpha).and_return(eff)
+ end
+
+ it 'with unknown time efficiency, it keeps the batch size' do
+ mock_efficiency(nil)
+
+ expect { subject }.not_to change { migration.reload.batch_size }
+ end
+
+ it 'with a time efficiency of 95%, it keeps the batch size' do
+ mock_efficiency(0.95)
+
+ expect { subject }.not_to change { migration.reload.batch_size }
+ end
+
+ it 'with a time efficiency of 90%, it keeps the batch size' do
+ mock_efficiency(0.9)
+
+ expect { subject }.not_to change { migration.reload.batch_size }
+ end
+
+ it 'with a time efficiency of 85%, it increases the batch size' do
+ time_efficiency = 0.85
+
+ mock_efficiency(time_efficiency)
+
+ new_batch_size = ((target_efficiency / time_efficiency) * batch_size).to_i
+
+ expect { subject }.to change { migration.reload.batch_size }.from(batch_size).to(new_batch_size)
+ end
+
+ it 'with a time efficiency of 110%, it decreases the batch size' do
+ time_efficiency = 1.1
+
+ mock_efficiency(time_efficiency)
+
+ new_batch_size = ((target_efficiency / time_efficiency) * batch_size).to_i
+
+ expect { subject }.to change { migration.reload.batch_size }.from(batch_size).to(new_batch_size)
+ end
+
+ context 'reaching the upper limit for an increase' do
+ it 'caps the batch size multiplier at 20% when increasing' do
+ time_efficiency = 0.1 # this would result in a factor of 10 if not limited
+
+ mock_efficiency(time_efficiency)
+
+ new_batch_size = (1.2 * batch_size).to_i
+
+ expect { subject }.to change { migration.reload.batch_size }.from(batch_size).to(new_batch_size)
+ end
+
+ it 'does not limit the decrease multiplier' do
+ time_efficiency = 10
+
+ mock_efficiency(time_efficiency)
+
+ new_batch_size = (0.1 * batch_size).to_i
+
+ expect { subject }.to change { migration.reload.batch_size }.from(batch_size).to(new_batch_size)
+ end
+ end
+
+ context 'reaching the upper limit for the batch size' do
+ let(:batch_size) { 1_950_000 }
+
+ it 'caps the batch size at 10M' do
+ mock_efficiency(0.7)
+
+ expect { subject }.to change { migration.reload.batch_size }.to(2_000_000)
+ end
+ end
+
+ context 'reaching the lower limit for the batch size' do
+ let(:batch_size) { 1_050 }
+
+ it 'caps the batch size at 1k' do
+ mock_efficiency(1.1)
+
+ expect { subject }.to change { migration.reload.batch_size }.to(1_000)
+ end
+ end
+ end
+end
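
Read together, these examples pin down the resizing rule: leave the batch size alone while measured efficiency sits in the target band (or is unknown), otherwise scale it by target_efficiency / measured_efficiency, cap any single increase at 20%, leave decreases uncapped, and clamp the result between 1_000 and 2_000_000. A standalone sketch of that rule (the band and constant names are assumptions chosen to match the expectations above):

TARGET_BAND = (0.9..0.95)       # assumed; the spec only references its endpoints and .max
MAX_INCREASE_FACTOR = 1.2
BATCH_SIZE_LIMITS = 1_000..2_000_000

def optimized_batch_size(current, efficiency)
  return current if efficiency.nil? || TARGET_BAND.cover?(efficiency)

  factor = [TARGET_BAND.max / efficiency, MAX_INCREASE_FACTOR].min
  (current * factor).to_i.clamp(BATCH_SIZE_LIMITS.min, BATCH_SIZE_LIMITS.max)
end

optimized_batch_size(10_000, 0.85)    # => 11_176   (mild increase toward the target)
optimized_batch_size(10_000, 0.1)     # => 12_000   (increase capped at +20%)
optimized_batch_size(10_000, 1.1)     # => 8_636    (decrease, no lower cap on the factor)
optimized_batch_size(1_950_000, 0.7)  # => 2_000_000 (upper clamp)
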
diff --git a/spec/lib/gitlab/database/background_migration/batched_job_spec.rb b/spec/lib/gitlab/database/background_migration/batched_job_spec.rb
index 1020aafcf08..78e0b7627e9 100644
--- a/spec/lib/gitlab/database/background_migration/batched_job_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_job_spec.rb
@@ -9,6 +9,42 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d
it { is_expected.to belong_to(:batched_migration).with_foreign_key(:batched_background_migration_id) }
end
+ describe 'scopes' do
+ let_it_be(:fixed_time) { Time.new(2021, 04, 27, 10, 00, 00, 00) }
+
+ let_it_be(:pending_job) { create(:batched_background_migration_job, status: :pending, updated_at: fixed_time) }
+ let_it_be(:running_job) { create(:batched_background_migration_job, status: :running, updated_at: fixed_time) }
+ let_it_be(:stuck_job) { create(:batched_background_migration_job, status: :pending, updated_at: fixed_time - described_class::STUCK_JOBS_TIMEOUT) }
+ let_it_be(:failed_job) { create(:batched_background_migration_job, status: :failed, attempts: 1) }
+
+ before_all do
+ create(:batched_background_migration_job, status: :failed, attempts: described_class::MAX_ATTEMPTS)
+ create(:batched_background_migration_job, status: :succeeded)
+ end
+
+ before do
+ travel_to fixed_time
+ end
+
+ describe '.active' do
+ it 'returns active jobs' do
+ expect(described_class.active).to contain_exactly(pending_job, running_job, stuck_job)
+ end
+ end
+
+ describe '.stuck' do
+ it 'returns stuck jobs' do
+ expect(described_class.stuck).to contain_exactly(stuck_job)
+ end
+ end
+
+ describe '.retriable' do
+ it 'returns retriable jobs' do
+ expect(described_class.retriable).to contain_exactly(failed_job, stuck_job)
+ end
+ end
+ end
+
describe 'delegated batched_migration attributes' do
let(:batched_job) { build(:batched_background_migration_job) }
let(:batched_migration) { batched_job.batched_migration }
@@ -47,4 +83,55 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d
end
end
end
+
+ describe '#time_efficiency' do
+ subject { job.time_efficiency }
+
+ let(:migration) { build(:batched_background_migration, interval: 120.seconds) }
+ let(:job) { build(:batched_background_migration_job, status: :succeeded, batched_migration: migration) }
+
+ context 'when job has not yet succeeded' do
+ let(:job) { build(:batched_background_migration_job, status: :running) }
+
+ it 'returns nil' do
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'when finished_at is not set' do
+ it 'returns nil' do
+ job.started_at = Time.zone.now
+
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'when started_at is not set' do
+ it 'returns nil' do
+ job.finished_at = Time.zone.now
+
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'when job has finished' do
+ it 'returns ratio of duration to interval, here: 0.5' do
+ freeze_time do
+ job.started_at = Time.zone.now - migration.interval / 2
+ job.finished_at = Time.zone.now
+
+ expect(subject).to eq(0.5)
+ end
+ end
+
+ it 'returns ratio of duration to interval, here: 1' do
+ freeze_time do
+ job.started_at = Time.zone.now - migration.interval
+ job.finished_at = Time.zone.now
+
+ expect(subject).to eq(1)
+ end
+ end
+ end
+ end
end
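
As the #time_efficiency examples show, the value is simply the job's wall-clock duration divided by the migration's interval, and it stays nil until the job has succeeded with both timestamps present. In sketch form (names are illustrative):

def time_efficiency(started_at:, finished_at:, interval:, succeeded:)
  return nil unless succeeded && started_at && finished_at

  (finished_at - started_at) / interval.to_f
end

start = Time.utc(2021, 4, 27, 10, 0, 0)
time_efficiency(started_at: start, finished_at: start + 60, interval: 120, succeeded: true)
# => 0.5, i.e. the job used half of its 120-second scheduling slot
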
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
index 7d0e10b62c6..9f0493ab0d7 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
@@ -17,9 +17,9 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
end
it 'marks the migration as finished' do
- relation = Gitlab::Database::BackgroundMigration::BatchedMigration.finished.where(id: migration.id)
+ runner.run_migration_job(migration)
- expect { runner.run_migration_job(migration) }.to change { relation.count }.by(1)
+ expect(migration.reload).to be_finished
end
end
@@ -50,6 +50,15 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
batch_size: migration.batch_size,
sub_batch_size: migration.sub_batch_size)
end
+
+ it 'optimizes the migration after executing the job' do
+ migration.update!(min_value: event1.id, max_value: event2.id)
+
+ expect(migration_wrapper).to receive(:perform).ordered
+ expect(migration).to receive(:optimize!).ordered
+
+ runner.run_migration_job(migration)
+ end
end
context 'when the batch maximum exceeds the migration maximum' do
@@ -83,7 +92,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
let!(:event3) { create(:event) }
let!(:migration) do
- create(:batched_background_migration, :active, batch_size: 2, min_value: event1.id, max_value: event3.id)
+ create(:batched_background_migration, :active, batch_size: 2, min_value: event1.id, max_value: event2.id)
end
let!(:previous_job) do
@@ -92,14 +101,24 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
min_value: event1.id,
max_value: event2.id,
batch_size: 2,
- sub_batch_size: 1)
+ sub_batch_size: 1,
+ status: :succeeded
+ )
end
let(:job_relation) do
Gitlab::Database::BackgroundMigration::BatchedJob.where(batched_background_migration_id: migration.id)
end
+ context 'when the migration has no batches remaining' do
+ it_behaves_like 'it has completed the migration'
+ end
+
context 'when the migration has batches to process' do
+ before do
+ migration.update!(max_value: event3.id)
+ end
+
it 'runs the migration job for the next batch' do
expect(migration_wrapper).to receive(:perform) do |job_record|
expect(job_record).to eq(job_relation.last)
@@ -123,17 +142,82 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
end
end
- context 'when the migration has no batches remaining' do
+ context 'when migration has failed jobs' do
before do
- create(:batched_background_migration_job,
- batched_migration: migration,
- min_value: event3.id,
- max_value: event3.id,
- batch_size: 2,
- sub_batch_size: 1)
+ previous_job.update!(status: :failed)
end
- it_behaves_like 'it has completed the migration'
+ it 'retries the failed job' do
+ expect(migration_wrapper).to receive(:perform) do |job_record|
+ expect(job_record).to eq(previous_job)
+ end
+
+ expect { runner.run_migration_job(migration) }.to change { job_relation.count }.by(0)
+ end
+
+ context 'when failed job has reached the maximum number of attempts' do
+ before do
+ previous_job.update!(attempts: Gitlab::Database::BackgroundMigration::BatchedJob::MAX_ATTEMPTS)
+ end
+
+ it 'marks the migration as failed' do
+ expect(migration_wrapper).not_to receive(:perform)
+
+ expect { runner.run_migration_job(migration) }.to change { job_relation.count }.by(0)
+
+ expect(migration).to be_failed
+ end
+ end
+ end
+
+ context 'when migration has stuck jobs' do
+ before do
+ previous_job.update!(status: :running, updated_at: 1.hour.ago - Gitlab::Database::BackgroundMigration::BatchedJob::STUCK_JOBS_TIMEOUT)
+ end
+
+ it 'retries the stuck job' do
+ expect(migration_wrapper).to receive(:perform) do |job_record|
+ expect(job_record).to eq(previous_job)
+ end
+
+ expect { runner.run_migration_job(migration.reload) }.to change { job_relation.count }.by(0)
+ end
+ end
+
+ context 'when migration has possible stuck jobs' do
+ before do
+ previous_job.update!(status: :running, updated_at: 1.hour.from_now - Gitlab::Database::BackgroundMigration::BatchedJob::STUCK_JOBS_TIMEOUT)
+ end
+
+ it 'keeps the migration active' do
+ expect(migration_wrapper).not_to receive(:perform)
+
+ expect { runner.run_migration_job(migration) }.to change { job_relation.count }.by(0)
+
+ expect(migration.reload).to be_active
+ end
+ end
+
+ context 'when the migration has batches to process and failed jobs' do
+ before do
+ migration.update!(max_value: event3.id)
+ previous_job.update!(status: :failed)
+ end
+
+ it 'runs next batch then retries the failed job' do
+ expect(migration_wrapper).to receive(:perform) do |job_record|
+ expect(job_record).to eq(job_relation.last)
+ job_record.update!(status: :succeeded)
+ end
+
+ expect { runner.run_migration_job(migration) }.to change { job_relation.count }.by(1)
+
+ expect(migration_wrapper).to receive(:perform) do |job_record|
+ expect(job_record).to eq(previous_job)
+ end
+
+ expect { runner.run_migration_job(migration.reload) }.to change { job_relation.count }.by(0)
+ end
end
end
end
@@ -180,10 +264,12 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
it 'runs all jobs inline until finishing the migration' do
expect(migration_wrapper).to receive(:perform) do |job_record|
expect(job_record).to eq(job_relation.first)
+ job_record.update!(status: :succeeded)
end
expect(migration_wrapper).to receive(:perform) do |job_record|
expect(job_record).to eq(job_relation.last)
+ job_record.update!(status: :succeeded)
end
expect { runner.run_entire_migration(migration) }.to change { job_relation.count }.by(2)
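
Taken together, the runner examples describe a small scheduling policy: create the next batch while rows remain, otherwise retry a retriable (failed or stuck) job; a failed job that has exhausted MAX_ATTEMPTS flips the whole migration to failed, a recently updated running job keeps it active, and with nothing left the migration is marked finished. A compact sketch of that decision, where every method name is an assumption standing in for the real queries:

def next_runner_action(migration)
  if migration.batches_remaining?
    :create_next_batch          # "runs the migration job for the next batch"
  elsif (job = migration.retriable_jobs.first)
    [:retry_job, job]           # failed and stuck jobs are re-run in place
  elsif migration.failed_job_out_of_attempts?
    :mark_migration_failed      # MAX_ATTEMPTS reached => the migration fails
  elsif migration.recently_updated_running_jobs?
    :keep_active                # possibly-stuck jobs leave the migration active
  else
    :finish_migration           # no batches remaining => migration finished
  end
end
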
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
index 261e23d0745..43e34325419 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
@@ -119,7 +119,13 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
end
describe '#create_batched_job!' do
- let(:batched_migration) { create(:batched_background_migration) }
+ let(:batched_migration) do
+ create(:batched_background_migration,
+ batch_size: 999,
+ sub_batch_size: 99,
+ pause_ms: 250
+ )
+ end
it 'creates a batched_job with the correct batch configuration' do
batched_job = batched_migration.create_batched_job!(1, 5)
@@ -128,7 +134,9 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
min_value: 1,
max_value: 5,
batch_size: batched_migration.batch_size,
- sub_batch_size: batched_migration.sub_batch_size)
+ sub_batch_size: batched_migration.sub_batch_size,
+ pause_ms: 250
+ )
end
end
@@ -196,6 +204,22 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
it_behaves_like 'an attr_writer that demodulizes assigned class names', :batch_class_name
end
+ describe '#migrated_tuple_count' do
+ subject { batched_migration.migrated_tuple_count }
+
+ let(:batched_migration) { create(:batched_background_migration) }
+
+ before do
+ create_list(:batched_background_migration_job, 5, status: :succeeded, batch_size: 1_000, batched_migration: batched_migration)
+ create_list(:batched_background_migration_job, 1, status: :running, batch_size: 1_000, batched_migration: batched_migration)
+ create_list(:batched_background_migration_job, 1, status: :failed, batch_size: 1_000, batched_migration: batched_migration)
+ end
+
+ it 'sums the batch_size of succeeded jobs' do
+ expect(subject).to eq(5_000)
+ end
+ end
+
describe '#prometheus_labels' do
let(:batched_migration) { create(:batched_background_migration, job_class_name: 'TestMigration', table_name: 'foo', column_name: 'bar') }
@@ -208,4 +232,96 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
expect(batched_migration.prometheus_labels).to eq(labels)
end
end
+
+ describe '#smoothed_time_efficiency' do
+ let(:migration) { create(:batched_background_migration, interval: 120.seconds) }
+ let(:end_time) { Time.zone.now }
+
+ around do |example|
+ freeze_time do
+ example.run
+ end
+ end
+
+ let(:common_attrs) do
+ {
+ status: :succeeded,
+ batched_migration: migration,
+ finished_at: end_time
+ }
+ end
+
+ context 'when there are not enough jobs' do
+ subject { migration.smoothed_time_efficiency(number_of_jobs: 10) }
+
+ it 'returns nil' do
+ create_list(:batched_background_migration_job, 9, **common_attrs)
+
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'when there are enough jobs' do
+ subject { migration.smoothed_time_efficiency(number_of_jobs: number_of_jobs) }
+
+ let!(:jobs) { create_list(:batched_background_migration_job, number_of_jobs, **common_attrs.merge(batched_migration: migration)) }
+ let(:number_of_jobs) { 10 }
+
+ before do
+ expect(migration).to receive_message_chain(:batched_jobs, :successful_in_execution_order, :reverse_order, :limit).with(no_args).with(no_args).with(number_of_jobs).and_return(jobs)
+ end
+
+ def mock_efficiencies(*effs)
+ effs.each_with_index do |eff, i|
+ expect(jobs[i]).to receive(:time_efficiency).and_return(eff)
+ end
+ end
+
+ context 'example 1: increasing trend, but only recently crossed threshold' do
+ it 'returns the smoothed time efficiency' do
+ mock_efficiencies(1.1, 1, 0.95, 0.9, 0.8, 0.95, 0.9, 0.8, 0.9, 0.95)
+
+ expect(subject).to be_within(0.05).of(0.95)
+ end
+ end
+
+ context 'example 2: increasing trend, crossed threshold a while ago' do
+ it 'returns the smoothed time efficiency' do
+ mock_efficiencies(1.2, 1.1, 1, 1, 1.1, 1, 0.95, 0.9, 0.95, 0.9)
+
+ expect(subject).to be_within(0.05).of(1.1)
+ end
+ end
+
+ context 'example 3: decreasing trend, but only recently crossed threshold' do
+ it 'returns the smoothed time efficiency' do
+ mock_efficiencies(0.9, 0.95, 1, 1.2, 1.1, 1.2, 1.1, 1.0, 1.1, 1.0)
+
+ expect(subject).to be_within(0.05).of(1.0)
+ end
+ end
+
+ context 'example 4: latest run spiked' do
+ it 'returns the smoothed time efficiency' do
+ mock_efficiencies(1.2, 0.9, 0.8, 0.9, 0.95, 0.9, 0.92, 0.9, 0.95, 0.9)
+
+ expect(subject).to be_within(0.02).of(0.96)
+ end
+ end
+ end
+ end
+
+ describe '#optimize!' do
+ subject { batched_migration.optimize! }
+
+ let(:batched_migration) { create(:batched_background_migration) }
+ let(:optimizer) { instance_double('Gitlab::Database::BackgroundMigration::BatchOptimizer') }
+
+ it 'calls the BatchOptimizer' do
+ expect(Gitlab::Database::BackgroundMigration::BatchOptimizer).to receive(:new).with(batched_migration).and_return(optimizer)
+ expect(optimizer).to receive(:optimize!)
+
+ subject
+ end
+ end
end
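
The #smoothed_time_efficiency examples are consistent with a normalized, exponentially weighted average over the most recent jobs (newest first, older jobs discounted geometrically), returning nil when fewer than the requested number of jobs exist. A plain-Ruby sketch that reproduces the expected values above (the alpha default of 0.2 is an assumption; the spec never passes one explicitly):

def smoothed(efficiencies, alpha: 0.2)
  # efficiencies[0] belongs to the most recent job and carries the largest weight.
  weights = efficiencies.each_index.map { |i| (1 - alpha)**i }

  dividend = efficiencies.zip(weights).sum { |eff, weight| eff * weight }

  (dividend / weights.sum).round(2)
end

smoothed([1.1, 1, 0.95, 0.9, 0.8, 0.95, 0.9, 0.8, 0.9, 0.95])    # => 0.96 (example 1)
smoothed([1.2, 1.1, 1, 1, 1.1, 1, 0.95, 0.9, 0.95, 0.9])         # => 1.06 (example 2)
smoothed([1.2, 0.9, 0.8, 0.9, 0.95, 0.9, 0.92, 0.9, 0.95, 0.9])  # => 0.96 (example 4)
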
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb
index 00d13f23d36..c1183a15e37 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb
@@ -7,9 +7,16 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
let(:job_class) { Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJob }
+ let_it_be(:pause_ms) { 250 }
let_it_be(:active_migration) { create(:batched_background_migration, :active, job_arguments: [:id, :other_id]) }
- let!(:job_record) { create(:batched_background_migration_job, batched_migration: active_migration) }
+ let!(:job_record) do
+ create(:batched_background_migration_job,
+ batched_migration: active_migration,
+ pause_ms: pause_ms
+ )
+ end
+
let(:job_instance) { double('job instance', batch_metrics: {}) }
before do
@@ -17,7 +24,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
end
it 'runs the migration job' do
- expect(job_instance).to receive(:perform).with(1, 10, 'events', 'id', 1, 'id', 'other_id')
+ expect(job_instance).to receive(:perform).with(1, 10, 'events', 'id', 1, pause_ms, 'id', 'other_id')
subject
end
@@ -42,6 +49,42 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
end
end
+ context 'when running a job that failed previously' do
+ let!(:job_record) do
+ create(:batched_background_migration_job,
+ batched_migration: active_migration,
+ pause_ms: pause_ms,
+ attempts: 1,
+ status: :failed,
+ finished_at: 1.hour.ago,
+ metrics: { 'my_metrics' => 'some_value' }
+ )
+ end
+
+ it 'increments attempts and updates other fields' do
+ updated_metrics = { 'updated_metrics' => 'some_value' }
+
+ expect(job_instance).to receive(:perform)
+ expect(job_instance).to receive(:batch_metrics).and_return(updated_metrics)
+
+ expect(job_record).to receive(:update!).with(
+ hash_including(attempts: 2, status: :running, finished_at: nil, metrics: {})
+ ).and_call_original
+
+ freeze_time do
+ subject
+
+ job_record.reload
+
+ expect(job_record).not_to be_failed
+ expect(job_record.attempts).to eq(2)
+ expect(job_record.started_at).to eq(Time.current)
+ expect(job_record.finished_at).to eq(Time.current)
+ expect(job_record.metrics).to eq(updated_metrics)
+ end
+ end
+ end
+
context 'reporting prometheus metrics' do
let(:labels) { job_record.batched_migration.prometheus_labels }
@@ -61,12 +104,26 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
subject
end
+ it 'reports interval' do
+ expect(described_class.metrics[:gauge_interval]).to receive(:set).with(labels, job_record.batched_migration.interval)
+
+ subject
+ end
+
it 'reports updated tuples (currently based on batch_size)' do
expect(described_class.metrics[:counter_updated_tuples]).to receive(:increment).with(labels, job_record.batch_size)
subject
end
+ it 'reports migrated tuples' do
+ count = double
+ expect(job_record.batched_migration).to receive(:migrated_tuple_count).and_return(count)
+ expect(described_class.metrics[:gauge_migrated_tuples]).to receive(:set).with(labels, count)
+
+ subject
+ end
+
it 'reports summary of query timings' do
metrics = { 'timings' => { 'update_all' => [1, 2, 3, 4, 5] } }
@@ -82,14 +139,26 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
subject
end
- it 'reports time efficiency' do
+ it 'reports job duration' do
freeze_time do
expect(Time).to receive(:current).and_return(Time.zone.now - 5.seconds).ordered
- expect(Time).to receive(:current).and_return(Time.zone.now).ordered
+ allow(Time).to receive(:current).and_call_original
+
+ expect(described_class.metrics[:gauge_job_duration]).to receive(:set).with(labels, 5.seconds)
- ratio = 5 / job_record.batched_migration.interval.to_f
+ subject
+ end
+ end
- expect(described_class.metrics[:histogram_time_efficiency]).to receive(:observe).with(labels, ratio)
+ it 'reports the total tuple count for the migration' do
+ expect(described_class.metrics[:gauge_total_tuple_count]).to receive(:set).with(labels, job_record.batched_migration.total_tuple_count)
+
+ subject
+ end
+
+ it 'reports last updated at timestamp' do
+ freeze_time do
+ expect(described_class.metrics[:gauge_last_update_time]).to receive(:set).with(labels, Time.current.to_i)
subject
end
@@ -98,7 +167,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
context 'when the migration job does not raise an error' do
it 'marks the tracking record as succeeded' do
- expect(job_instance).to receive(:perform).with(1, 10, 'events', 'id', 1, 'id', 'other_id')
+ expect(job_instance).to receive(:perform).with(1, 10, 'events', 'id', 1, pause_ms, 'id', 'other_id')
freeze_time do
subject
@@ -112,19 +181,24 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
end
context 'when the migration job raises an error' do
- it 'marks the tracking record as failed before raising the error' do
- expect(job_instance).to receive(:perform)
- .with(1, 10, 'events', 'id', 1, 'id', 'other_id')
- .and_raise(RuntimeError, 'Something broke!')
+ shared_examples 'an error is raised' do |error_class|
+ it 'marks the tracking record as failed' do
+ expect(job_instance).to receive(:perform)
+ .with(1, 10, 'events', 'id', 1, pause_ms, 'id', 'other_id')
+ .and_raise(error_class)
- freeze_time do
- expect { subject }.to raise_error(RuntimeError, 'Something broke!')
+ freeze_time do
+ expect { subject }.to raise_error(error_class)
- reloaded_job_record = job_record.reload
+ reloaded_job_record = job_record.reload
- expect(reloaded_job_record).to be_failed
- expect(reloaded_job_record.finished_at).to eq(Time.current)
+ expect(reloaded_job_record).to be_failed
+ expect(reloaded_job_record.finished_at).to eq(Time.current)
+ end
end
end
+
+ it_behaves_like 'an error is raised', RuntimeError.new('Something broke!')
+ it_behaves_like 'an error is raised', SignalException.new('SIGTERM')
end
end
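
Two behaviours in the wrapper diff are worth spelling out: pause_ms is now threaded into the job as a positional argument ahead of the job_arguments, and re-running a failed tracking record increments attempts while clearing status, finished_at, and metrics. An annotated version of the perform call the spec stubs (the comments are interpretation; the values are the spec's fixtures):

def run_copy_job(job_instance, pause_ms:, job_arguments: %w[id other_id])
  job_instance.perform(
    1, 10,            # batch range: min_value and max_value
    'events', 'id',   # source table and the column used for batching
    1,                # sub_batch_size
    pause_ms,         # millisecond pause between sub-batches (new in this diff)
    *job_arguments    # source/destination columns for CopyColumnUsingBackgroundMigrationJob
  )
end
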
diff --git a/spec/lib/gitlab/database/migration_helpers/cascading_namespace_settings_spec.rb b/spec/lib/gitlab/database/migration_helpers/cascading_namespace_settings_spec.rb
new file mode 100644
index 00000000000..e11ffe53c61
--- /dev/null
+++ b/spec/lib/gitlab/database/migration_helpers/cascading_namespace_settings_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::MigrationHelpers::CascadingNamespaceSettings do
+ let(:migration) do
+ ActiveRecord::Migration.new.extend(described_class)
+ end
+
+ describe '#add_cascading_namespace_setting' do
+ it 'creates the required columns', :aggregate_failures do
+ expect(migration).to receive(:add_column).with(:namespace_settings, :some_setting, :integer, null: true, default: nil)
+ expect(migration).to receive(:add_column).with(:namespace_settings, :lock_some_setting, :boolean, null: false, default: false)
+
+ expect(migration).to receive(:add_column).with(:application_settings, :some_setting, :integer, null: false, default: 5)
+ expect(migration).to receive(:add_column).with(:application_settings, :lock_some_setting, :boolean, null: false, default: false)
+
+ migration.add_cascading_namespace_setting(:some_setting, :integer, null: false, default: 5)
+ end
+
+ context 'when columns already exist' do
+ before do
+ migration.add_column(:namespace_settings, :cascading_setting, :integer)
+ migration.add_column(:application_settings, :lock_cascading_setting, :boolean)
+ end
+
+ it 'raises an error when some columns already exist' do
+ expect do
+ migration.add_cascading_namespace_setting(:cascading_setting, :integer)
+ end.to raise_error %r/Existing columns: namespace_settings.cascading_setting, application_settings.lock_cascading_setting/
+ end
+ end
+ end
+
+ describe '#remove_cascading_namespace_setting' do
+ before do
+ allow(migration).to receive(:column_exists?).and_return(true)
+ end
+
+ it 'removes the columns', :aggregate_failures do
+ expect(migration).to receive(:remove_column).with(:namespace_settings, :some_setting)
+ expect(migration).to receive(:remove_column).with(:namespace_settings, :lock_some_setting)
+
+ expect(migration).to receive(:remove_column).with(:application_settings, :some_setting)
+ expect(migration).to receive(:remove_column).with(:application_settings, :lock_some_setting)
+
+ migration.remove_cascading_namespace_setting(:some_setting)
+ end
+ end
+end
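
add_cascading_namespace_setting, per the expectations above, adds a nullable setting column plus a lock_<setting> boolean to namespace_settings and the same pair (honouring the caller's null/default options) to application_settings, and refuses to run if any of those columns already exists. A hypothetical migration using the pair of helpers (the setting name and the include line are assumptions):

class AddSomeSettingCascadingSetting < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers::CascadingNamespaceSettings

  def up
    # Creates namespace_settings.some_setting / lock_some_setting and
    # application_settings.some_setting / lock_some_setting.
    add_cascading_namespace_setting(:some_setting, :integer, null: false, default: 5)
  end

  def down
    remove_cascading_namespace_setting(:some_setting)
  end
end
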
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 44293086e79..40720628a89 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::MigrationHelpers do
include Database::TableSchemaHelpers
+ include Database::TriggerHelpers
let(:model) do
ActiveRecord::Migration.new.extend(described_class)
@@ -834,7 +835,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
it 'renames a column concurrently' do
expect(model).to receive(:check_trigger_permissions!).with(:users)
- expect(model).to receive(:install_rename_triggers_for_postgresql)
+ expect(model).to receive(:install_rename_triggers)
.with(:users, :old, :new)
expect(model).to receive(:add_column)
@@ -946,7 +947,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
it 'reverses the operations of rename_column_concurrently' do
expect(model).to receive(:check_trigger_permissions!).with(:users)
- expect(model).to receive(:remove_rename_triggers_for_postgresql)
+ expect(model).to receive(:remove_rename_triggers)
.with(:users, /trigger_.{12}/)
expect(model).to receive(:remove_column).with(:users, :new)
@@ -959,7 +960,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
it 'cleans up the renaming procedure' do
expect(model).to receive(:check_trigger_permissions!).with(:users)
- expect(model).to receive(:remove_rename_triggers_for_postgresql)
+ expect(model).to receive(:remove_rename_triggers)
.with(:users, /trigger_.{12}/)
expect(model).to receive(:remove_column).with(:users, :old)
@@ -999,7 +1000,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
it 'reverses the operations of cleanup_concurrent_column_rename' do
expect(model).to receive(:check_trigger_permissions!).with(:users)
- expect(model).to receive(:install_rename_triggers_for_postgresql)
+ expect(model).to receive(:install_rename_triggers)
.with(:users, :old, :new)
expect(model).to receive(:add_column)
@@ -1094,7 +1095,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
it 'reverses the operations of change_column_type_concurrently' do
expect(model).to receive(:check_trigger_permissions!).with(:users)
- expect(model).to receive(:remove_rename_triggers_for_postgresql)
+ expect(model).to receive(:remove_rename_triggers)
.with(:users, /trigger_.{12}/)
expect(model).to receive(:remove_column).with(:users, "old_for_type_change")
@@ -1159,7 +1160,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:rename_column)
.with(:users, temp_undo_cleanup_column, :old)
- expect(model).to receive(:install_rename_triggers_for_postgresql)
+ expect(model).to receive(:install_rename_triggers)
.with(:users, :old, 'old_for_type_change')
model.undo_cleanup_concurrent_column_type_change(:users, :old, :string)
@@ -1185,7 +1186,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:rename_column)
.with(:users, temp_undo_cleanup_column, :old)
- expect(model).to receive(:install_rename_triggers_for_postgresql)
+ expect(model).to receive(:install_rename_triggers)
.with(:users, :old, 'old_for_type_change')
model.undo_cleanup_concurrent_column_type_change(
@@ -1206,8 +1207,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
- describe '#install_rename_triggers_for_postgresql' do
- it 'installs the triggers for PostgreSQL' do
+ describe '#install_rename_triggers' do
+ it 'installs the triggers' do
copy_trigger = double('copy trigger')
expect(Gitlab::Database::UnidirectionalCopyTrigger).to receive(:on_table)
@@ -1215,11 +1216,11 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(copy_trigger).to receive(:create).with(:old, :new, trigger_name: 'foo')
- model.install_rename_triggers_for_postgresql(:users, :old, :new, trigger_name: 'foo')
+ model.install_rename_triggers(:users, :old, :new, trigger_name: 'foo')
end
end
- describe '#remove_rename_triggers_for_postgresql' do
+ describe '#remove_rename_triggers' do
it 'removes the function and trigger' do
copy_trigger = double('copy trigger')
@@ -1228,7 +1229,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(copy_trigger).to receive(:drop).with('foo')
- model.remove_rename_triggers_for_postgresql('bar', 'foo')
+ model.remove_rename_triggers('bar', 'foo')
end
end
@@ -1702,10 +1703,16 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
+ describe '#convert_to_bigint_column' do
+ it 'returns the name of the temporary column used to convert to bigint' do
+ expect(model.convert_to_bigint_column(:id)).to eq('id_convert_to_bigint')
+ end
+ end
+
describe '#initialize_conversion_of_integer_to_bigint' do
let(:table) { :test_table }
let(:column) { :id }
- let(:tmp_column) { "#{column}_convert_to_bigint" }
+ let(:tmp_column) { model.convert_to_bigint_column(column) }
before do
model.create_table table, id: false do |t|
@@ -1730,12 +1737,10 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
- context 'when the column to convert does not exist' do
- let(:column) { :foobar }
-
+ context 'when the column to migrate does not exist' do
it 'raises an error' do
- expect { model.initialize_conversion_of_integer_to_bigint(table, column) }
- .to raise_error("Column #{column} does not exist on #{table}")
+ expect { model.initialize_conversion_of_integer_to_bigint(table, :this_column_is_not_real) }
+ .to raise_error(ArgumentError, "Column this_column_is_not_real does not exist on #{table}")
end
end
@@ -1743,7 +1748,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
it 'creates a not-null bigint column and installs triggers' do
expect(model).to receive(:add_column).with(table, tmp_column, :bigint, default: 0, null: false)
- expect(model).to receive(:install_rename_triggers).with(table, column, tmp_column)
+ expect(model).to receive(:install_rename_triggers).with(table, [column], [tmp_column])
model.initialize_conversion_of_integer_to_bigint(table, column)
end
@@ -1755,7 +1760,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
it 'creates a not-null bigint column and installs triggers' do
expect(model).to receive(:add_column).with(table, tmp_column, :bigint, default: 0, null: false)
- expect(model).to receive(:install_rename_triggers).with(table, column, tmp_column)
+ expect(model).to receive(:install_rename_triggers).with(table, [column], [tmp_column])
model.initialize_conversion_of_integer_to_bigint(table, column)
end
@@ -1767,22 +1772,83 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
it 'creates a nullable bigint column and installs triggers' do
expect(model).to receive(:add_column).with(table, tmp_column, :bigint, default: nil)
- expect(model).to receive(:install_rename_triggers).with(table, column, tmp_column)
+ expect(model).to receive(:install_rename_triggers).with(table, [column], [tmp_column])
model.initialize_conversion_of_integer_to_bigint(table, column)
end
end
+
+ context 'when multiple columns are given' do
+ it 'creates the correct columns and installs the trigger' do
+ columns_to_convert = %i[id non_nullable_column nullable_column]
+ temporary_columns = columns_to_convert.map { |column| model.convert_to_bigint_column(column) }
+
+ expect(model).to receive(:add_column).with(table, temporary_columns[0], :bigint, default: 0, null: false)
+ expect(model).to receive(:add_column).with(table, temporary_columns[1], :bigint, default: 0, null: false)
+ expect(model).to receive(:add_column).with(table, temporary_columns[2], :bigint, default: nil)
+
+ expect(model).to receive(:install_rename_triggers).with(table, columns_to_convert, temporary_columns)
+
+ model.initialize_conversion_of_integer_to_bigint(table, columns_to_convert)
+ end
+ end
+ end
+
+ describe '#revert_initialize_conversion_of_integer_to_bigint' do
+ let(:table) { :test_table }
+
+ before do
+ model.create_table table, id: false do |t|
+ t.integer :id, primary_key: true
+ t.integer :other_id
+ end
+
+ model.initialize_conversion_of_integer_to_bigint(table, columns)
+ end
+
+ context 'when single column is given' do
+ let(:columns) { :id }
+
+ it 'removes column, trigger, and function' do
+ temporary_column = model.convert_to_bigint_column(:id)
+ trigger_name = model.rename_trigger_name(table, :id, temporary_column)
+
+ model.revert_initialize_conversion_of_integer_to_bigint(table, columns)
+
+ expect(model.column_exists?(table, temporary_column)).to eq(false)
+ expect_trigger_not_to_exist(table, trigger_name)
+ expect_function_not_to_exist(trigger_name)
+ end
+ end
+
+ context 'when multiple columns are given' do
+ let(:columns) { [:id, :other_id] }
+
+ it 'removes column, trigger, and function' do
+ temporary_columns = columns.map { |column| model.convert_to_bigint_column(column) }
+ trigger_name = model.rename_trigger_name(table, columns, temporary_columns)
+
+ model.revert_initialize_conversion_of_integer_to_bigint(table, columns)
+
+ temporary_columns.each do |column|
+ expect(model.column_exists?(table, column)).to eq(false)
+ end
+ expect_trigger_not_to_exist(table, trigger_name)
+ expect_function_not_to_exist(trigger_name)
+ end
+ end
end
describe '#backfill_conversion_of_integer_to_bigint' do
let(:table) { :_test_backfill_table }
let(:column) { :id }
- let(:tmp_column) { "#{column}_convert_to_bigint" }
+ let(:tmp_column) { model.convert_to_bigint_column(column) }
before do
model.create_table table, id: false do |t|
t.integer :id, primary_key: true
t.text :message, null: false
+ t.integer :other_id
t.timestamps
end
@@ -1808,14 +1874,14 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
it 'raises an error' do
expect { model.backfill_conversion_of_integer_to_bigint(table, column) }
- .to raise_error("Column #{column} does not exist on #{table}")
+ .to raise_error(ArgumentError, "Column #{column} does not exist on #{table}")
end
end
context 'when the temporary column does not exist' do
it 'raises an error' do
expect { model.backfill_conversion_of_integer_to_bigint(table, column) }
- .to raise_error('The temporary column does not exist, initialize it with `initialize_conversion_of_integer_to_bigint`')
+ .to raise_error(ArgumentError, "Column #{tmp_column} does not exist on #{table}")
end
end
@@ -1829,48 +1895,112 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
let(:migration_relation) { Gitlab::Database::BackgroundMigration::BatchedMigration.active }
before do
- model.initialize_conversion_of_integer_to_bigint(table, column)
+ model.initialize_conversion_of_integer_to_bigint(table, columns)
model_class.create!(message: 'hello')
model_class.create!(message: 'so long')
end
- it 'creates the batched migration tracking record' do
- last_record = model_class.create!(message: 'goodbye')
+ context 'when a single column is being converted' do
+ let(:columns) { column }
- expect do
- model.backfill_conversion_of_integer_to_bigint(table, column, batch_size: 2, sub_batch_size: 1)
- end.to change { migration_relation.count }.by(1)
-
- expect(migration_relation.last).to have_attributes(
- job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
- table_name: table.to_s,
- column_name: column.to_s,
- min_value: 1,
- max_value: last_record.id,
- interval: 120,
- batch_size: 2,
- sub_batch_size: 1,
- job_arguments: [column.to_s, "#{column}_convert_to_bigint"]
- )
+ it 'creates the batched migration tracking record' do
+ last_record = model_class.create!(message: 'goodbye')
+
+ expect do
+ model.backfill_conversion_of_integer_to_bigint(table, column, batch_size: 2, sub_batch_size: 1)
+ end.to change { migration_relation.count }.by(1)
+
+ expect(migration_relation.last).to have_attributes(
+ job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
+ table_name: table.to_s,
+ column_name: column.to_s,
+ min_value: 1,
+ max_value: last_record.id,
+ interval: 120,
+ batch_size: 2,
+ sub_batch_size: 1,
+ job_arguments: [[column.to_s], [model.convert_to_bigint_column(column)]]
+ )
+ end
end
- context 'when the migration should be performed inline' do
- it 'calls the runner to run the entire migration' do
- expect(model).to receive(:perform_background_migration_inline?).and_return(true)
+ context 'when multiple columns are being converted' do
+ let(:other_column) { :other_id }
+ let(:other_tmp_column) { model.convert_to_bigint_column(other_column) }
+ let(:columns) { [column, other_column] }
- expect_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |scheduler|
- expect(scheduler).to receive(:run_entire_migration) do |batched_migration|
- expect(batched_migration).to eq(migration_relation.last)
- end
- end
+ it 'creates the batched migration tracking record' do
+ last_record = model_class.create!(message: 'goodbye', other_id: 50)
- model.backfill_conversion_of_integer_to_bigint(table, column, batch_size: 2, sub_batch_size: 1)
+ expect do
+ model.backfill_conversion_of_integer_to_bigint(table, columns, batch_size: 2, sub_batch_size: 1)
+ end.to change { migration_relation.count }.by(1)
+
+ expect(migration_relation.last).to have_attributes(
+ job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
+ table_name: table.to_s,
+ column_name: column.to_s,
+ min_value: 1,
+ max_value: last_record.id,
+ interval: 120,
+ batch_size: 2,
+ sub_batch_size: 1,
+ job_arguments: [[column.to_s, other_column.to_s], [tmp_column, other_tmp_column]]
+ )
end
end
end
end
+ describe '#revert_backfill_conversion_of_integer_to_bigint' do
+ let(:table) { :_test_backfill_table }
+ let(:primary_key) { :id }
+
+ before do
+ model.create_table table, id: false do |t|
+ t.integer primary_key, primary_key: true
+ t.text :message, null: false
+ t.integer :other_id
+ t.timestamps
+ end
+
+ model.initialize_conversion_of_integer_to_bigint(table, columns, primary_key: primary_key)
+ model.backfill_conversion_of_integer_to_bigint(table, columns, primary_key: primary_key)
+ end
+
+ context 'when a single column is being converted' do
+ let(:columns) { :id }
+
+ it 'deletes the batched migration tracking record' do
+ expect do
+ model.revert_backfill_conversion_of_integer_to_bigint(table, columns)
+ end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(-1)
+ end
+ end
+
+    context 'when multiple columns are being converted' do
+ let(:columns) { [:id, :other_id] }
+
+ it 'deletes the batched migration tracking record' do
+ expect do
+ model.revert_backfill_conversion_of_integer_to_bigint(table, columns)
+ end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(-1)
+ end
+ end
+
+ context 'when primary key column has custom name' do
+ let(:primary_key) { :other_pk }
+ let(:columns) { :other_id }
+
+ it 'deletes the batched migration tracking record' do
+ expect do
+ model.revert_backfill_conversion_of_integer_to_bigint(table, columns, primary_key: primary_key)
+ end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(-1)
+ end
+ end
+ end
+
describe '#index_exists_by_name?' do
it 'returns true if an index exists' do
ActiveRecord::Base.connection.execute(
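
The migration_helpers changes generalize the integer-to-bigint conversion to several columns at once: convert_to_bigint_column names each temporary column (id becomes id_convert_to_bigint), initialize_conversion_of_integer_to_bigint adds the temporaries and installs one rename trigger over the column arrays, and backfill_conversion_of_integer_to_bigint enqueues a CopyColumnUsingBackgroundMigrationJob batched migration whose job_arguments are those two arrays. A hypothetical pair of migrations using the helpers (table/column names and the include line are assumptions):

class InitConvertIdsToBigint < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  COLUMNS = %i[id other_id]

  def up
    initialize_conversion_of_integer_to_bigint(:_test_backfill_table, COLUMNS)
  end

  def down
    revert_initialize_conversion_of_integer_to_bigint(:_test_backfill_table, COLUMNS)
  end
end

class BackfillConvertIdsToBigint < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  COLUMNS = %i[id other_id]

  def up
    backfill_conversion_of_integer_to_bigint(:_test_backfill_table, COLUMNS, batch_size: 2, sub_batch_size: 1)
  end

  def down
    revert_backfill_conversion_of_integer_to_bigint(:_test_backfill_table, COLUMNS)
  end
end
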
diff --git a/spec/lib/gitlab/database/migrations/instrumentation_spec.rb b/spec/lib/gitlab/database/migrations/instrumentation_spec.rb
index 3804dc52a77..6d047eed3bb 100644
--- a/spec/lib/gitlab/database/migrations/instrumentation_spec.rb
+++ b/spec/lib/gitlab/database/migrations/instrumentation_spec.rb
@@ -74,7 +74,7 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do
before do
instance.observe(migration) { raise 'something went wrong' }
- rescue
+ rescue StandardError
# ignore
end
diff --git a/spec/lib/gitlab/database/migrations/observers/query_log_spec.rb b/spec/lib/gitlab/database/migrations/observers/query_log_spec.rb
new file mode 100644
index 00000000000..195e7114582
--- /dev/null
+++ b/spec/lib/gitlab/database/migrations/observers/query_log_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Migrations::Observers::QueryLog do
+ subject { described_class.new }
+
+ let(:observation) { Gitlab::Database::Migrations::Observation.new(migration) }
+ let(:connection) { ActiveRecord::Base.connection }
+ let(:query) { 'select 1' }
+ let(:directory_path) { Dir.mktmpdir }
+ let(:log_file) { "#{directory_path}/current.log" }
+ let(:migration) { 20210422152437 }
+
+ before do
+ stub_const('Gitlab::Database::Migrations::Instrumentation::RESULT_DIR', directory_path)
+ end
+
+ after do
+ FileUtils.remove_entry(directory_path)
+ end
+
+ it 'writes a file with the query log' do
+ observe
+
+ expect(File.read("#{directory_path}/#{migration}.log")).to include(query)
+ end
+
+ it 'does not change the default logger' do
+ expect { observe }.not_to change { ActiveRecord::Base.logger }
+ end
+
+ def observe
+ subject.before
+ connection.execute(query)
+ subject.after
+ subject.record(observation)
+ end
+end
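
The QueryLog observer, as the new spec drives it, captures the SQL executed between before and after without replacing the global ActiveRecord logger, and record finalizes the observation so the queries end up in a per-migration log under the stubbed RESULT_DIR. The lifecycle, written out (this mirrors the spec's own observe helper):

observation = Gitlab::Database::Migrations::Observation.new(20210422152437)
observer    = Gitlab::Database::Migrations::Observers::QueryLog.new

observer.before                                      # start capturing queries
ActiveRecord::Base.connection.execute('select 1')    # captured into the observer's log
observer.after                                       # stop capturing
observer.record(observation)                         # the spec then finds the query in RESULT_DIR/<migration>.log
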
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb
index 93dbd9d7c30..83f2436043c 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::ForeignKeyHelpers
end
let_it_be(:connection) { ActiveRecord::Base.connection }
+
let(:referenced_table) { :issues }
let(:function_name) { '_test_partitioned_foreign_keys_function' }
let(:trigger_name) { '_test_partitioned_foreign_keys_trigger' }
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/index_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/index_helpers_spec.rb
index 603f3dc41af..c3edc3a0c87 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/index_helpers_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/index_helpers_spec.rb
@@ -64,7 +64,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::IndexHelpers do
def expect_add_concurrent_index_and_call_original(table, column, index)
expect(migration).to receive(:add_concurrent_index).ordered.with(table, column, name: index)
- .and_wrap_original { |_, table, column, options| connection.add_index(table, column, options) }
+ .and_wrap_original { |_, table, column, options| connection.add_index(table, column, **options) }
end
end
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
index 5b2a29d1d2d..79ddb450d7a 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
end
let_it_be(:connection) { ActiveRecord::Base.connection }
+
let(:source_table) { :_test_original_table }
let(:partitioned_table) { '_test_migration_partitioned_table' }
let(:function_name) { '_test_migration_function_name' }
diff --git a/spec/lib/gitlab/database/reindexing/concurrent_reindex_spec.rb b/spec/lib/gitlab/database/reindexing/concurrent_reindex_spec.rb
index 51fc7c6620b..d9077969003 100644
--- a/spec/lib/gitlab/database/reindexing/concurrent_reindex_spec.rb
+++ b/spec/lib/gitlab/database/reindexing/concurrent_reindex_spec.rb
@@ -111,7 +111,7 @@ RSpec.describe Gitlab::Database::Reindexing::ConcurrentReindex, '#perform' do
end
it 'replaces the existing index with an identical index' do
- expect(connection).to receive(:execute).with('SET statement_timeout TO \'21600s\'').twice
+ expect(connection).to receive(:execute).with('SET statement_timeout TO \'32400s\'')
expect_to_execute_concurrently_in_order(create_index)
@@ -123,6 +123,10 @@ RSpec.describe Gitlab::Database::Reindexing::ConcurrentReindex, '#perform' do
expect_index_rename(replacement_name, index.name)
expect_index_rename(replaced_name, replacement_name)
+ expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
+ expect(instance).to receive(:run).with(raise_on_exhaustion: false).and_yield
+ end
+
expect_to_execute_concurrently_in_order(drop_index)
subject.perform
@@ -136,7 +140,7 @@ RSpec.describe Gitlab::Database::Reindexing::ConcurrentReindex, '#perform' do
end
it 'rebuilds table statistics before dropping the original index' do
- expect(connection).to receive(:execute).with('SET statement_timeout TO \'21600s\'').twice
+ expect(connection).to receive(:execute).with('SET statement_timeout TO \'32400s\'')
expect_to_execute_concurrently_in_order(create_index)
@@ -152,7 +156,7 @@ RSpec.describe Gitlab::Database::Reindexing::ConcurrentReindex, '#perform' do
expect_index_rename(replacement_name, index.name)
expect_index_rename(replaced_name, replacement_name)
- expect_to_execute_concurrently_in_order(drop_index)
+ expect_index_drop(drop_index)
subject.perform
@@ -166,9 +170,7 @@ RSpec.describe Gitlab::Database::Reindexing::ConcurrentReindex, '#perform' do
end
it 'replaces the existing index with an identical index' do
- expect(connection).to receive(:execute).with('SET statement_timeout TO \'21600s\'').exactly(3).times
-
- expect_to_execute_concurrently_in_order(drop_index)
+ expect_index_drop(drop_index)
expect_to_execute_concurrently_in_order(create_index)
expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
@@ -179,7 +181,7 @@ RSpec.describe Gitlab::Database::Reindexing::ConcurrentReindex, '#perform' do
expect_index_rename(replacement_name, index.name)
expect_index_rename(replaced_name, replacement_name)
- expect_to_execute_concurrently_in_order(drop_index)
+ expect_index_drop(drop_index)
subject.perform
@@ -192,6 +194,10 @@ RSpec.describe Gitlab::Database::Reindexing::ConcurrentReindex, '#perform' do
expect(connection).to receive(:execute).with(create_index).ordered
.and_raise(ActiveRecord::ConnectionTimeoutError, 'connect timeout')
+ expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
+ expect(instance).to receive(:run).with(raise_on_exhaustion: false).and_yield
+ end
+
expect_to_execute_concurrently_in_order(drop_index)
expect { subject.perform }.to raise_error(ActiveRecord::ConnectionTimeoutError, /connect timeout/)
@@ -207,6 +213,10 @@ RSpec.describe Gitlab::Database::Reindexing::ConcurrentReindex, '#perform' do
expect_to_execute_concurrently_in_order(create_index)
+ expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
+ expect(instance).to receive(:run).with(raise_on_exhaustion: false).and_yield
+ end
+
expect_to_execute_concurrently_in_order(drop_index)
expect { subject.perform }.to raise_error(described_class::ReindexError, /replacement index was created as INVALID/)
@@ -228,7 +238,7 @@ RSpec.describe Gitlab::Database::Reindexing::ConcurrentReindex, '#perform' do
expect_index_rename(index.name, replaced_name).and_raise(ActiveRecord::ConnectionTimeoutError, 'connect timeout')
- expect_to_execute_concurrently_in_order(drop_index)
+ expect_index_drop(drop_index)
expect { subject.perform }.to raise_error(ActiveRecord::ConnectionTimeoutError, /connect timeout/)
@@ -245,7 +255,7 @@ RSpec.describe Gitlab::Database::Reindexing::ConcurrentReindex, '#perform' do
.and_raise(::Gitlab::Database::WithLockRetries::AttemptsExhaustedError, 'exhausted')
end
- expect_to_execute_concurrently_in_order(drop_index)
+ expect_index_drop(drop_index)
expect { subject.perform }.to raise_error(::Gitlab::Database::WithLockRetries::AttemptsExhaustedError, /exhausted/)
@@ -270,6 +280,14 @@ RSpec.describe Gitlab::Database::Reindexing::ConcurrentReindex, '#perform' do
SQL
end
+ def expect_index_drop(drop_index)
+ expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
+ expect(instance).to receive(:run).with(raise_on_exhaustion: false).and_yield
+ end
+
+ expect_to_execute_concurrently_in_order(drop_index)
+ end
+
def find_index_create_statement
ActiveRecord::Base.connection.select_value(<<~SQL)
SELECT indexdef
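The new expect_index_drop helper encodes that dropping the old index is now expected to happen inside WithLockRetries with raise_on_exhaustion: false, so an exhausted retry budget does not abort the whole reindex. A rough sketch of the production-side pattern these expectations describe, with illustrative names only, not the exact GitLab implementation:

    # Illustrative: drop the superseded index under lock retries, tolerating exhaustion.
    def remove_index(connection, index_name)
      Gitlab::Database::WithLockRetries.new.run(raise_on_exhaustion: false) do
        connection.execute("DROP INDEX CONCURRENTLY IF EXISTS #{index_name}")
      end
    end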
diff --git a/spec/lib/gitlab/database/schema_cache_with_renamed_table_spec.rb b/spec/lib/gitlab/database/schema_cache_with_renamed_table_spec.rb
new file mode 100644
index 00000000000..8c0c4155ccc
--- /dev/null
+++ b/spec/lib/gitlab/database/schema_cache_with_renamed_table_spec.rb
@@ -0,0 +1,83 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::SchemaCacheWithRenamedTable do
+ let(:old_model) do
+ Class.new(ActiveRecord::Base) do
+ self.table_name = 'projects'
+ end
+ end
+
+ let(:new_model) do
+ Class.new(ActiveRecord::Base) do
+ self.table_name = 'projects_new'
+ end
+ end
+
+ before do
+ stub_const('Gitlab::Database::TABLES_TO_BE_RENAMED', { 'projects' => 'projects_new' })
+ end
+
+ context 'when table is not renamed yet' do
+ before do
+ old_model.reset_column_information
+ ActiveRecord::Base.connection.schema_cache.clear!
+ end
+
+ it 'uses the original table to look up metadata' do
+ expect(old_model.primary_key).to eq('id')
+ end
+ end
+
+ context 'when table is renamed' do
+ before do
+ ActiveRecord::Base.connection.execute("ALTER TABLE projects RENAME TO projects_new")
+ ActiveRecord::Base.connection.execute("CREATE VIEW projects AS SELECT * FROM projects_new")
+
+ old_model.reset_column_information
+ ActiveRecord::Base.connection.schema_cache.clear!
+ end
+
+ it 'uses the renamed table to look up metadata' do
+ expect(old_model.primary_key).to eq('id')
+ end
+
+ it 'has primary key' do
+ expect(old_model.primary_key).to eq('id')
+ expect(old_model.primary_key).to eq(new_model.primary_key)
+ end
+
+ it 'has the same column definitions' do
+ expect(old_model.columns).to eq(new_model.columns)
+ end
+
+ it 'has the same indexes' do
+ indexes_for_old_table = ActiveRecord::Base.connection.schema_cache.indexes('projects')
+ indexes_for_new_table = ActiveRecord::Base.connection.schema_cache.indexes('projects_new')
+
+ expect(indexes_for_old_table).to eq(indexes_for_new_table)
+ end
+
+ it 'has the same column_hash' do
+ columns_hash_for_old_table = ActiveRecord::Base.connection.schema_cache.columns_hash('projects')
+ columns_hash_for_new_table = ActiveRecord::Base.connection.schema_cache.columns_hash('projects_new')
+
+ expect(columns_hash_for_old_table).to eq(columns_hash_for_new_table)
+ end
+
+ describe 'when the table behind a model is actually a view' do
+ let(:group) { create(:group) }
+ let(:project_attributes) { attributes_for(:project, namespace_id: group.id).except(:creator) }
+ let(:record) { old_model.create!(project_attributes) }
+
+ it 'can persist records' do
+ expect(record.reload.attributes).to eq(new_model.find(record.id).attributes)
+ end
+
+ it 'can find records' do
+ expect(old_model.find_by_id(record.id)).not_to be_nil
+ end
+ end
+ end
+end
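These examples pin down the rename strategy as a whole: the physical table gets the new name, a view with the old name keeps existing models working, and schema cache lookups for the old name are redirected through TABLES_TO_BE_RENAMED. A rough sketch of that redirection, illustrative rather than the actual module:

    # Illustrative: answer schema cache queries for a renamed table with the
    # metadata of the underlying, newly named table.
    module SchemaCacheWithRenamedTable
      def columns(table_name)
        super(underlying_table(table_name))
      end

      def indexes(table_name)
        super(underlying_table(table_name))
      end

      private

      def underlying_table(table_name)
        Gitlab::Database::TABLES_TO_BE_RENAMED.fetch(table_name, table_name)
      end
    end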
diff --git a/spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb b/spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb
new file mode 100644
index 00000000000..e93d8ab590d
--- /dev/null
+++ b/spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb
@@ -0,0 +1,244 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::WithLockRetriesOutsideTransaction do
+ let(:env) { {} }
+ let(:logger) { Gitlab::Database::WithLockRetries::NULL_LOGGER }
+ let(:subject) { described_class.new(env: env, logger: logger, timing_configuration: timing_configuration) }
+
+ let(:timing_configuration) do
+ [
+ [1.second, 1.second],
+ [1.second, 1.second],
+ [1.second, 1.second],
+ [1.second, 1.second],
+ [1.second, 1.second]
+ ]
+ end
+
+ describe '#run' do
+ it 'requires block' do
+ expect { subject.run }.to raise_error(StandardError, 'no block given')
+ end
+
+ context 'when DISABLE_LOCK_RETRIES is set' do
+ let(:env) { { 'DISABLE_LOCK_RETRIES' => 'true' } }
+
+ it 'executes the passed block without retrying' do
+ object = double
+
+ expect(object).to receive(:method).once
+
+ subject.run { object.method }
+ end
+ end
+
+ context 'when lock retry is enabled' do
+ let(:lock_fiber) do
+ Fiber.new do
+ # Initiating a second DB connection for the lock
+ conn = ActiveRecordSecond.establish_connection(Rails.configuration.database_configuration[Rails.env]).connection
+ conn.transaction do
+ conn.execute("LOCK TABLE #{Project.table_name} in exclusive mode")
+
+ Fiber.yield
+ end
+ ActiveRecordSecond.remove_connection # force disconnect
+ end
+ end
+
+ before do
+ stub_const('ActiveRecordSecond', Class.new(ActiveRecord::Base))
+
+ lock_fiber.resume # start the transaction and lock the table
+ end
+
+ after do
+ lock_fiber.resume if lock_fiber.alive?
+ end
+
+ context 'lock_fiber' do
+ it 'acquires lock successfully' do
+ check_exclusive_lock_query = """
+ SELECT 1
+ FROM pg_locks l
+ JOIN pg_class t ON l.relation = t.oid
+ WHERE t.relkind = 'r' AND l.mode = 'ExclusiveLock' AND t.relname = '#{Project.table_name}'
+ """
+
+ expect(ActiveRecord::Base.connection.execute(check_exclusive_lock_query).to_a).to be_present
+ end
+ end
+
+ shared_examples 'retriable exclusive lock on `projects`' do
+ it 'succeeds executing the given block' do
+ lock_attempts = 0
+ lock_acquired = false
+
+ # the actual number of attempts to run_block_with_lock_timeout can never exceed the number of
+ # timings_configurations, so here we limit the retry_count if it exceeds that value
+ #
+ # also, there is no call to sleep after the final attempt, which is why it will always be one less
+ expected_runs_with_timeout = [retry_count, timing_configuration.size].min
+ expect(subject).to receive(:sleep).exactly(expected_runs_with_timeout - 1).times
+
+ expect(subject).to receive(:run_block_with_lock_timeout).exactly(expected_runs_with_timeout).times.and_wrap_original do |method|
+ lock_fiber.resume if lock_attempts == retry_count
+
+ method.call
+ end
+
+ subject.run do
+ lock_attempts += 1
+
+ if lock_attempts == retry_count # we reached the last retry iteration, if we kill the thread, the last try (no lock_timeout) will succeed
+ lock_fiber.resume
+ end
+
+ ActiveRecord::Base.transaction do
+ ActiveRecord::Base.connection.execute("LOCK TABLE #{Project.table_name} in exclusive mode")
+ lock_acquired = true
+ end
+ end
+
+ expect(lock_attempts).to eq(retry_count)
+ expect(lock_acquired).to eq(true)
+ end
+ end
+
+ context 'after 3 iterations' do
+ it_behaves_like 'retriable exclusive lock on `projects`' do
+ let(:retry_count) { 4 }
+ end
+
+ context 'setting the idle transaction timeout' do
+ context 'when there is no outer transaction: disable_ddl_transaction! is set in the migration' do
+ it 'does not disable the idle transaction timeout' do
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
+ allow(subject).to receive(:run_block_with_lock_timeout).once.and_raise(ActiveRecord::LockWaitTimeout)
+ allow(subject).to receive(:run_block_with_lock_timeout).once
+
+ expect(subject).not_to receive(:disable_idle_in_transaction_timeout)
+
+ subject.run {}
+ end
+ end
+
+ context 'when there is outer transaction: disable_ddl_transaction! is not set in the migration' do
+ it 'disables the idle transaction timeout so the code can sleep and retry' do
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(true)
+
+ n = 0
+ allow(subject).to receive(:run_block_with_lock_timeout).twice do
+ n += 1
+ raise(ActiveRecord::LockWaitTimeout) if n == 1
+ end
+
+ expect(subject).to receive(:disable_idle_in_transaction_timeout).once
+
+ subject.run {}
+ end
+ end
+ end
+ end
+
+ context 'after the retries are exhausted' do
+ let(:timing_configuration) do
+ [
+ [1.second, 1.second]
+ ]
+ end
+
+ it 'disables the lock_timeout' do
+ allow(subject).to receive(:run_block_with_lock_timeout).once.and_raise(ActiveRecord::LockWaitTimeout)
+
+ expect(subject).to receive(:disable_lock_timeout)
+
+ subject.run {}
+ end
+ end
+
+ context 'after the retries, without setting lock_timeout' do
+ let(:retry_count) { timing_configuration.size + 1 }
+
+ it_behaves_like 'retriable exclusive lock on `projects`' do
+ before do
+ expect(subject).to receive(:run_block_without_lock_timeout).and_call_original
+ end
+ end
+ end
+
+ context 'after the retries, when requested to raise an error' do
+ let(:expected_attempts_with_timeout) { timing_configuration.size }
+ let(:retry_count) { timing_configuration.size + 1 }
+
+ it 'raises an error instead of waiting indefinitely for the lock' do
+ lock_attempts = 0
+ lock_acquired = false
+
+ expect(subject).to receive(:sleep).exactly(expected_attempts_with_timeout - 1).times
+ expect(subject).to receive(:run_block_with_lock_timeout).exactly(expected_attempts_with_timeout).times.and_call_original
+
+ expect do
+ subject.run(raise_on_exhaustion: true) do
+ lock_attempts += 1
+
+ ActiveRecord::Base.transaction do
+ ActiveRecord::Base.connection.execute("LOCK TABLE #{Project.table_name} in exclusive mode")
+ lock_acquired = true
+ end
+ end
+ end.to raise_error(described_class::AttemptsExhaustedError)
+
+ expect(lock_attempts).to eq(retry_count - 1)
+ expect(lock_acquired).to eq(false)
+ end
+ end
+
+ context 'when statement timeout is reached' do
+ it 'raises StatementInvalid error' do
+ lock_acquired = false
+ ActiveRecord::Base.connection.execute("SET statement_timeout='100ms'")
+
+ expect do
+ subject.run do
+ ActiveRecord::Base.connection.execute("SELECT 1 FROM pg_sleep(0.11)") # 110ms
+ lock_acquired = true
+ end
+ end.to raise_error(ActiveRecord::StatementInvalid)
+
+ expect(lock_acquired).to eq(false)
+ end
+ end
+ end
+ end
+
+ context 'restore local database variables' do
+ it do
+ expect { subject.run {} }.not_to change { ActiveRecord::Base.connection.execute("SHOW lock_timeout").to_a }
+ end
+
+ it do
+ expect { subject.run {} }.not_to change { ActiveRecord::Base.connection.execute("SHOW idle_in_transaction_session_timeout").to_a }
+ end
+ end
+
+ context 'casting durations correctly' do
+ let(:timing_configuration) { [[0.015.seconds, 0.025.seconds], [0.015.seconds, 0.025.seconds]] } # 15ms, 25ms
+
+ it 'executes `SET lock_timeout` using the configured timeout value in milliseconds' do
+ expect(ActiveRecord::Base.connection).to receive(:execute).with('RESET idle_in_transaction_session_timeout; RESET lock_timeout').and_call_original
+ expect(ActiveRecord::Base.connection).to receive(:execute).with("SET lock_timeout TO '15ms'").and_call_original
+
+ subject.run { }
+ end
+
+ it 'calls `sleep` after the first iteration fails, using the configured sleep time' do
+ expect(subject).to receive(:run_block_with_lock_timeout).and_raise(ActiveRecord::LockWaitTimeout).twice
+ expect(subject).to receive(:sleep).with(0.025)
+
+ subject.run { }
+ end
+ end
+end
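Read together, these examples fix the contract of the retry loop: every timing_configuration entry is one attempt under a lock_timeout, a failed attempt is followed by a sleep except for the last one, and a final attempt without lock_timeout runs unless raise_on_exhaustion is set. A rough sketch of that control flow, with method names borrowed from the spec and not claimed to be the exact implementation:

    # Illustrative control flow only.
    def run(raise_on_exhaustion: false, &block)
      raise StandardError, 'no block given' unless block

      timing_configuration.each_with_index do |(lock_timeout, sleep_time), index|
        return run_block_with_lock_timeout(lock_timeout, &block)
      rescue ActiveRecord::LockWaitTimeout
        last_attempt = index == timing_configuration.size - 1
        raise AttemptsExhaustedError, 'configured attempts exhausted' if last_attempt && raise_on_exhaustion

        sleep(sleep_time) unless last_attempt
      end

      # Final attempt with no lock_timeout: it simply waits for the lock.
      run_block_without_lock_timeout(&block)
    end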
diff --git a/spec/lib/gitlab/database/with_lock_retries_spec.rb b/spec/lib/gitlab/database/with_lock_retries_spec.rb
index 563399ff0d9..b08f39fc92a 100644
--- a/spec/lib/gitlab/database/with_lock_retries_spec.rb
+++ b/spec/lib/gitlab/database/with_lock_retries_spec.rb
@@ -76,14 +76,14 @@ RSpec.describe Gitlab::Database::WithLockRetries do
lock_attempts = 0
lock_acquired = false
- # the actual number of attempts to run_block_with_transaction can never exceed the number of
+ # the actual number of attempts to run_block_with_lock_timeout can never exceed the number of
# timings_configurations, so here we limit the retry_count if it exceeds that value
#
# also, there is no call to sleep after the final attempt, which is why it will always be one less
expected_runs_with_timeout = [retry_count, timing_configuration.size].min
expect(subject).to receive(:sleep).exactly(expected_runs_with_timeout - 1).times
- expect(subject).to receive(:run_block_with_transaction).exactly(expected_runs_with_timeout).times.and_wrap_original do |method|
+ expect(subject).to receive(:run_block_with_lock_timeout).exactly(expected_runs_with_timeout).times.and_wrap_original do |method|
lock_fiber.resume if lock_attempts == retry_count
method.call
@@ -116,8 +116,8 @@ RSpec.describe Gitlab::Database::WithLockRetries do
context 'when there is no outer transaction: disable_ddl_transaction! is set in the migration' do
it 'does not disable the idle transaction timeout' do
allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
- allow(subject).to receive(:run_block_with_transaction).once.and_raise(ActiveRecord::LockWaitTimeout)
- allow(subject).to receive(:run_block_with_transaction).once
+ allow(subject).to receive(:run_block_with_lock_timeout).once.and_raise(ActiveRecord::LockWaitTimeout)
+ allow(subject).to receive(:run_block_with_lock_timeout).once
expect(subject).not_to receive(:disable_idle_in_transaction_timeout)
@@ -130,7 +130,7 @@ RSpec.describe Gitlab::Database::WithLockRetries do
allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(true)
n = 0
- allow(subject).to receive(:run_block_with_transaction).twice do
+ allow(subject).to receive(:run_block_with_lock_timeout).twice do
n += 1
raise(ActiveRecord::LockWaitTimeout) if n == 1
end
@@ -153,7 +153,7 @@ RSpec.describe Gitlab::Database::WithLockRetries do
context 'when there is no outer transaction: disable_ddl_transaction! is set in the migration' do
it 'does not disable the lock_timeout' do
allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
- allow(subject).to receive(:run_block_with_transaction).once.and_raise(ActiveRecord::LockWaitTimeout)
+ allow(subject).to receive(:run_block_with_lock_timeout).once.and_raise(ActiveRecord::LockWaitTimeout)
expect(subject).not_to receive(:disable_lock_timeout)
@@ -164,7 +164,7 @@ RSpec.describe Gitlab::Database::WithLockRetries do
context 'when there is outer transaction: disable_ddl_transaction! is not set in the migration' do
it 'disables the lock_timeout' do
allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(true)
- allow(subject).to receive(:run_block_with_transaction).once.and_raise(ActiveRecord::LockWaitTimeout)
+ allow(subject).to receive(:run_block_with_lock_timeout).once.and_raise(ActiveRecord::LockWaitTimeout)
expect(subject).to receive(:disable_lock_timeout)
@@ -192,7 +192,7 @@ RSpec.describe Gitlab::Database::WithLockRetries do
lock_acquired = false
expect(subject).to receive(:sleep).exactly(expected_attempts_with_timeout - 1).times
- expect(subject).to receive(:run_block_with_transaction).exactly(expected_attempts_with_timeout).times.and_call_original
+ expect(subject).to receive(:run_block_with_lock_timeout).exactly(expected_attempts_with_timeout).times.and_call_original
expect do
subject.run(raise_on_exhaustion: true) do
@@ -251,7 +251,7 @@ RSpec.describe Gitlab::Database::WithLockRetries do
end
it 'calls `sleep` after the first iteration fails, using the configured sleep time' do
- expect(subject).to receive(:run_block_with_transaction).and_raise(ActiveRecord::LockWaitTimeout).twice
+ expect(subject).to receive(:run_block_with_lock_timeout).and_raise(ActiveRecord::LockWaitTimeout).twice
expect(subject).to receive(:sleep).with(0.025)
subject.run { }
diff --git a/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb b/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb
index 417bf3e363a..28291508ac0 100644
--- a/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb
+++ b/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb
@@ -137,11 +137,11 @@ RSpec.describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService
it 'creates a Prometheus service' do
expect(result[:status]).to eq(:success)
- services = result[:project].reload.services
+ integrations = result[:project].reload.integrations
- expect(services.count).to eq(1)
+ expect(integrations.count).to eq(1)
# Ensures PrometheusService#self_monitoring_project? is true
- expect(services.first.allow_local_api_url?).to be_truthy
+ expect(integrations.first.allow_local_api_url?).to be_truthy
end
it 'creates an environment for the project' do
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index b735ac7940b..663c8d69328 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -15,9 +15,29 @@ RSpec.describe Gitlab::Database do
end
end
+ describe '.default_pool_size' do
+ before do
+ allow(Gitlab::Runtime).to receive(:max_threads).and_return(7)
+ end
+
+ it 'returns the max thread size plus a fixed headroom of 10' do
+ expect(described_class.default_pool_size).to eq(17)
+ end
+
+ it 'returns the max thread size plus a DB_POOL_HEADROOM if this env var is present' do
+ stub_env('DB_POOL_HEADROOM', '7')
+
+ expect(described_class.default_pool_size).to eq(14)
+ end
+ end
+
describe '.config' do
- it 'returns a Hash' do
- expect(described_class.config).to be_an_instance_of(Hash)
+ it 'returns a HashWithIndifferentAccess' do
+ expect(described_class.config).to be_an_instance_of(HashWithIndifferentAccess)
+ end
+
+ it 'returns a default pool size' do
+ expect(described_class.config).to include(pool: described_class.default_pool_size)
end
end
@@ -176,7 +196,7 @@ RSpec.describe Gitlab::Database do
closed_pool = pool
- raise error.new('boom')
+ raise error, 'boom'
end
rescue error
end
@@ -395,13 +415,13 @@ RSpec.describe Gitlab::Database do
allow(ActiveRecord::Base.connection).to receive(:execute).and_call_original
end
- it 'detects a read only database' do
+ it 'detects a read-only database' do
allow(ActiveRecord::Base.connection).to receive(:execute).with('SELECT pg_is_in_recovery()').and_return([{ "pg_is_in_recovery" => "t" }])
expect(described_class.db_read_only?).to be_truthy
end
- it 'detects a read only database' do
+ it 'detects a read-only database' do
allow(ActiveRecord::Base.connection).to receive(:execute).with('SELECT pg_is_in_recovery()').and_return([{ "pg_is_in_recovery" => true }])
expect(described_class.db_read_only?).to be_truthy
diff --git a/spec/lib/gitlab/default_branch_spec.rb b/spec/lib/gitlab/default_branch_spec.rb
new file mode 100644
index 00000000000..b066815612c
--- /dev/null
+++ b/spec/lib/gitlab/default_branch_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+# The main_branch_over_master feature is disabled in tests
+# so that branch usage stays consistent.
+# Once the default branch name is migrated to main, it can be enabled.
+RSpec.describe Gitlab::DefaultBranch do
+ context 'main_branch_over_master is enabled' do
+ before do
+ stub_feature_flags(main_branch_over_master: true)
+ end
+
+ it 'returns main' do
+ expect(described_class.value).to eq('main')
+ end
+ end
+
+ context 'main_branch_over_master is disabled' do
+ it 'returns master' do
+ expect(described_class.value).to eq('master')
+ end
+ end
+end
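Each context above needs only one branch of behaviour, so the implementation they describe can be a single feature flag check. A minimal sketch under that assumption, not necessarily the real method body:

    # Illustrative: default branch name gated on the main_branch_over_master flag.
    module Gitlab
      module DefaultBranch
        def self.value
          Feature.enabled?(:main_branch_over_master) ? 'main' : 'master'
        end
      end
    end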
diff --git a/spec/lib/gitlab/diff/highlight_spec.rb b/spec/lib/gitlab/diff/highlight_spec.rb
index 32ca6e4fde6..94d3b2ad0b3 100644
--- a/spec/lib/gitlab/diff/highlight_spec.rb
+++ b/spec/lib/gitlab/diff/highlight_spec.rb
@@ -5,7 +5,8 @@ require 'spec_helper'
RSpec.describe Gitlab::Diff::Highlight do
include RepoHelpers
- let(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
+
let(:commit) { project.commit(sample_commit.id) }
let(:diff) { commit.raw_diffs.first }
let(:diff_file) { Gitlab::Diff::File.new(diff, diff_refs: commit.diff_refs, repository: project.repository) }
@@ -156,5 +157,34 @@ RSpec.describe Gitlab::Diff::Highlight do
it_behaves_like 'without inline diffs'
end
end
+
+ context 'when blob is too large' do
+ let(:subject) { described_class.new(diff_file, repository: project.repository).highlight }
+
+ before do
+ allow(Gitlab::Highlight).to receive(:too_large?).and_return(true)
+ end
+
+ it 'highlights blobs as plain text without loading all data' do
+ expect(diff_file.blob).not_to receive(:load_all_data!)
+
+ expect(subject[2].rich_text).to eq(%Q{ <span id="LC7" class="line" lang=""> def popen(cmd, path=nil)</span>\n})
+ expect(subject[2].rich_text).to be_html_safe
+ end
+
+ context 'when limited_diff_highlighting is disabled' do
+ before do
+ stub_feature_flags(limited_diff_highlighting: false)
+ stub_feature_flags(diff_line_syntax_highlighting: false)
+ end
+
+ it 'highlights blobs as plain text after loading all data' do
+ expect(diff_file.blob).to receive(:load_all_data!).twice
+
+ code = %Q{ <span id="LC7" class="line" lang=""> def popen(cmd, path=nil)</span>\n}
+ expect(subject[2].rich_text).to eq(code)
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
index 7436765e8ee..6d26b3e1064 100644
--- a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
@@ -90,11 +90,6 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
context 'when quick actions are present' do
let(:label) { create(:label, project: project, title: 'label1') }
let(:milestone) { create(:milestone, project: project) }
- let!(:user) { create(:user, username: 'user1') }
-
- before do
- project.add_developer(user)
- end
it 'applies quick action commands present on templates' do
file_content = %(Text from template \n/label ~#{label.title} \n/milestone %"#{milestone.name}"")
diff --git a/spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb
new file mode 100644
index 00000000000..42d84b3e4de
--- /dev/null
+++ b/spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb
@@ -0,0 +1,84 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Email::Message::InProductMarketing::Base do
+ let_it_be(:group) { build(:group) }
+
+ let(:series) { 0 }
+ let(:test_class) { Gitlab::Email::Message::InProductMarketing::Create }
+
+ describe 'initialize' do
+ subject { test_class.new(group: group, series: series) }
+
+ context 'when series does not exist' do
+ let(:series) { 3 }
+
+ it 'raises error' do
+ expect { subject }.to raise_error(ArgumentError)
+ end
+ end
+
+ context 'when series exists' do
+ let(:series) { 0 }
+
+ it 'does not raise error' do
+ expect { subject }.not_to raise_error(ArgumentError)
+ end
+ end
+ end
+
+ describe '#logo_path' do
+ subject { test_class.new(group: group, series: series).logo_path }
+
+ it { is_expected.to eq('mailers/in_product_marketing/create-0.png') }
+ end
+
+ describe '#unsubscribe' do
+ subject { test_class.new(group: group, series: series).unsubscribe }
+
+ before do
+ allow(Gitlab).to receive(:com?).and_return(is_gitlab_com)
+ end
+
+ context 'on gitlab.com' do
+ let(:is_gitlab_com) { true }
+
+ it { is_expected.to include('%tag_unsubscribe_url%') }
+ end
+
+ context 'not on gitlab.com' do
+ let(:is_gitlab_com) { false }
+
+ it { is_expected.to include(Gitlab::Routing.url_helpers.profile_notifications_url) }
+ end
+ end
+
+ describe '#cta_link' do
+ subject(:cta_link) { test_class.new(group: group, series: series).cta_link }
+
+ it 'renders link' do
+ expect(CGI.unescapeHTML(cta_link)).to include(Gitlab::Routing.url_helpers.group_email_campaigns_url(group, track: :create, series: series))
+ end
+ end
+
+ describe '#progress' do
+ subject { test_class.new(group: group, series: series).progress }
+
+ before do
+ allow(Gitlab).to receive(:com?).and_return(is_gitlab_com)
+ end
+
+ context 'on gitlab.com' do
+ let(:is_gitlab_com) { true }
+
+ it { is_expected.to include('This is email 1 of 3 in the Create series') }
+ end
+
+ context 'not on gitlab.com' do
+ let(:is_gitlab_com) { false }
+
+ it { is_expected.to include('This is email 1 of 3 in the Create series', Gitlab::Routing.url_helpers.profile_notifications_url) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/email/message/in_product_marketing/create_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/create_spec.rb
new file mode 100644
index 00000000000..be8a33b18bd
--- /dev/null
+++ b/spec/lib/gitlab/email/message/in_product_marketing/create_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Email::Message::InProductMarketing::Create do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:group) { build(:group) }
+
+ subject(:message) { described_class.new(group: group, series: series) }
+
+ describe "public methods" do
+ where(series: [0, 1, 2])
+
+ with_them do
+ it 'returns value for series', :aggregate_failures do
+ expect(message.subject_line).to be_present
+ expect(message.tagline).to be_present
+ expect(message.title).to be_present
+ expect(message.subtitle).to be_present
+ expect(message.body_line1).to be_present
+ expect(message.body_line2).to be_present
+ expect(message.cta_text).to be_present
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/email/message/in_product_marketing/team_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/team_spec.rb
new file mode 100644
index 00000000000..6251128f560
--- /dev/null
+++ b/spec/lib/gitlab/email/message/in_product_marketing/team_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Email::Message::InProductMarketing::Team do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:group) { build(:group) }
+
+ subject(:message) { described_class.new(group: group, series: series) }
+
+ describe "public methods" do
+ where(series: [0, 1])
+
+ with_them do
+ it 'returns value for series', :aggregate_failures do
+ expect(message.subject_line).to be_present
+ expect(message.tagline).to be_present
+ expect(message.title).to be_present
+ expect(message.subtitle).to be_present
+ expect(message.body_line1).to be_present
+ expect(message.body_line2).to be_present
+ expect(message.cta_text).to be_present
+ end
+ end
+
+ context 'with series 2' do
+ let(:series) { 2 }
+
+ it 'returns value for series', :aggregate_failures do
+ expect(message.subject_line).to be_present
+ expect(message.tagline).to be_nil
+ expect(message.title).to be_present
+ expect(message.subtitle).to be_present
+ expect(message.body_line1).to be_present
+ expect(message.body_line2).to be_present
+ expect(message.cta_text).to be_present
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/email/message/in_product_marketing/trial_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/trial_spec.rb
new file mode 100644
index 00000000000..2c435490765
--- /dev/null
+++ b/spec/lib/gitlab/email/message/in_product_marketing/trial_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Email::Message::InProductMarketing::Trial do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:group) { build(:group) }
+
+ subject(:message) { described_class.new(group: group, series: series) }
+
+ describe "public methods" do
+ where(series: [0, 1, 2])
+
+ with_them do
+ it 'returns value for series', :aggregate_failures do
+ expect(message.subject_line).to be_present
+ expect(message.tagline).to be_present
+ expect(message.title).to be_present
+ expect(message.subtitle).to be_present
+ expect(message.body_line1).to be_present
+ expect(message.body_line2).to be_present
+ expect(message.cta_text).to be_present
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/email/message/in_product_marketing/verify_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/verify_spec.rb
new file mode 100644
index 00000000000..73252c0dbdf
--- /dev/null
+++ b/spec/lib/gitlab/email/message/in_product_marketing/verify_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Email::Message::InProductMarketing::Verify do
+ let_it_be(:group) { build(:group) }
+
+ subject(:message) { described_class.new(group: group, series: series) }
+
+ describe "public methods" do
+ context 'with series 0' do
+ let(:series) { 0 }
+
+ it 'returns value for series', :aggregate_failures do
+ expect(message.subject_line).to be_present
+ expect(message.tagline).to be_present
+ expect(message.title).to be_present
+ expect(message.subtitle).to be_present
+ expect(message.body_line1).to be_present
+ expect(message.body_line2).to be_nil
+ expect(message.cta_text).to be_present
+ end
+ end
+
+ context 'with series 1' do
+ let(:series) { 1 }
+
+ it 'returns value for series', :aggregate_failures do
+ expect(message.subject_line).to be_present
+ expect(message.tagline).to be_present
+ expect(message.title).to be_present
+ expect(message.subtitle).to be_present
+ expect(message.body_line1).to be_present
+ expect(message.body_line2).to be_present
+ expect(message.cta_text).to be_present
+ end
+ end
+
+ context 'with series 2' do
+ let(:series) { 2 }
+
+ it 'returns value for series', :aggregate_failures do
+ expect(message.subject_line).to be_present
+ expect(message.tagline).to be_present
+ expect(message.title).to be_present
+ expect(message.subtitle).to be_present
+ expect(message.body_line1).to be_present
+ expect(message.body_line2).to be_nil
+ expect(message.cta_text).to be_present
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/email/message/in_product_marketing_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing_spec.rb
new file mode 100644
index 00000000000..9ffc4a340a3
--- /dev/null
+++ b/spec/lib/gitlab/email/message/in_product_marketing_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Email::Message::InProductMarketing do
+ describe '.for' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { described_class.for(track) }
+
+ context 'when track exists' do
+ where(:track, :expected_class) do
+ :create | described_class::Create
+ :verify | described_class::Verify
+ :trial | described_class::Trial
+ :team | described_class::Team
+ end
+
+ with_them do
+ it { is_expected.to eq(expected_class) }
+ end
+ end
+
+ context 'when track does not exist' do
+ let(:track) { :non_existent }
+
+ it 'raises error' do
+ expect { subject }.to raise_error(described_class::UnknownTrackError)
+ end
+ end
+ end
+end
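The .for examples above pin a straightforward track-to-class lookup plus an UnknownTrackError for anything else. A hedged sketch of that lookup; only the class names and the error come from the spec:

    # Illustrative mapping from a track symbol to its message class.
    def self.for(track)
      {
        create: InProductMarketing::Create,
        verify: InProductMarketing::Verify,
        trial: InProductMarketing::Trial,
        team: InProductMarketing::Team
      }.fetch(track) { raise UnknownTrackError, "unknown track: #{track}" }
    end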
diff --git a/spec/lib/gitlab/email/receiver_spec.rb b/spec/lib/gitlab/email/receiver_spec.rb
index ccff902d290..9b05c12ef57 100644
--- a/spec/lib/gitlab/email/receiver_spec.rb
+++ b/spec/lib/gitlab/email/receiver_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe Gitlab::Email::Receiver do
context 'when the email contains a valid email address in a header' do
let(:handler) { double(:handler) }
+ let(:metadata) { receiver.mail_metadata }
before do
allow(handler).to receive(:execute)
@@ -22,24 +23,38 @@ RSpec.describe Gitlab::Email::Receiver do
allow(handler).to receive(:metrics_event)
stub_incoming_email_setting(enabled: true, address: "incoming+%{key}@appmail.example.com")
+
+ expect(receiver.mail_metadata.keys).to match_array(%i(mail_uid from_address to_address mail_key references delivered_to envelope_to x_envelope_to))
end
context 'when in a Delivered-To header' do
let(:email_raw) { fixture_file('emails/forwarded_new_issue.eml') }
it_behaves_like 'correctly finds the mail key'
+
+ it 'parses the metadata' do
+ expect(metadata[:delivered_to]).to eq(["incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com", "support@example.com"])
+ end
end
context 'when in an Envelope-To header' do
let(:email_raw) { fixture_file('emails/envelope_to_header.eml') }
it_behaves_like 'correctly finds the mail key'
+
+ it 'parses the metadata' do
+ expect(metadata[:envelope_to]).to eq(["incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com"])
+ end
end
context 'when in an X-Envelope-To header' do
let(:email_raw) { fixture_file('emails/x_envelope_to_header.eml') }
it_behaves_like 'correctly finds the mail key'
+
+ it 'parses the metadata' do
+ expect(metadata[:x_envelope_to]).to eq(["incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com"])
+ end
end
context 'when enclosed with angle brackets in an Envelope-To header' do
diff --git a/spec/lib/gitlab/encoding_helper_spec.rb b/spec/lib/gitlab/encoding_helper_spec.rb
index 0ea974921bc..cf0d1577314 100644
--- a/spec/lib/gitlab/encoding_helper_spec.rb
+++ b/spec/lib/gitlab/encoding_helper_spec.rb
@@ -216,4 +216,63 @@ RSpec.describe Gitlab::EncodingHelper do
expect(test).not_to eq(io_stream)
end
end
+
+ describe '#detect_encoding' do
+ subject { ext_class.detect_encoding(data, **kwargs) }
+
+ let(:data) { binary_string }
+ let(:kwargs) { {} }
+
+ shared_examples 'detects encoding' do
+ it { is_expected.to be_a(Hash) }
+
+ it 'correctly detects the binary' do
+ expect(subject[:type]).to eq(:binary)
+ end
+
+ context 'data is nil' do
+ let(:data) { nil }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'limit is provided' do
+ let(:kwargs) do
+ { limit: 10 }
+ end
+
+ it 'correctly detects the binary' do
+ expect(subject[:type]).to eq(:binary)
+ end
+ end
+ end
+
+ context 'cached_encoding_detection is enabled' do
+ before do
+ stub_feature_flags(cached_encoding_detection: true)
+ end
+
+ it_behaves_like 'detects encoding'
+
+ context 'cache_key is provided' do
+ let(:kwargs) do
+ { cache_key: %w(foo bar) }
+ end
+
+ it 'uses that cache_key to serve from the cache' do
+ expect(Rails.cache).to receive(:fetch).with([:detect_binary, CharlockHolmes::VERSION, %w(foo bar)], expires_in: 1.week).and_call_original
+
+ expect(subject[:type]).to eq(:binary)
+ end
+ end
+ end
+
+ context 'cached_encoding_detection is disabled' do
+ before do
+ stub_feature_flags(cached_encoding_detection: false)
+ end
+
+ it_behaves_like 'detects encoding'
+ end
+ end
end
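The cache examples fix the cache key shape ([:detect_binary, CharlockHolmes::VERSION, cache_key]) and the one-week expiry, while the shared examples require the detection result to be identical with or without caching. A rough sketch of that behaviour; the limit handling is simplified and the method body is illustrative:

    # Illustrative: cache CharlockHolmes detection results when a cache_key is given
    # and the cached_encoding_detection flag is enabled.
    def detect_encoding(data, limit: nil, cache_key: nil)
      return if data.nil?

      detect = -> do
        detector = limit ? CharlockHolmes::EncodingDetector.new(limit) : CharlockHolmes::EncodingDetector.new
        detector.detect(data)
      end

      if cache_key.present? && Feature.enabled?(:cached_encoding_detection)
        Rails.cache.fetch([:detect_binary, CharlockHolmes::VERSION, cache_key], expires_in: 1.week) { detect.call }
      else
        detect.call
      end
    end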
diff --git a/spec/lib/gitlab/error_tracking/processor/context_payload_processor_spec.rb b/spec/lib/gitlab/error_tracking/processor/context_payload_processor_spec.rb
index 24f5299d357..584eadb24a7 100644
--- a/spec/lib/gitlab/error_tracking/processor/context_payload_processor_spec.rb
+++ b/spec/lib/gitlab/error_tracking/processor/context_payload_processor_spec.rb
@@ -3,7 +3,10 @@
require 'spec_helper'
RSpec.describe Gitlab::ErrorTracking::Processor::ContextPayloadProcessor do
- shared_examples 'processing an exception' do
+ describe '.call' do
+ let(:event) { Raven::Event.new(payload) }
+ let(:result_hash) { described_class.call(event).to_hash }
+
before do
allow_next_instance_of(Gitlab::ErrorTracking::ContextPayloadGenerator) do |generator|
allow(generator).to receive(:generate).and_return(
@@ -37,30 +40,4 @@ RSpec.describe Gitlab::ErrorTracking::Processor::ContextPayloadProcessor do
sidekiq: { class: 'SomeWorker', args: ['[FILTERED]', 1, 2] })
end
end
-
- describe '.call' do
- let(:event) { Raven::Event.new(payload) }
- let(:result_hash) { described_class.call(event).to_hash }
-
- it_behaves_like 'processing an exception'
-
- context 'when followed by #process' do
- let(:result_hash) { described_class.new.process(described_class.call(event).to_hash) }
-
- it_behaves_like 'processing an exception'
- end
- end
-
- describe '#process' do
- let(:event) { Raven::Event.new(payload) }
- let(:result_hash) { described_class.new.process(event.to_hash) }
-
- context 'with sentry_processors_before_send disabled' do
- before do
- stub_feature_flags(sentry_processors_before_send: false)
- end
-
- it_behaves_like 'processing an exception'
- end
- end
end
diff --git a/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb b/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb
index 4808fdf2f06..727b603feda 100644
--- a/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb
+++ b/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb
@@ -3,7 +3,10 @@
require 'spec_helper'
RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor do
- shared_examples 'processing an exception' do
+ describe '.call' do
+ let(:event) { Raven::Event.from_exception(exception, data) }
+ let(:result_hash) { described_class.call(event).to_hash }
+
context 'when there is no GRPC exception' do
let(:exception) { RuntimeError.new }
let(:data) { { fingerprint: ['ArgumentError', 'Missing arguments'] } }
@@ -56,30 +59,4 @@ RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor do
end
end
end
-
- describe '.call' do
- let(:event) { Raven::Event.from_exception(exception, data) }
- let(:result_hash) { described_class.call(event).to_hash }
-
- it_behaves_like 'processing an exception'
-
- context 'when followed by #process' do
- let(:result_hash) { described_class.new.process(described_class.call(event).to_hash) }
-
- it_behaves_like 'processing an exception'
- end
- end
-
- describe '#process' do
- let(:event) { Raven::Event.from_exception(exception, data) }
- let(:result_hash) { described_class.new.process(event.to_hash) }
-
- context 'with sentry_processors_before_send disabled' do
- before do
- stub_feature_flags(sentry_processors_before_send: false)
- end
-
- it_behaves_like 'processing an exception'
- end
- end
end
diff --git a/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb b/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb
index 20fd5d085a9..c8a362fcf05 100644
--- a/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb
+++ b/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb
@@ -94,7 +94,10 @@ RSpec.describe Gitlab::ErrorTracking::Processor::SidekiqProcessor do
end
end
- shared_examples 'processing an exception' do
+ describe '.call' do
+ let(:event) { Raven::Event.new(wrapped_value) }
+ let(:result_hash) { described_class.call(event).to_hash }
+
context 'when there is Sidekiq data' do
let(:wrapped_value) { { extra: { sidekiq: value } } }
@@ -168,30 +171,4 @@ RSpec.describe Gitlab::ErrorTracking::Processor::SidekiqProcessor do
end
end
end
-
- describe '.call' do
- let(:event) { Raven::Event.new(wrapped_value) }
- let(:result_hash) { described_class.call(event).to_hash }
-
- it_behaves_like 'processing an exception'
-
- context 'when followed by #process' do
- let(:result_hash) { described_class.new.process(described_class.call(event).to_hash) }
-
- it_behaves_like 'processing an exception'
- end
- end
-
- describe '#process' do
- let(:event) { Raven::Event.new(wrapped_value) }
- let(:result_hash) { described_class.new.process(event.to_hash) }
-
- context 'with sentry_processors_before_send disabled' do
- before do
- stub_feature_flags(sentry_processors_before_send: false)
- end
-
- it_behaves_like 'processing an exception'
- end
- end
end
diff --git a/spec/lib/gitlab/error_tracking_spec.rb b/spec/lib/gitlab/error_tracking_spec.rb
index 2e67a9f0874..7ad1f52780a 100644
--- a/spec/lib/gitlab/error_tracking_spec.rb
+++ b/spec/lib/gitlab/error_tracking_spec.rb
@@ -217,7 +217,7 @@ RSpec.describe Gitlab::ErrorTracking do
end
end
- shared_examples 'event processors' do
+ context 'event processors' do
subject(:track_exception) { described_class.track_exception(exception, extra) }
before do
@@ -312,20 +312,4 @@ RSpec.describe Gitlab::ErrorTracking do
end
end
end
-
- context 'with sentry_processors_before_send enabled' do
- before do
- stub_feature_flags(sentry_processors_before_send: true)
- end
-
- include_examples 'event processors'
- end
-
- context 'with sentry_processors_before_send disabled' do
- before do
- stub_feature_flags(sentry_processors_before_send: false)
- end
-
- include_examples 'event processors'
- end
end
diff --git a/spec/lib/gitlab/etag_caching/router_spec.rb b/spec/lib/gitlab/etag_caching/router_spec.rb
index c748ee00721..ce728c41f48 100644
--- a/spec/lib/gitlab/etag_caching/router_spec.rb
+++ b/spec/lib/gitlab/etag_caching/router_spec.rb
@@ -22,6 +22,14 @@ RSpec.describe Gitlab::EtagCaching::Router do
expect(result.name).to eq 'pipelines_graph'
expect(result.router).to eq Gitlab::EtagCaching::Router::Graphql
end
+
+ it 'matches pipeline sha endpoint' do
+ result = match_route('/api/graphql', 'pipelines/sha/4asd12lla2jiwjdqw9as32glm8is8hiu8s2c5jsw')
+
+ expect(result).to be_present
+ expect(result.name).to eq 'ci_editor'
+ expect(result.router).to eq Gitlab::EtagCaching::Router::Graphql
+ end
end
end
diff --git a/spec/lib/gitlab/experimentation/controller_concern_spec.rb b/spec/lib/gitlab/experimentation/controller_concern_spec.rb
index 3678aeb18b0..5419a01ea3e 100644
--- a/spec/lib/gitlab/experimentation/controller_concern_spec.rb
+++ b/spec/lib/gitlab/experimentation/controller_concern_spec.rb
@@ -19,12 +19,15 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do
}
)
+ allow(Gitlab).to receive(:dev_env_or_com?).and_return(is_gitlab_com)
+
Feature.enable_percentage_of_time(:backwards_compatible_test_experiment_experiment_percentage, enabled_percentage)
Feature.enable_percentage_of_time(:test_experiment_experiment_percentage, enabled_percentage)
end
let(:enabled_percentage) { 10 }
let(:rollout_strategy) { nil }
+ let(:is_gitlab_com) { true }
controller(ApplicationController) do
include Gitlab::Experimentation::ControllerConcern
@@ -37,17 +40,17 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do
describe '#set_experimentation_subject_id_cookie' do
let(:do_not_track) { nil }
let(:cookie) { cookies.permanent.signed[:experimentation_subject_id] }
+ let(:cookie_value) { nil }
before do
request.headers['DNT'] = do_not_track if do_not_track.present?
+ request.cookies[:experimentation_subject_id] = cookie_value if cookie_value
get :index
end
context 'cookie is present' do
- before do
- cookies[:experimentation_subject_id] = 'test'
- end
+ let(:cookie_value) { 'test' }
it 'does not change the cookie' do
expect(cookies[:experimentation_subject_id]).to eq 'test'
@@ -75,6 +78,24 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do
end
end
end
+
+ context 'when not on gitlab.com' do
+ let(:is_gitlab_com) { false }
+
+ context 'when cookie was set' do
+ let(:cookie_value) { 'test' }
+
+ it 'cookie gets deleted' do
+ expect(cookie).not_to be_present
+ end
+ end
+
+ context 'when no cookie was set before' do
+ it 'does nothing' do
+ expect(cookie).not_to be_present
+ end
+ end
+ end
end
describe '#push_frontend_experiment' do
diff --git a/spec/lib/gitlab/experimentation_spec.rb b/spec/lib/gitlab/experimentation_spec.rb
index 5fef14bd2a0..10bfa9e8d0e 100644
--- a/spec/lib/gitlab/experimentation_spec.rb
+++ b/spec/lib/gitlab/experimentation_spec.rb
@@ -7,7 +7,6 @@ require 'spec_helper'
RSpec.describe Gitlab::Experimentation::EXPERIMENTS do
it 'temporarily ensures we know what experiments exist for backwards compatibility' do
expected_experiment_keys = [
- :invite_members_version_b,
:invite_members_empty_group_version_a,
:contact_sales_btn_in_app
]
diff --git a/spec/lib/gitlab/external_authorization/access_spec.rb b/spec/lib/gitlab/external_authorization/access_spec.rb
index a6773cc19e1..812ef2b54e9 100644
--- a/spec/lib/gitlab/external_authorization/access_spec.rb
+++ b/spec/lib/gitlab/external_authorization/access_spec.rb
@@ -82,7 +82,7 @@ RSpec.describe Gitlab::ExternalAuthorization::Access, :clean_gitlab_redis_cache
context 'when the request fails' do
before do
allow(fake_client).to receive(:request_access) do
- raise ::Gitlab::ExternalAuthorization::RequestFailed.new('Service unavailable')
+ raise ::Gitlab::ExternalAuthorization::RequestFailed, 'Service unavailable'
end
end
diff --git a/spec/lib/gitlab/external_authorization/client_spec.rb b/spec/lib/gitlab/external_authorization/client_spec.rb
index c08da382486..b907b0bb262 100644
--- a/spec/lib/gitlab/external_authorization/client_spec.rb
+++ b/spec/lib/gitlab/external_authorization/client_spec.rb
@@ -71,7 +71,7 @@ RSpec.describe Gitlab::ExternalAuthorization::Client do
end
it 'wraps exceptions if the request fails' do
- expect(Gitlab::HTTP).to receive(:post) { raise Gitlab::HTTP::BlockedUrlError.new('the request broke') }
+ expect(Gitlab::HTTP).to receive(:post) { raise Gitlab::HTTP::BlockedUrlError, 'the request broke' }
expect { client.request_access }
.to raise_error(::Gitlab::ExternalAuthorization::RequestFailed)
diff --git a/spec/lib/gitlab/git/blame_spec.rb b/spec/lib/gitlab/git/blame_spec.rb
index 67d7b37dd45..495cb16ebab 100644
--- a/spec/lib/gitlab/git/blame_spec.rb
+++ b/spec/lib/gitlab/git/blame_spec.rb
@@ -53,7 +53,10 @@ RSpec.describe Gitlab::Git::Blame, :seed_helper do
end
it 'converts to UTF-8' do
- expect(CharlockHolmes::EncodingDetector).to receive(:detect).and_return(nil)
+ expect_next_instance_of(CharlockHolmes::EncodingDetector) do |detector|
+ expect(detector).to receive(:detect).and_return(nil)
+ end
+
data = []
blame.each do |commit, line|
data << {
diff --git a/spec/lib/gitlab/git/branch_spec.rb b/spec/lib/gitlab/git/branch_spec.rb
index 9271f635b14..3cc52863976 100644
--- a/spec/lib/gitlab/git/branch_spec.rb
+++ b/spec/lib/gitlab/git/branch_spec.rb
@@ -44,6 +44,16 @@ RSpec.describe Gitlab::Git::Branch, :seed_helper do
end
end
+ describe "#cache_key" do
+ subject { repository.branches.first }
+
+ it "returns a cache key that changes based on changeable values" do
+ digest = Digest::SHA1.hexdigest([subject.name, subject.target, subject.dereferenced_target.sha].join(":"))
+
+ expect(subject.cache_key).to eq("branch:#{digest}")
+ end
+ end
+
describe '#size' do
subject { super().size }
diff --git a/spec/lib/gitlab/git/lfs_changes_spec.rb b/spec/lib/gitlab/git/lfs_changes_spec.rb
index 286c5b98771..416a180ec8b 100644
--- a/spec/lib/gitlab/git/lfs_changes_spec.rb
+++ b/spec/lib/gitlab/git/lfs_changes_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Git::LfsChanges do
let_it_be(:project) { create(:project, :repository) }
+
let(:newrev) { '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51' }
let(:blob_object_id) { '0c304a93cb8430108629bbbcaa27db3343299bc0' }
diff --git a/spec/lib/gitlab/git/merge_base_spec.rb b/spec/lib/gitlab/git/merge_base_spec.rb
index 1410e44a220..fda2232c2c3 100644
--- a/spec/lib/gitlab/git/merge_base_spec.rb
+++ b/spec/lib/gitlab/git/merge_base_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Git::MergeBase do
let_it_be(:project) { create(:project, :repository) }
+
let(:repository) { project.repository }
subject(:merge_base) { described_class.new(repository, refs) }
diff --git a/spec/lib/gitlab/git/push_spec.rb b/spec/lib/gitlab/git/push_spec.rb
index 68cef558f6f..a0a4a22699b 100644
--- a/spec/lib/gitlab/git/push_spec.rb
+++ b/spec/lib/gitlab/git/push_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Git::Push do
let_it_be(:project) { create(:project, :repository) }
+
let(:oldrev) { project.commit('HEAD~2').id }
let(:newrev) { project.commit.id }
let(:ref) { 'refs/heads/some-branch' }
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index 1e259c9c153..1ddbdda12b5 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -604,29 +604,29 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
expect_any_instance_of(Gitlab::GitalyClient::RemoteService)
.to receive(:find_remote_root_ref).and_call_original
- expect(repository.find_remote_root_ref('origin')).to eq 'master'
+ expect(repository.find_remote_root_ref('origin', SeedHelper::GITLAB_GIT_TEST_REPO_URL)).to eq 'master'
end
it 'returns UTF-8' do
- expect(repository.find_remote_root_ref('origin')).to be_utf8
+ expect(repository.find_remote_root_ref('origin', SeedHelper::GITLAB_GIT_TEST_REPO_URL)).to be_utf8
end
it 'returns nil when remote name is nil' do
expect_any_instance_of(Gitlab::GitalyClient::RemoteService)
.not_to receive(:find_remote_root_ref)
- expect(repository.find_remote_root_ref(nil)).to be_nil
+ expect(repository.find_remote_root_ref(nil, nil)).to be_nil
end
it 'returns nil when remote name is empty' do
expect_any_instance_of(Gitlab::GitalyClient::RemoteService)
.not_to receive(:find_remote_root_ref)
- expect(repository.find_remote_root_ref('')).to be_nil
+ expect(repository.find_remote_root_ref('', '')).to be_nil
end
it_behaves_like 'wrapping gRPC errors', Gitlab::GitalyClient::RemoteService, :find_remote_root_ref do
- subject { repository.find_remote_root_ref('origin') }
+ subject { repository.find_remote_root_ref('origin', SeedHelper::GITLAB_GIT_TEST_REPO_URL) }
end
end
diff --git a/spec/lib/gitlab/git/tree_spec.rb b/spec/lib/gitlab/git/tree_spec.rb
index 5ef964ac3c1..c44d7e44751 100644
--- a/spec/lib/gitlab/git/tree_spec.rb
+++ b/spec/lib/gitlab/git/tree_spec.rb
@@ -10,9 +10,9 @@ RSpec.describe Gitlab::Git::Tree, :seed_helper do
it { expect(tree).to be_kind_of Array }
it { expect(tree.empty?).to be_falsey }
- it { expect(tree.select(&:dir?).size).to eq(2) }
- it { expect(tree.select(&:file?).size).to eq(10) }
- it { expect(tree.select(&:submodule?).size).to eq(2) }
+ it { expect(tree.count(&:dir?)).to eq(2) }
+ it { expect(tree.count(&:file?)).to eq(10) }
+ it { expect(tree.count(&:submodule?)).to eq(2) }
it 'returns an empty array when called with an invalid ref' do
expect(described_class.where(repository, 'foobar-does-not-exist')).to eq([])
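Swapping select(...).size for count with a block keeps the result identical while avoiding the intermediate array. In plain Ruby:

    entries = [1, 2, 3, 4]

    entries.select(&:even?).size  # => 2, builds a temporary array first
    entries.count(&:even?)        # => 2, counts in a single pass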
diff --git a/spec/lib/gitlab/git/wiki_spec.rb b/spec/lib/gitlab/git/wiki_spec.rb
index 36bff42d937..eb7deb08063 100644
--- a/spec/lib/gitlab/git/wiki_spec.rb
+++ b/spec/lib/gitlab/git/wiki_spec.rb
@@ -58,22 +58,6 @@ RSpec.describe Gitlab::Git::Wiki do
end
end
- describe '#delete_page' do
- after do
- destroy_page('page1')
- end
-
- it 'only removes the page with the same path' do
- create_page('page1', 'content')
- create_page('*', 'content')
-
- subject.delete_page('*', commit_details('whatever'))
-
- expect(subject.list_pages.count).to eq 1
- expect(subject.list_pages.first.title).to eq 'page1'
- end
- end
-
describe '#preview_slug' do
where(:title, :format, :expected_slug) do
'The Best Thing' | :markdown | 'The-Best-Thing'
diff --git a/spec/lib/gitlab/git/wraps_gitaly_errors_spec.rb b/spec/lib/gitlab/git/wraps_gitaly_errors_spec.rb
index e448277b307..2c9da0f6606 100644
--- a/spec/lib/gitlab/git/wraps_gitaly_errors_spec.rb
+++ b/spec/lib/gitlab/git/wraps_gitaly_errors_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe Gitlab::Git::WrapsGitalyErrors do
mapping.each do |grpc_error, error|
it "wraps #{grpc_error} in a #{error}" do
- expect { wrapper.wrapped_gitaly_errors { raise grpc_error.new('wrapped') } }
+ expect { wrapper.wrapped_gitaly_errors { raise grpc_error, 'wrapped' } }
.to raise_error(error)
end
end
diff --git a/spec/lib/gitlab/git_access_design_spec.rb b/spec/lib/gitlab/git_access_design_spec.rb
index ee25f6c2979..9fd1f2dcb0c 100644
--- a/spec/lib/gitlab/git_access_design_spec.rb
+++ b/spec/lib/gitlab/git_access_design_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Gitlab::GitAccessDesign do
let_it_be(:project) { create(:project) }
let_it_be(:user) { project.owner }
+
let(:protocol) { 'web' }
let(:actor) { user }
diff --git a/spec/lib/gitlab/git_access_project_spec.rb b/spec/lib/gitlab/git_access_project_spec.rb
index 953b74cf1a9..ad593cbf005 100644
--- a/spec/lib/gitlab/git_access_project_spec.rb
+++ b/spec/lib/gitlab/git_access_project_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::GitAccessProject do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
+
let(:container) { project }
let(:actor) { user }
let(:project_path) { project.path }
diff --git a/spec/lib/gitlab/git_access_spec.rb b/spec/lib/gitlab/git_access_spec.rb
index 9a1ecfe6459..ae9c697e0b9 100644
--- a/spec/lib/gitlab/git_access_spec.rb
+++ b/spec/lib/gitlab/git_access_spec.rb
@@ -554,19 +554,19 @@ RSpec.describe Gitlab::GitAccess do
context 'when the repository is public' do
let(:options) { %i[repository_enabled] }
- it { expect { pull_access_check }.to raise_error('The project you were looking for could not be found.') }
+ it { expect { pull_access_check }.to raise_error("The project you were looking for could not be found or you don't have permission to view it.") }
end
context 'when the repository is private' do
let(:options) { %i[repository_private] }
- it { expect { pull_access_check }.to raise_error('The project you were looking for could not be found.') }
+ it { expect { pull_access_check }.to raise_error("The project you were looking for could not be found or you don't have permission to view it.") }
end
context 'when the repository is disabled' do
let(:options) { %i[repository_disabled] }
- it { expect { pull_access_check }.to raise_error('The project you were looking for could not be found.') }
+ it { expect { pull_access_check }.to raise_error("The project you were looking for could not be found or you don't have permission to view it.") }
end
end
end
@@ -596,13 +596,13 @@ RSpec.describe Gitlab::GitAccess do
context 'when the repository is private' do
let(:options) { %i[repository_private] }
- it { expect { pull_access_check }.to raise_error('The project you were looking for could not be found.') }
+ it { expect { pull_access_check }.to raise_error("The project you were looking for could not be found or you don't have permission to view it.") }
end
context 'when the repository is disabled' do
let(:options) { %i[repository_disabled] }
- it { expect { pull_access_check }.to raise_error('The project you were looking for could not be found.') }
+ it { expect { pull_access_check }.to raise_error("The project you were looking for could not be found or you don't have permission to view it.") }
end
end
end
@@ -1034,7 +1034,7 @@ RSpec.describe Gitlab::GitAccess do
end
end
- context 'when the repository is read only' do
+ context 'when the repository is read-only' do
let(:project) { create(:project, :repository, :read_only) }
it 'denies push access' do
diff --git a/spec/lib/gitlab/git_access_wiki_spec.rb b/spec/lib/gitlab/git_access_wiki_spec.rb
index b78d99269d3..5ada8a6ef40 100644
--- a/spec/lib/gitlab/git_access_wiki_spec.rb
+++ b/spec/lib/gitlab/git_access_wiki_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Gitlab::GitAccessWiki do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :wiki_repo) }
let_it_be(:wiki) { create(:project_wiki, project: project) }
+
let(:changes) { ['6f6d7e7ed 570e7b2ab refs/heads/master'] }
let(:authentication_abilities) { %i[read_project download_code push_code] }
let(:redirected_path) { nil }
diff --git a/spec/lib/gitlab/gitaly_client/remote_service_spec.rb b/spec/lib/gitlab/gitaly_client/remote_service_spec.rb
index b9ef76e1f41..70fc4fe4416 100644
--- a/spec/lib/gitlab/gitaly_client/remote_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/remote_service_spec.rb
@@ -35,22 +35,50 @@ RSpec.describe Gitlab::GitalyClient::RemoteService do
end
describe '#find_remote_root_ref' do
- it 'sends an find_remote_root_ref message and returns the root ref' do
- expect_any_instance_of(Gitaly::RemoteService::Stub)
- .to receive(:find_remote_root_ref)
- .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
- .and_return(double(ref: 'master'))
+ let(:remote) { 'origin' }
+ let(:url) { 'http://git.example.com/my-repo.git' }
+ let(:auth) { 'Basic secret' }
+
+ shared_examples 'a find_remote_root_ref call' do
+      it 'sends a find_remote_root_ref message and returns the root ref' do
+ expect_any_instance_of(Gitaly::RemoteService::Stub)
+ .to receive(:find_remote_root_ref)
+ .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
+ .with(gitaly_request_with_params(expected_params), kind_of(Hash))
+ .and_return(double(ref: 'master'))
+
+ expect(client.find_remote_root_ref(remote, url, auth)).to eq 'master'
+ end
+
+      it 'ensures ref is a valid UTF-8 string' do
+ expect_any_instance_of(Gitaly::RemoteService::Stub)
+ .to receive(:find_remote_root_ref)
+ .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
+ .with(gitaly_request_with_params(expected_params), kind_of(Hash))
+ .and_return(double(ref: "an_invalid_ref_\xE5"))
- expect(client.find_remote_root_ref('origin')).to eq 'master'
+ expect(client.find_remote_root_ref(remote, url, auth)).to eq "an_invalid_ref_Ã¥"
+ end
end
- it 'ensure ref is a valid UTF-8 string' do
- expect_any_instance_of(Gitaly::RemoteService::Stub)
- .to receive(:find_remote_root_ref)
- .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
- .and_return(double(ref: "an_invalid_ref_\xE5"))
+ context 'with inmemory feature enabled' do
+ before do
+ stub_feature_flags(find_remote_root_refs_inmemory: true)
+ end
+
+ it_behaves_like 'a find_remote_root_ref call' do
+ let(:expected_params) { { remote_url: url, http_authorization_header: auth } }
+ end
+ end
- expect(client.find_remote_root_ref('origin')).to eq "an_invalid_ref_Ã¥"
+ context 'with inmemory feature disabled' do
+ before do
+ stub_feature_flags(find_remote_root_refs_inmemory: false)
+ end
+
+ it_behaves_like 'a find_remote_root_ref call' do
+ let(:expected_params) { { remote: remote } }
+ end
end
end
diff --git a/spec/lib/gitlab/github_import/client_spec.rb b/spec/lib/gitlab/github_import/client_spec.rb
index 4000e0b2611..194dfb228ee 100644
--- a/spec/lib/gitlab/github_import/client_spec.rb
+++ b/spec/lib/gitlab/github_import/client_spec.rb
@@ -32,8 +32,9 @@ RSpec.describe Gitlab::GithubImport::Client do
it 'returns the pull request reviews' do
client = described_class.new('foo')
- expect(client.octokit).to receive(:pull_request_reviews).with('foo/bar', 999)
- expect(client).to receive(:with_rate_limit).and_yield
+ expect(client)
+ .to receive(:each_object)
+ .with(:pull_request_reviews, 'foo/bar', 999)
client.pull_request_reviews('foo/bar', 999)
end
diff --git a/spec/lib/gitlab/github_import/importer/note_importer_spec.rb b/spec/lib/gitlab/github_import/importer/note_importer_spec.rb
index 3bb57e152fe..ef0bb90db4a 100644
--- a/spec/lib/gitlab/github_import/importer/note_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/note_importer_spec.rb
@@ -8,13 +8,14 @@ RSpec.describe Gitlab::GithubImport::Importer::NoteImporter do
let(:user) { create(:user) }
let(:created_at) { Time.new(2017, 1, 1, 12, 00) }
let(:updated_at) { Time.new(2017, 1, 1, 12, 15) }
+ let(:note_body) { 'This is my note' }
let(:github_note) do
Gitlab::GithubImport::Representation::Note.new(
noteable_id: 1,
noteable_type: 'Issue',
author: Gitlab::GithubImport::Representation::User.new(id: 4, login: 'alice'),
- note: 'This is my note',
+ note: note_body,
created_at: created_at,
updated_at: updated_at,
github_id: 1
@@ -92,6 +93,24 @@ RSpec.describe Gitlab::GithubImport::Importer::NoteImporter do
importer.execute
end
end
+
+    context 'when the note has invalid chars' do
+      let(:note_body) { %{There was an invalid char "\u0000" <= right here} }
+
+ it 'removes invalid chars' do
+ expect(importer.user_finder)
+ .to receive(:author_id_for)
+ .with(github_note)
+ .and_return([user.id, true])
+
+ expect { importer.execute }
+ .to change(project.notes, :count)
+ .by(1)
+
+ expect(project.notes.last.note)
+          .to eq('There was an invalid char "" <= right here')
+ end
+ end
end
context 'when the noteable does not exist' do
diff --git a/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb
index 5002e0384f3..fa8b5e6ccf0 100644
--- a/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb
@@ -130,7 +130,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter, :clean
.to change(Note, :count).by(1)
last_note = merge_request.notes.last
- expect(last_note.note).to eq("*Created by author*\n\n**Review:** Approved")
+ expect(last_note.note).to eq("*Created by: author*\n\n**Review:** Approved")
expect(last_note.author).to eq(project.creator)
expect(last_note.created_at).to eq(submitted_at)
end
@@ -153,6 +153,20 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter, :clean
end
end
+    context 'when the original author was deleted on GitHub' do
+ let(:review) { create_review(type: 'APPROVED', note: '', author: nil) }
+
+ it 'creates a note for the review without the author information' do
+ expect { subject.execute }
+ .to change(Note, :count).by(1)
+
+ last_note = merge_request.notes.last
+ expect(last_note.note).to eq('**Review:** Approved')
+ expect(last_note.author).to eq(project.creator)
+ expect(last_note.created_at).to eq(submitted_at)
+ end
+ end
+
context 'when the review has a note text' do
context 'when the review is "APPROVED"' do
let(:review) { create_review(type: 'APPROVED') }
@@ -163,7 +177,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter, :clean
last_note = merge_request.notes.last
- expect(last_note.note).to eq("*Created by author*\n\n**Review:** Approved\n\nnote")
+ expect(last_note.note).to eq("*Created by: author*\n\n**Review:** Approved\n\nnote")
expect(last_note.author).to eq(project.creator)
expect(last_note.created_at).to eq(submitted_at)
end
@@ -178,7 +192,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter, :clean
last_note = merge_request.notes.last
- expect(last_note.note).to eq("*Created by author*\n\n**Review:** Commented\n\nnote")
+ expect(last_note.note).to eq("*Created by: author*\n\n**Review:** Commented\n\nnote")
expect(last_note.author).to eq(project.creator)
expect(last_note.created_at).to eq(submitted_at)
end
@@ -193,7 +207,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter, :clean
last_note = merge_request.notes.last
- expect(last_note.note).to eq("*Created by author*\n\n**Review:** Changes requested\n\nnote")
+ expect(last_note.note).to eq("*Created by: author*\n\n**Review:** Changes requested\n\nnote")
expect(last_note.author).to eq(project.creator)
expect(last_note.created_at).to eq(submitted_at)
end
@@ -201,13 +215,13 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter, :clean
end
end
- def create_review(type:, note: 'note')
+ def create_review(type:, note: 'note', author: { id: 999, login: 'author' })
Gitlab::GithubImport::Representation::PullRequestReview.from_json_hash(
merge_request_id: merge_request.id,
review_type: type,
note: note,
submitted_at: submitted_at.to_s,
- author: { id: 999, login: 'author' }
+ author: author
)
end
end
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests_merged_by_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests_merged_by_importer_spec.rb
index b859cc727a6..4a47d103cde 100644
--- a/spec/lib/gitlab/github_import/importer/pull_requests_merged_by_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_requests_merged_by_importer_spec.rb
@@ -23,12 +23,11 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsMergedByImporter do
end
describe '#id_for_already_imported_cache' do
- it { expect(subject.id_for_already_imported_cache(double(number: 1))).to eq(1) }
+ it { expect(subject.id_for_already_imported_cache(double(id: 1))).to eq(1) }
end
- describe '#each_object_to_import' do
+ describe '#each_object_to_import', :clean_gitlab_redis_cache do
it 'fetchs the merged pull requests data' do
- pull_request = double
create(
:merged_merge_request,
iid: 999,
@@ -36,12 +35,18 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsMergedByImporter do
target_project: project
)
+ pull_request = double
+
allow(client)
.to receive(:pull_request)
+        .exactly(:once) # ensures the result is cached on the second call
.with('http://somegithub.com', 999)
.and_return(pull_request)
- expect { |b| subject.each_object_to_import(&b) }.to yield_with_args(pull_request)
+ expect { |b| subject.each_object_to_import(&b) }
+ .to yield_with_args(pull_request)
+
+ subject.each_object_to_import {}
end
end
end
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests_reviews_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests_reviews_importer_spec.rb
index 5e2302f9662..f18064f10aa 100644
--- a/spec/lib/gitlab/github_import/importer/pull_requests_reviews_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_requests_reviews_importer_spec.rb
@@ -23,12 +23,18 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsReviewsImporter do
end
describe '#id_for_already_imported_cache' do
- it { expect(subject.id_for_already_imported_cache(double(github_id: 1))).to eq(1) }
+ it { expect(subject.id_for_already_imported_cache(double(id: 1))).to eq(1) }
end
- describe '#each_object_to_import' do
+ describe '#each_object_to_import', :clean_gitlab_redis_cache do
it 'fetchs the merged pull requests data' do
- merge_request = create(:merge_request, source_project: project)
+ merge_request = create(
+ :merged_merge_request,
+ iid: 999,
+ source_project: project,
+ target_project: project
+ )
+
review = double
expect(review)
@@ -37,10 +43,14 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsReviewsImporter do
allow(client)
.to receive(:pull_request_reviews)
+        .exactly(:once) # ensures the result is cached on the second call
.with('github/repo', merge_request.iid)
.and_return([review])
- expect { |b| subject.each_object_to_import(&b) }.to yield_with_args(review)
+ expect { |b| subject.each_object_to_import(&b) }
+ .to yield_with_args(review)
+
+ subject.each_object_to_import {}
end
end
end
diff --git a/spec/lib/gitlab/github_import/markdown_text_spec.rb b/spec/lib/gitlab/github_import/markdown_text_spec.rb
index 4995caa0733..22bf10f36d8 100644
--- a/spec/lib/gitlab/github_import/markdown_text_spec.rb
+++ b/spec/lib/gitlab/github_import/markdown_text_spec.rb
@@ -20,11 +20,25 @@ RSpec.describe Gitlab::GithubImport::MarkdownText do
expect(text.to_s).to eq('Hello')
end
+ it 'returns the text when the author has no login' do
+ author = double(:author, login: nil)
+ text = described_class.new('Hello', author, true)
+
+ expect(text.to_s).to eq('Hello')
+ end
+
it 'returns the text with an extra header when the author was not found' do
author = double(:author, login: 'Alice')
text = described_class.new('Hello', author)
expect(text.to_s).to eq("*Created by: Alice*\n\nHello")
end
+
+ it 'cleans invalid chars' do
+ author = double(:author, login: 'Alice')
+ text = described_class.format("\u0000Hello", author)
+
+ expect(text.to_s).to eq("*Created by: Alice*\n\nHello")
+ end
end
end
diff --git a/spec/lib/gitlab/github_import/user_finder_spec.rb b/spec/lib/gitlab/github_import/user_finder_spec.rb
index 0dd2bd4df45..20e67a784e1 100644
--- a/spec/lib/gitlab/github_import/user_finder_spec.rb
+++ b/spec/lib/gitlab/github_import/user_finder_spec.rb
@@ -61,6 +61,10 @@ RSpec.describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache do
expect(finder).to receive(:find).with(user.id, user.login).and_return(42)
expect(finder.user_id_for(user)).to eq(42)
end
+
+ it 'does not fail with empty input' do
+ expect(finder.user_id_for(nil)).to eq(nil)
+ end
end
describe '#find' do
diff --git a/spec/lib/gitlab/graphql/deprecation_spec.rb b/spec/lib/gitlab/graphql/deprecation_spec.rb
index 8b41145b855..2931e28a6ee 100644
--- a/spec/lib/gitlab/graphql/deprecation_spec.rb
+++ b/spec/lib/gitlab/graphql/deprecation_spec.rb
@@ -164,7 +164,7 @@ RSpec.describe ::Gitlab::Graphql::Deprecation do
context 'when the context is :inline' do
it 'renders on one line' do
- expectation = '**Deprecated** in 10.10. This was renamed. Use: `X.y`.'
+ expectation = '**Deprecated** in 10.10. This was renamed. Use: [`X.y`](#xy).'
expect(deprecation.markdown).to eq(expectation)
expect(deprecation.markdown(context: :inline)).to eq(expectation)
@@ -177,7 +177,7 @@ RSpec.describe ::Gitlab::Graphql::Deprecation do
WARNING:
**Deprecated** in 10.10.
This was renamed.
- Use: `X.y`.
+ Use: [`X.y`](#xy).
MD
expect(deprecation.markdown(context: :block)).to eq(expectation)
diff --git a/spec/lib/gitlab/graphql/docs/renderer_spec.rb b/spec/lib/gitlab/graphql/docs/renderer_spec.rb
index 8c0f7aac081..14db51deb88 100644
--- a/spec/lib/gitlab/graphql/docs/renderer_spec.rb
+++ b/spec/lib/gitlab/graphql/docs/renderer_spec.rb
@@ -1,10 +1,20 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper'
RSpec.describe Gitlab::Graphql::Docs::Renderer do
describe '#contents' do
+ shared_examples 'renders correctly as GraphQL documentation' do
+ it 'contains the expected section' do
+ # duplicative - but much better error messages!
+ section.lines.each { |line| expect(contents).to include(line) }
+ expect(contents).to include(section)
+ end
+ end
+
let(:template) { Rails.root.join('lib/gitlab/graphql/docs/templates/default.md.haml') }
+ let(:field_description) { 'List of objects.' }
+ let(:type) { ::GraphQL::INT_TYPE }
let(:query_type) do
Class.new(Types::BaseObject) { graphql_name 'Query' }.tap do |t|
@@ -15,6 +25,13 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
end
end
+ let(:mutation_root) do
+ Class.new(::Types::BaseObject) do
+ include ::Gitlab::Graphql::MountMutation
+ graphql_name 'Mutation'
+ end
+ end
+
let(:mock_schema) do
Class.new(GraphQL::Schema) do
def resolve_type(obj, ctx)
@@ -23,10 +40,9 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
end
end
- let(:field_description) { 'List of objects.' }
-
subject(:contents) do
mock_schema.query(query_type)
+ mock_schema.mutation(mutation_root) if mutation_root.fields.any?
described_class.new(
mock_schema,
@@ -36,17 +52,18 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
end
describe 'headings' do
- let(:type) { ::GraphQL::INT_TYPE }
-
it 'contains the expected sections' do
expect(contents.lines.map(&:chomp)).to include(
'## `Query` type',
+ '## `Mutation` type',
+ '## Connections',
'## Object types',
'## Enumeration types',
'## Scalar types',
'## Abstract types',
'### Unions',
- '### Interfaces'
+ '### Interfaces',
+ '## Input types'
)
end
end
@@ -66,9 +83,11 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
expectation = <<~DOC
### `ArrayTest`
- | Field | Type | Description |
- | ----- | ---- | ----------- |
- | `foo` | [`#{type_name}`](##{inner_type}) | A description. |
+ #### Fields
+
+ | Name | Type | Description |
+ | ---- | ---- | ----------- |
+ | <a id="arraytestfoo"></a>`foo` | [`#{type_name}`](##{inner_type}) | A description. |
DOC
is_expected.to include(expectation)
@@ -77,7 +96,7 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
describe 'a top level query field' do
let(:expectation) do
<<~DOC
- ### `foo`
+ ### `Query.foo`
List of objects.
@@ -87,7 +106,7 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
| Name | Type | Description |
| ---- | ---- | ----------- |
- | `id` | [`ID`](#id) | ID of the object. |
+ | <a id="queryfooid"></a>`id` | [`ID`](#id) | ID of the object. |
DOC
end
@@ -119,20 +138,123 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
expectation = <<~DOC
### `OrderingTest`
- | Field | Type | Description |
- | ----- | ---- | ----------- |
- | `bar` | [`String!`](#string) | A description of bar field. |
- | `foo` | [`String!`](#string) | A description of foo field. |
+ #### Fields
+
+ | Name | Type | Description |
+ | ---- | ---- | ----------- |
+ | <a id="orderingtestbar"></a>`bar` | [`String!`](#string) | A description of bar field. |
+ | <a id="orderingtestfoo"></a>`foo` | [`String!`](#string) | A description of foo field. |
DOC
is_expected.to include(expectation)
end
end
+ context 'when a field has a documentation reference' do
+ let(:type) do
+ wibble = Class.new(::Types::BaseObject) do
+ graphql_name 'Wibble'
+ field :x, ::GraphQL::INT_TYPE, null: false
+ end
+
+ Class.new(Types::BaseObject) do
+ graphql_name 'DocRefSpec'
+ description 'Testing doc refs'
+
+ field :foo,
+ type: GraphQL::STRING_TYPE,
+ null: false,
+ description: 'The foo.',
+ see: { 'A list of foos' => 'https://example.com/foos' }
+ field :bar,
+ type: GraphQL::STRING_TYPE,
+ null: false,
+ description: 'The bar.',
+ see: { 'A list of bars' => 'https://example.com/bars' } do
+ argument :barity, ::GraphQL::INT_TYPE, required: false, description: '?'
+ end
+ field :wibbles,
+ type: wibble.connection_type,
+ null: true,
+ description: 'The wibbles',
+ see: { 'wibblance' => 'https://example.com/wibbles' }
+ end
+ end
+
+ let(:section) do
+ <<~DOC
+ ### `DocRefSpec`
+
+ Testing doc refs.
+
+ #### Fields
+
+ | Name | Type | Description |
+ | ---- | ---- | ----------- |
+ | <a id="docrefspecfoo"></a>`foo` | [`String!`](#string) | The foo. See [A list of foos](https://example.com/foos). |
+ | <a id="docrefspecwibbles"></a>`wibbles` | [`WibbleConnection`](#wibbleconnection) | The wibbles. See [wibblance](https://example.com/wibbles). (see [Connections](#connections)) |
+
+ #### Fields with arguments
+
+ ##### `DocRefSpec.bar`
+
+ The bar. See [A list of bars](https://example.com/bars).
+
+ Returns [`String!`](#string).
+
+ ###### Arguments
+
+ | Name | Type | Description |
+ | ---- | ---- | ----------- |
+ | <a id="docrefspecbarbarity"></a>`barity` | [`Int`](#int) | ?. |
+ DOC
+ end
+
+ it_behaves_like 'renders correctly as GraphQL documentation'
+ end
+
+ context 'when an argument is deprecated' do
+ let(:type) do
+ Class.new(Types::BaseObject) do
+ graphql_name 'DeprecatedTest'
+ description 'A thing we used to use, but no longer support'
+
+ field :foo,
+ type: GraphQL::STRING_TYPE,
+ null: false,
+ description: 'A description.' do
+ argument :foo_arg, GraphQL::STRING_TYPE,
+ required: false,
+ description: 'The argument.',
+ deprecated: { reason: 'Bad argument', milestone: '101.2' }
+ end
+ end
+ end
+
+ let(:section) do
+ <<~DOC
+ ##### `DeprecatedTest.foo`
+
+ A description.
+
+ Returns [`String!`](#string).
+
+ ###### Arguments
+
+ | Name | Type | Description |
+ | ---- | ---- | ----------- |
+ | <a id="deprecatedtestfoofooarg"></a>`fooArg` **{warning-solid}** | [`String`](#string) | **Deprecated** in 101.2. Bad argument. |
+ DOC
+ end
+
+ it_behaves_like 'renders correctly as GraphQL documentation'
+ end
+
context 'when a field is deprecated' do
let(:type) do
Class.new(Types::BaseObject) do
graphql_name 'DeprecatedTest'
+ description 'A thing we used to use, but no longer support'
field :foo,
type: GraphQL::STRING_TYPE,
@@ -142,9 +264,9 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
field :foo_with_args,
type: GraphQL::STRING_TYPE,
null: false,
- deprecated: { reason: 'Do not use', milestone: '1.10' },
+ deprecated: { reason: 'Do not use', milestone: '1.10', replacement: 'X.y' },
description: 'A description.' do
- argument :fooity, ::GraphQL::INT_TYPE, required: false, description: 'X'
+ argument :arg, GraphQL::INT_TYPE, required: false, description: 'Argity'
end
field :bar,
type: GraphQL::STRING_TYPE,
@@ -158,24 +280,44 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
end
end
- it 'includes the deprecation' do
- expectation = <<~DOC
+ let(:section) do
+ <<~DOC
### `DeprecatedTest`
- | Field | Type | Description |
- | ----- | ---- | ----------- |
- | `bar` **{warning-solid}** | [`String!`](#string) | **Deprecated** in 1.10. This was renamed. Use: `Query.boom`. |
- | `foo` **{warning-solid}** | [`String!`](#string) | **Deprecated** in 1.10. This is deprecated. |
- | `fooWithArgs` **{warning-solid}** | [`String!`](#string) | **Deprecated** in 1.10. Do not use. |
- DOC
+ A thing we used to use, but no longer support.
- is_expected.to include(expectation)
+ #### Fields
+
+ | Name | Type | Description |
+ | ---- | ---- | ----------- |
+ | <a id="deprecatedtestbar"></a>`bar` **{warning-solid}** | [`String!`](#string) | **Deprecated** in 1.10. This was renamed. Use: [`Query.boom`](#queryboom). |
+ | <a id="deprecatedtestfoo"></a>`foo` **{warning-solid}** | [`String!`](#string) | **Deprecated** in 1.10. This is deprecated. |
+
+ #### Fields with arguments
+
+ ##### `DeprecatedTest.fooWithArgs`
+
+ A description.
+
+ WARNING:
+ **Deprecated** in 1.10.
+ Do not use.
+ Use: [`X.y`](#xy).
+
+ Returns [`String!`](#string).
+
+ ###### Arguments
+
+ | Name | Type | Description |
+ | ---- | ---- | ----------- |
+ | <a id="deprecatedtestfoowithargsarg"></a>`arg` | [`Int`](#int) | Argity. |
+ DOC
end
+
+ it_behaves_like 'renders correctly as GraphQL documentation'
end
context 'when a Query.field is deprecated' do
- let(:type) { ::GraphQL::INT_TYPE }
-
before do
query_type.field(
name: :bar,
@@ -186,28 +328,30 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
)
end
- it 'includes the deprecation' do
- expectation = <<~DOC
- ### `bar`
+ let(:type) { ::GraphQL::INT_TYPE }
+ let(:section) do
+ <<~DOC
+ ### `Query.bar`
A bar.
WARNING:
**Deprecated** in 10.11.
This was renamed.
- Use: `Query.foo`.
+ Use: [`Query.foo`](#queryfoo).
Returns [`Int`](#int).
DOC
-
- is_expected.to include(expectation)
end
+
+ it_behaves_like 'renders correctly as GraphQL documentation'
end
context 'when a field has an Enumeration type' do
let(:type) do
enum_type = Class.new(Types::BaseEnum) do
graphql_name 'MyEnum'
+ description 'A test of an enum.'
value 'BAZ',
description: 'A description of BAZ.'
@@ -223,18 +367,20 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
end
end
- it 'includes the description of the Enumeration' do
- expectation = <<~DOC
+ let(:section) do
+ <<~DOC
### `MyEnum`
+ A test of an enum.
+
| Value | Description |
| ----- | ----------- |
- | `BAR` **{warning-solid}** | **Deprecated:** This is deprecated. Deprecated in 1.10. |
- | `BAZ` | A description of BAZ. |
+ | <a id="myenumbar"></a>`BAR` **{warning-solid}** | **Deprecated:** This is deprecated. Deprecated in 1.10. |
+ | <a id="myenumbaz"></a>`BAZ` | A description of BAZ. |
DOC
-
- is_expected.to include(expectation)
end
+
+ it_behaves_like 'renders correctly as GraphQL documentation'
end
context 'when a field has a global ID type' do
@@ -247,29 +393,152 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
end
end
- it 'includes the field and the description of the ID, so we can link to it' do
- type_section = <<~DOC
- ### `IDTest`
+ describe 'section for IDTest' do
+ let(:section) do
+ <<~DOC
+ ### `IDTest`
- A test for rendering IDs.
+ A test for rendering IDs.
- | Field | Type | Description |
- | ----- | ---- | ----------- |
- | `foo` | [`UserID`](#userid) | A user foo. |
- DOC
+ #### Fields
- id_section = <<~DOC
- ### `UserID`
+ | Name | Type | Description |
+ | ---- | ---- | ----------- |
+ | <a id="idtestfoo"></a>`foo` | [`UserID`](#userid) | A user foo. |
+ DOC
+ end
- A `UserID` is a global ID. It is encoded as a string.
+ it_behaves_like 'renders correctly as GraphQL documentation'
+ end
- An example `UserID` is: `"gid://gitlab/User/1"`.
- DOC
+ describe 'section for UserID' do
+ let(:section) do
+ <<~DOC
+ ### `UserID`
+
+ A `UserID` is a global ID. It is encoded as a string.
+
+ An example `UserID` is: `"gid://gitlab/User/1"`.
+ DOC
+ end
- is_expected.to include(type_section, id_section)
+ it_behaves_like 'renders correctly as GraphQL documentation'
end
end
+ context 'when there is a mutation' do
+ let(:mutation) do
+ mutation = Class.new(::Mutations::BaseMutation)
+
+ mutation.graphql_name 'MakeItPretty'
+ mutation.description 'Make everything very pretty.'
+
+ mutation.argument :prettiness_factor,
+ type: GraphQL::FLOAT_TYPE,
+ required: true,
+ description: 'How much prettier?'
+
+ mutation.argument :pulchritude,
+ type: GraphQL::FLOAT_TYPE,
+ required: false,
+ description: 'How much prettier?',
+ deprecated: {
+ reason: :renamed,
+ replacement: 'prettinessFactor',
+ milestone: '72.34'
+ }
+
+ mutation.field :everything,
+ type: GraphQL::STRING_TYPE,
+ null: true,
+ description: 'What we made prettier.'
+
+ mutation.field :omnis,
+ type: GraphQL::STRING_TYPE,
+ null: true,
+ description: 'What we made prettier.',
+ deprecated: {
+ reason: :renamed,
+ replacement: 'everything',
+ milestone: '72.34'
+ }
+
+ mutation
+ end
+
+ before do
+ mutation_root.mount_mutation mutation
+ end
+
+ it_behaves_like 'renders correctly as GraphQL documentation' do
+ let(:section) do
+ <<~DOC
+ ### `Mutation.makeItPretty`
+
+ Make everything very pretty.
+
+ Input type: `MakeItPrettyInput`
+
+ #### Arguments
+
+ | Name | Type | Description |
+ | ---- | ---- | ----------- |
+ | <a id="mutationmakeitprettyclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
+ | <a id="mutationmakeitprettyprettinessfactor"></a>`prettinessFactor` | [`Float!`](#float) | How much prettier?. |
+ | <a id="mutationmakeitprettypulchritude"></a>`pulchritude` **{warning-solid}** | [`Float`](#float) | **Deprecated:** This was renamed. Please use `prettinessFactor`. Deprecated in 72.34. |
+
+ #### Fields
+
+ | Name | Type | Description |
+ | ---- | ---- | ----------- |
+ | <a id="mutationmakeitprettyclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
+ | <a id="mutationmakeitprettyerrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |
+ | <a id="mutationmakeitprettyeverything"></a>`everything` | [`String`](#string) | What we made prettier. |
+ | <a id="mutationmakeitprettyomnis"></a>`omnis` **{warning-solid}** | [`String`](#string) | **Deprecated:** This was renamed. Please use `everything`. Deprecated in 72.34. |
+ DOC
+ end
+ end
+
+ it 'does not render the automatically generated payload type' do
+ expect(contents).not_to include('MakeItPrettyPayload')
+ end
+
+ it 'does not render the automatically generated input type as its own section' do
+ expect(contents).not_to include('# `MakeItPrettyInput`')
+ end
+ end
+
+ context 'when there is an input type' do
+ let(:type) do
+ Class.new(::Types::BaseObject) do
+ graphql_name 'Foo'
+ field :wibble, type: ::GraphQL::INT_TYPE, null: true do
+ argument :date_range,
+ type: ::Types::TimeframeInputType,
+ required: true,
+ description: 'When the foo happened.'
+ end
+ end
+ end
+
+ let(:section) do
+ <<~DOC
+ ### `Timeframe`
+
+ A time-frame defined as a closed inclusive range of two dates.
+
+ #### Arguments
+
+ | Name | Type | Description |
+ | ---- | ---- | ----------- |
+ | <a id="timeframeend"></a>`end` | [`Date!`](#date) | The end of the range. |
+ | <a id="timeframestart"></a>`start` | [`Date!`](#date) | The start of the range. |
+ DOC
+ end
+
+ it_behaves_like 'renders correctly as GraphQL documentation'
+ end
+
context 'when there is an interface and a union' do
let(:type) do
user = Class.new(::Types::BaseObject)
@@ -297,7 +566,7 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
interface.orphan_types african_swallow
Class.new(::Types::BaseObject) do
- graphql_name 'AbstactTypeTest'
+ graphql_name 'AbstractTypeTest'
description 'A test for abstract types.'
field :foo, union, null: true, description: 'The foo.'
@@ -307,14 +576,16 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
it 'lists the fields correctly, and includes descriptions of all the types' do
type_section = <<~DOC
- ### `AbstactTypeTest`
+ ### `AbstractTypeTest`
A test for abstract types.
- | Field | Type | Description |
- | ----- | ---- | ----------- |
- | `flying` | [`Flying`](#flying) | A flying thing. |
- | `foo` | [`UserOrGroup`](#userorgroup) | The foo. |
+ #### Fields
+
+ | Name | Type | Description |
+ | ---- | ---- | ----------- |
+ | <a id="abstracttypetestflying"></a>`flying` | [`Flying`](#flying) | A flying thing. |
+ | <a id="abstracttypetestfoo"></a>`foo` | [`UserOrGroup`](#userorgroup) | The foo. |
DOC
union_section = <<~DOC
@@ -337,9 +608,11 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
- [`AfricanSwallow`](#africanswallow)
- | Field | Type | Description |
- | ----- | ---- | ----------- |
- | `flightSpeed` | [`Int`](#int) | Speed in mph. |
+ ##### Fields
+
+ | Name | Type | Description |
+ | ---- | ---- | ----------- |
+ | <a id="flyingflightspeed"></a>`flightSpeed` | [`Int`](#int) | Speed in mph. |
DOC
implementation_section = <<~DOC
@@ -347,9 +620,11 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
A swallow from Africa.
- | Field | Type | Description |
- | ----- | ---- | ----------- |
- | `flightSpeed` | [`Int`](#int) | Speed in mph. |
+ #### Fields
+
+ | Name | Type | Description |
+ | ---- | ---- | ----------- |
+ | <a id="africanswallowflightspeed"></a>`flightSpeed` | [`Int`](#int) | Speed in mph. |
DOC
is_expected.to include(
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
index 839ad9110cc..03030728834 100644
--- a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
@@ -357,9 +357,10 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
it 'is added to end' do
sliced = subject.sliced_nodes
- last_order_name = sliced.order_values.last.expr.name
- expect(last_order_name).to eq sliced.primary_key
+ order_sql = sliced.order_values.last.to_sql
+
+ expect(order_sql).to end_with(Project.arel_table[:id].desc.to_sql)
end
end
diff --git a/spec/lib/gitlab/graphql/present/field_extension_spec.rb b/spec/lib/gitlab/graphql/present/field_extension_spec.rb
index 5e66e16d655..6ea313d30b3 100644
--- a/spec/lib/gitlab/graphql/present/field_extension_spec.rb
+++ b/spec/lib/gitlab/graphql/present/field_extension_spec.rb
@@ -33,6 +33,48 @@ RSpec.describe Gitlab::Graphql::Present::FieldExtension do
end
end
+ context 'when the field is declared on an interface, and implemented by a presenter' do
+ let(:interface) do
+ Module.new do
+ include ::Types::BaseInterface
+
+ field :interface_field, GraphQL::STRING_TYPE, null: true
+ end
+ end
+
+ let(:implementation) do
+ type = fresh_object_type('Concrete')
+ type.present_using(concrete_impl)
+ type.implements(interface)
+ type
+ end
+
+ def concrete_impl
+ Class.new(base_presenter) do
+ def interface_field
+ 'made of concrete'
+ end
+ end
+ end
+
+ it 'resolves the interface field using the implementation from the presenter' do
+ field = ::Types::BaseField.new(name: :interface_field, type: GraphQL::STRING_TYPE, null: true, owner: interface)
+ value = resolve_field(field, object, object_type: implementation)
+
+ expect(value).to eq 'made of concrete'
+ end
+
+ context 'when the implementation is inherited' do
+ it 'resolves the interface field using the implementation from the presenter' do
+ subclass = Class.new(implementation) { graphql_name 'Subclass' }
+ field = ::Types::BaseField.new(name: :interface_field, type: GraphQL::STRING_TYPE, null: true, owner: interface)
+ value = resolve_field(field, object, object_type: subclass)
+
+ expect(value).to eq 'made of concrete'
+ end
+ end
+ end
+
describe 'interactions with inheritance' do
def parent
type = fresh_object_type('Parent')
diff --git a/spec/lib/gitlab/graphql/queries_spec.rb b/spec/lib/gitlab/graphql/queries_spec.rb
index a140a283c1b..a1cd2cdb2de 100644
--- a/spec/lib/gitlab/graphql/queries_spec.rb
+++ b/spec/lib/gitlab/graphql/queries_spec.rb
@@ -1,5 +1,6 @@
# frozen_string_literal: true
+require 'spec_helper'
require 'fast_spec_helper'
require "test_prof/recipes/rspec/let_it_be"
@@ -124,6 +125,18 @@ RSpec.describe Gitlab::Graphql::Queries do
expect(described_class.find(path)).to be_empty
end
+ it 'ignores customer.query.graphql' do
+ path = root / 'plans.customer.query.graphql'
+
+ expect(described_class.find(path)).to be_empty
+ end
+
+ it 'ignores customer.mutation.graphql' do
+ path = root / 'plans.customer.mutation.graphql'
+
+ expect(described_class.find(path)).to be_empty
+ end
+
it 'finds all query definitions under a root directory' do
found = described_class.find(root)
@@ -137,7 +150,9 @@ RSpec.describe Gitlab::Graphql::Queries do
expect(found).not_to include(
definition_of(root / 'typedefs.graphql'),
- definition_of(root / 'author.fragment.graphql')
+ definition_of(root / 'author.fragment.graphql'),
+ definition_of(root / 'plans.customer.query.graphql'),
+ definition_of(root / 'plans.customer.mutation.graphql')
)
end
end
diff --git a/spec/lib/gitlab/highlight_spec.rb b/spec/lib/gitlab/highlight_spec.rb
index 1a929373716..a5e4d37d306 100644
--- a/spec/lib/gitlab/highlight_spec.rb
+++ b/spec/lib/gitlab/highlight_spec.rb
@@ -46,11 +46,20 @@ RSpec.describe Gitlab::Highlight do
expect(result).to eq(%[<span id="LC1" class="line" lang="plaintext">plain text contents</span>])
end
- it 'returns plain version for long content' do
- stub_const('Gitlab::Highlight::MAXIMUM_TEXT_HIGHLIGHT_SIZE', 1)
- result = described_class.highlight(file_name, content)
+ context 'when content is too long to be highlighted' do
+ let(:result) { described_class.highlight(file_name, content) } # content is 44 bytes
- expect(result).to eq(%[<span id="LC1" class="line" lang="">(make-pathname :defaults name</span>\n<span id="LC2" class="line" lang="">:type "assem")</span>])
+ before do
+        stub_config(extra: { 'maximum_text_highlight_size_kilobytes' => 0.0001 }) # 0.1024 bytes
+ end
+
+ it 'increments the metric for oversized files' do
+ expect { result }.to change { over_highlight_size_limit('text highlighter') }.by(1)
+ end
+
+ it 'returns plain version for long content' do
+ expect(result).to eq(%[<span id="LC1" class="line" lang="">(make-pathname :defaults name</span>\n<span id="LC2" class="line" lang="">:type "assem")</span>])
+ end
end
it 'highlights multi-line comments' do
@@ -132,5 +141,46 @@ RSpec.describe Gitlab::Highlight do
subject.highlight("Content")
end
end
+
+ describe 'highlight timeouts' do
+ context 'when there is a timeout error while highlighting' do
+ let(:result) { described_class.highlight(file_name, content) }
+
+ before do
+ allow(Timeout).to receive(:timeout).twice.and_raise(Timeout::Error)
+        # This is done twice because the error is rescued first and then
+        # the original exception is re-raised
+ end
+
+ it "increments the foreground counter if it's in the foreground" do
+ expect { result }
+ .to raise_error(Timeout::Error)
+ .and change { highlight_timeout_total('foreground') }.by(1)
+ .and not_change { highlight_timeout_total('background') }
+ end
+
+ it "increments the background counter if it's in the background" do
+ allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true)
+
+ expect { result }
+ .to raise_error(Timeout::Error)
+ .and change { highlight_timeout_total('background') }.by(1)
+ .and not_change { highlight_timeout_total('foreground') }
+ end
+ end
+ end
+ end
+
+ def highlight_timeout_total(source)
+ Gitlab::Metrics
+ .counter(:highlight_timeout, 'Counts the times highlights have timed out')
+ .get(source: source)
+ end
+
+ def over_highlight_size_limit(source)
+ Gitlab::Metrics
+ .counter(:over_highlight_size_limit,
+ 'Count the times text has been over the highlight size limit')
+ .get(source: source)
end
end
diff --git a/spec/lib/gitlab/hook_data/key_builder_spec.rb b/spec/lib/gitlab/hook_data/key_builder_spec.rb
new file mode 100644
index 00000000000..86f33df115f
--- /dev/null
+++ b/spec/lib/gitlab/hook_data/key_builder_spec.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::HookData::KeyBuilder do
+ let_it_be(:personal_key) { create(:personal_key) }
+ let_it_be(:other_key) { create(:key) }
+
+ describe '#build' do
+ let(:data) { described_class.new(key).build(event) }
+ let(:event_name) { data[:event_name] }
+ let(:common_attributes) do
+ [
+ :event_name, :created_at, :updated_at, :key, :id
+ ]
+ end
+
+ shared_examples_for 'includes the required attributes' do
+ it 'includes the required attributes' do
+ expect(data.keys).to contain_exactly(*attributes)
+
+ expect(data[:key]).to eq(key.key)
+ expect(data[:id]).to eq(key.id)
+ expect(data[:created_at]).to eq(key.created_at.xmlschema)
+ expect(data[:updated_at]).to eq(key.updated_at.xmlschema)
+ end
+ end
+
+ context 'for keys that belong to a user' do
+ let(:key) { personal_key }
+ let(:attributes) { common_attributes.append(:username) }
+
+ context 'data' do
+ context 'on create' do
+ let(:event) { :create }
+
+ it { expect(event_name).to eq('key_create') }
+ it { expect(data[:username]).to eq(key.user.username) }
+ it_behaves_like 'includes the required attributes'
+ end
+
+ context 'on destroy' do
+ let(:event) { :destroy }
+
+ it { expect(event_name).to eq('key_destroy') }
+ it { expect(data[:username]).to eq(key.user.username) }
+ it_behaves_like 'includes the required attributes'
+ end
+ end
+ end
+
+ context 'for keys that do not belong to a user' do
+ let(:key) { other_key }
+ let(:attributes) { common_attributes }
+
+ context 'data' do
+ context 'on create' do
+ let(:event) { :create }
+
+ it { expect(event_name).to eq('key_create') }
+ it_behaves_like 'includes the required attributes'
+ end
+
+ context 'on destroy' do
+ let(:event) { :destroy }
+
+ it { expect(event_name).to eq('key_destroy') }
+ it_behaves_like 'includes the required attributes'
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/hook_data/project_builder_spec.rb b/spec/lib/gitlab/hook_data/project_builder_spec.rb
new file mode 100644
index 00000000000..672dbab918f
--- /dev/null
+++ b/spec/lib/gitlab/hook_data/project_builder_spec.rb
@@ -0,0 +1,83 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::HookData::ProjectBuilder do
+ let_it_be(:user) { create(:user, name: 'John', email: 'john@example.com') }
+ let_it_be(:namespace) { create(:namespace, owner: user) }
+ let_it_be(:project) { create(:project, :internal, name: 'my_project', namespace: namespace) }
+
+ describe '#build' do
+ let(:data) { described_class.new(project).build(event) }
+ let(:event_name) { data[:event_name] }
+ let(:attributes) do
+ [
+ :event_name, :created_at, :updated_at, :name, :path, :path_with_namespace, :project_id,
+ :owner_name, :owner_email, :project_visibility
+ ]
+ end
+
+ context 'data' do
+ shared_examples_for 'includes the required attributes' do
+ it 'includes the required attributes' do
+ expect(data).to include(*attributes)
+
+ expect(data[:created_at]).to eq(project.created_at.xmlschema)
+ expect(data[:updated_at]).to eq(project.updated_at.xmlschema)
+ expect(data[:name]).to eq('my_project')
+ expect(data[:path]).to eq(project.path)
+ expect(data[:path_with_namespace]).to eq(project.full_path)
+ expect(data[:project_id]).to eq(project.id)
+ expect(data[:owner_name]).to eq('John')
+ expect(data[:owner_email]).to eq('john@example.com')
+ expect(data[:project_visibility]).to eq('internal')
+ end
+ end
+
+ shared_examples_for 'does not include `old_path_with_namespace` attribute' do
+ it 'does not include `old_path_with_namespace` attribute' do
+ expect(data).not_to include(:old_path_with_namespace)
+ end
+ end
+
+ shared_examples_for 'includes `old_path_with_namespace` attribute' do
+ it 'includes `old_path_with_namespace` attribute' do
+ allow(project).to receive(:old_path_with_namespace).and_return('old-path-with-namespace')
+ expect(data[:old_path_with_namespace]).to eq('old-path-with-namespace')
+ end
+ end
+
+ context 'on create' do
+ let(:event) { :create }
+
+ it { expect(event_name).to eq('project_create') }
+ it_behaves_like 'includes the required attributes'
+ it_behaves_like 'does not include `old_path_with_namespace` attribute'
+ end
+
+ context 'on destroy' do
+ let(:event) { :destroy }
+
+ it { expect(event_name).to eq('project_destroy') }
+ it_behaves_like 'includes the required attributes'
+ it_behaves_like 'does not include `old_path_with_namespace` attribute'
+ end
+
+ context 'on rename' do
+ let(:event) { :rename }
+
+ it { expect(event_name).to eq('project_rename') }
+ it_behaves_like 'includes the required attributes'
+ it_behaves_like 'includes `old_path_with_namespace` attribute'
+ end
+
+ context 'on transfer' do
+ let(:event) { :transfer }
+
+ it { expect(event_name).to eq('project_transfer') }
+ it_behaves_like 'includes the required attributes'
+ it_behaves_like 'includes `old_path_with_namespace` attribute'
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/i18n_spec.rb b/spec/lib/gitlab/i18n_spec.rb
index ee10739195a..aae4a13bd73 100644
--- a/spec/lib/gitlab/i18n_spec.rb
+++ b/spec/lib/gitlab/i18n_spec.rb
@@ -3,13 +3,18 @@
require 'spec_helper'
RSpec.describe Gitlab::I18n do
- let(:user) { create(:user, preferred_language: 'es') }
+ let(:user) { create(:user, preferred_language: :es) }
describe '.selectable_locales' do
- it 'does not return languages that should not be available in the UI' do
- Gitlab::I18n::NOT_AVAILABLE_IN_UI.each do |language|
- expect(described_class.selectable_locales).not_to include(language)
- end
+ include StubLanguagesTranslationPercentage
+
+ it 'does not return languages with low translation levels' do
+ stub_languages_translation_percentage(pt_BR: 0, en: 100, es: 65)
+
+ expect(described_class.selectable_locales).to eq({
+ 'en' => 'English',
+ 'es' => 'Spanish - español'
+ })
end
end
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 5d1e3c79474..f81db1413c2 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -113,6 +113,7 @@ releases:
- author
- project
- links
+- sorted_links
- milestone_releases
- milestones
- evidences
@@ -124,6 +125,7 @@ project_members:
- source
- project
merge_requests:
+- status_check_responses
- subscriptions
- award_emoji
- author
@@ -258,6 +260,8 @@ ci_pipelines:
- latest_statuses
- dast_profile
- dast_profiles_pipeline
+- dast_site_profile
+- dast_site_profiles_pipeline
ci_refs:
- project
- ci_pipelines
@@ -302,7 +306,7 @@ deploy_keys:
- user
- deploy_keys_projects
- projects
-services:
+integrations:
- project
- service_hook
- jira_tracker_data
@@ -341,8 +345,9 @@ project:
- external_approval_rules
- taggings
- base_tags
-- tag_taggings
- tags
+- topic_taggings
+- topics
- chat_services
- cluster
- clusters
@@ -350,12 +355,13 @@ project:
- cluster_project
- creator
- cycle_analytics_stages
+- value_streams
- group
- namespace
- management_clusters
- boards
- last_event
-- services
+- integrations
- campfire_service
- confluence_service
- datadog_service
@@ -369,7 +375,6 @@ project:
- packagist_service
- pivotaltracker_service
- prometheus_service
-- hipchat_service
- flowdock_service
- assembla_service
- asana_service
@@ -569,6 +574,7 @@ project:
- debian_distributions
- merge_request_metrics
- security_orchestration_policy_configuration
+- timelogs
award_emoji:
- awardable
- user
@@ -745,3 +751,5 @@ issuable_sla:
- issue
push_rule:
- group
+bulk_import_export:
+ - group
diff --git a/spec/lib/gitlab/import_export/attribute_configuration_spec.rb b/spec/lib/gitlab/import_export/attribute_configuration_spec.rb
index 0581f07dd3f..7e17d56def0 100644
--- a/spec/lib/gitlab/import_export/attribute_configuration_spec.rb
+++ b/spec/lib/gitlab/import_export/attribute_configuration_spec.rb
@@ -35,8 +35,9 @@ RSpec.describe 'Import/Export attribute configuration' do
<<-MSG
It looks like #{relation_class}, which is exported using the project Import/Export, has new attributes: #{new_attributes.join(',')}
- Please add the attribute(s) to SAFE_MODEL_ATTRIBUTES if you consider this can be exported.
- Please blacklist the attribute(s) in IMPORT_EXPORT_CONFIG by adding it to its correspondent
+ Please add the attribute(s) to SAFE_MODEL_ATTRIBUTES if they can be exported.
+
+ Please denylist the attribute(s) in IMPORT_EXPORT_CONFIG by adding it to its corresponding
model in the +excluded_attributes+ section.
SAFE_MODEL_ATTRIBUTES: #{File.expand_path(safe_attributes_file)}
diff --git a/spec/lib/gitlab/import_export/base/relation_factory_spec.rb b/spec/lib/gitlab/import_export/base/relation_factory_spec.rb
index 09e6e5a03bb..df33b4896a4 100644
--- a/spec/lib/gitlab/import_export/base/relation_factory_spec.rb
+++ b/spec/lib/gitlab/import_export/base/relation_factory_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe Gitlab::ImportExport::Base::RelationFactory do
subject do
described_class.create(relation_sym: relation_sym,
relation_hash: relation_hash,
+ relation_index: 1,
object_builder: Gitlab::ImportExport::Project::ObjectBuilder,
members_mapper: members_mapper,
user: user,
diff --git a/spec/lib/gitlab/import_export/command_line_util_spec.rb b/spec/lib/gitlab/import_export/command_line_util_spec.rb
index b00a2597681..4000e303816 100644
--- a/spec/lib/gitlab/import_export/command_line_util_spec.rb
+++ b/spec/lib/gitlab/import_export/command_line_util_spec.rb
@@ -35,4 +35,19 @@ RSpec.describe Gitlab::ImportExport::CommandLineUtil do
it 'has the right mask for uploads' do
expect(file_permissions("#{path}/uploads")).to eq(0755) # originally 555
end
+
+ describe '#gzip' do
+    it 'compresses the specified file' do
+ tempfile = Tempfile.new('test', path)
+ filename = File.basename(tempfile.path)
+
+ subject.gzip(dir: path, filename: filename)
+ end
+
+    context 'when an exception occurs' do
+ it 'raises an exception' do
+ expect { subject.gzip(dir: path, filename: 'test') }.to raise_error(Gitlab::ImportExport::Error)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/import_export/config_spec.rb b/spec/lib/gitlab/import_export/config_spec.rb
index 40cf75779b6..7ad5d3d846c 100644
--- a/spec/lib/gitlab/import_export/config_spec.rb
+++ b/spec/lib/gitlab/import_export/config_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe Gitlab::ImportExport::Config do
expect { subject }.not_to raise_error
expect(subject).to be_a(Hash)
expect(subject.keys).to contain_exactly(
- :tree, :excluded_attributes, :included_attributes, :methods, :preloads)
+ :tree, :excluded_attributes, :included_attributes, :methods, :preloads, :export_reorders)
end
end
end
diff --git a/spec/lib/gitlab/import_export/file_importer_spec.rb b/spec/lib/gitlab/import_export/file_importer_spec.rb
index dc668e972cf..ed4436b7257 100644
--- a/spec/lib/gitlab/import_export/file_importer_spec.rb
+++ b/spec/lib/gitlab/import_export/file_importer_spec.rb
@@ -71,6 +71,22 @@ RSpec.describe Gitlab::ImportExport::FileImporter do
it 'creates the file in the right subfolder' do
expect(shared.export_path).to include('test/abcd')
end
+
+ context 'when the import file is remote' do
+ include AfterNextHelpers
+
+      it 'downloads the file from remote object storage' do
+ file_url = 'https://remote.url/file'
+ import_export_upload = build(:import_export_upload, remote_import_url: file_url)
+        project = build(:project, import_export_upload: import_export_upload)
+
+ expect_next(described_class)
+ .to receive(:download)
+ .with(file_url, kind_of(String))
+
+ described_class.import(importable: project, archive_file: nil, shared: shared)
+ end
+ end
end
context 'error' do
diff --git a/spec/lib/gitlab/import_export/group/relation_factory_spec.rb b/spec/lib/gitlab/import_export/group/relation_factory_spec.rb
index 6b2f80cc80a..63286fc0719 100644
--- a/spec/lib/gitlab/import_export/group/relation_factory_spec.rb
+++ b/spec/lib/gitlab/import_export/group/relation_factory_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Gitlab::ImportExport::Group::RelationFactory do
described_class.create(
relation_sym: relation_sym,
relation_hash: relation_hash,
+ relation_index: 1,
members_mapper: members_mapper,
object_builder: Gitlab::ImportExport::Group::ObjectBuilder,
user: importer_user,
diff --git a/spec/lib/gitlab/import_export/json/legacy_reader/file_spec.rb b/spec/lib/gitlab/import_export/json/legacy_reader/file_spec.rb
index 9c7f41cbb89..e092891f236 100644
--- a/spec/lib/gitlab/import_export/json/legacy_reader/file_spec.rb
+++ b/spec/lib/gitlab/import_export/json/legacy_reader/file_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-require_relative 'shared_example.rb'
+require_relative 'shared_example'
RSpec.describe Gitlab::ImportExport::JSON::LegacyReader::File do
it_behaves_like 'import/export json legacy reader' do
diff --git a/spec/lib/gitlab/import_export/json/legacy_reader/hash_spec.rb b/spec/lib/gitlab/import_export/json/legacy_reader/hash_spec.rb
index d0899accf59..e47122b6151 100644
--- a/spec/lib/gitlab/import_export/json/legacy_reader/hash_spec.rb
+++ b/spec/lib/gitlab/import_export/json/legacy_reader/hash_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-require_relative 'shared_example.rb'
+require_relative 'shared_example'
RSpec.describe Gitlab::ImportExport::JSON::LegacyReader::Hash do
it_behaves_like 'import/export json legacy reader' do
diff --git a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
index 762687beedb..a0b2faaecfe 100644
--- a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
+++ b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
@@ -30,12 +30,14 @@ RSpec.describe Gitlab::ImportExport::JSON::StreamingSerializer do
let(:json_writer) { instance_double('Gitlab::ImportExport::JSON::LegacyWriter') }
let(:hash) { { name: exportable.name, description: exportable.description }.stringify_keys }
let(:include) { [] }
+ let(:custom_orderer) { nil }
let(:relations_schema) do
{
only: [:name, :description],
include: include,
- preload: { issues: nil }
+ preload: { issues: nil },
+ export_reorder: custom_orderer
}
end
@@ -57,19 +59,63 @@ RSpec.describe Gitlab::ImportExport::JSON::StreamingSerializer do
[{ issues: { include: [] } }]
end
+ before do
+ create_list(:issue, 3, project: exportable, relative_position: 10000) # ascending ids, same position positive
+ create_list(:issue, 3, project: exportable, relative_position: -5000) # ascending ids, same position negative
+ create_list(:issue, 3, project: exportable, relative_position: 0) # ascending ids, duplicate positions
+ create_list(:issue, 3, project: exportable, relative_position: nil) # no position
+        create_list(:issue, 3, :with_desc_relative_position, project: exportable) # ascending ids, descending position
+ end
+
it 'calls json_writer.write_relation_array with proper params' do
expect(json_writer).to receive(:write_relation_array).with(exportable_path, :issues, array_including(issue.to_json))
subject.execute
end
- context 'relation ordering' do
- before do
- create_list(:issue, 5, project: exportable)
+ context 'default relation ordering' do
+ it 'orders exported issues by primary key(:id)' do
+ expected_issues = exportable.issues.reorder(:id).map(&:to_json)
+
+ expect(json_writer).to receive(:write_relation_array).with(exportable_path, :issues, expected_issues)
+
+ subject.execute
end
+ end
- it 'orders exported issues by primary key' do
- expected_issues = exportable.issues.reorder(:id).map(&:to_json)
+ context 'custom relation ordering ascending' do
+ let(:custom_orderer) do
+ {
+ issues: {
+ column: :relative_position,
+ direction: :asc,
+ nulls_position: :nulls_last
+ }
+ }
+ end
+
+ it 'orders exported issues by custom column(relative_position)' do
+ expected_issues = exportable.issues.reorder(:relative_position, :id).map(&:to_json)
+
+ expect(json_writer).to receive(:write_relation_array).with(exportable_path, :issues, expected_issues)
+
+ subject.execute
+ end
+ end
+
+ context 'custom relation ordering descending' do
+ let(:custom_orderer) do
+ {
+ issues: {
+ column: :relative_position,
+ direction: :desc,
+ nulls_position: :nulls_first
+ }
+ }
+ end
+
+ it 'orders exported issues by custom column(relative_position)' do
+ expected_issues = exportable.issues.order_relative_position_desc.order(id: :desc).map(&:to_json)
expect(json_writer).to receive(:write_relation_array).with(exportable_path, :issues, expected_issues)
diff --git a/spec/lib/gitlab/import_export/project/relation_factory_spec.rb b/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
index 56ba730e893..38e700e8f9e 100644
--- a/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
+++ b/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::ImportExport::Project::RelationFactory do
+RSpec.describe Gitlab::ImportExport::Project::RelationFactory, :use_clean_rails_memory_store_caching do
let(:group) { create(:group) }
let(:project) { create(:project, :repository, group: group) }
let(:members_mapper) { double('members_mapper').as_null_object }
@@ -13,6 +13,7 @@ RSpec.describe Gitlab::ImportExport::Project::RelationFactory do
described_class.create(
relation_sym: relation_sym,
relation_hash: relation_hash,
+ relation_index: 1,
object_builder: Gitlab::ImportExport::Project::ObjectBuilder,
members_mapper: members_mapper,
user: importer_user,
@@ -171,6 +172,75 @@ RSpec.describe Gitlab::ImportExport::Project::RelationFactory do
end
end
+ context 'issue object' do
+ let(:relation_sym) { :issues }
+
+ let(:exported_member) do
+ {
+ "id" => 111,
+ "access_level" => 30,
+ "source_id" => 1,
+ "source_type" => "Project",
+ "user_id" => 3,
+ "notification_level" => 3,
+ "created_at" => "2016-11-18T09:29:42.634Z",
+ "updated_at" => "2016-11-18T09:29:42.634Z",
+ "user" => {
+ "id" => admin.id,
+ "email" => admin.email,
+ "username" => admin.username
+ }
+ }
+ end
+
+ let(:members_mapper) do
+ Gitlab::ImportExport::MembersMapper.new(
+ exported_members: [exported_member],
+ user: importer_user,
+ importable: project)
+ end
+
+ let(:relation_hash) do
+ {
+ 'id' => 20,
+ 'target_branch' => "feature",
+ 'source_branch' => "feature_conflict",
+ 'project_id' => project.id,
+ 'author_id' => admin.id,
+ 'assignee_id' => admin.id,
+ 'updated_by_id' => admin.id,
+ 'title' => "Issue 1",
+ 'created_at' => "2016-06-14T15:02:36.568Z",
+ 'updated_at' => "2016-06-14T15:02:56.815Z",
+ 'state' => "opened",
+ 'description' => "Description",
+ "relative_position" => 25111 # just a random position
+ }
+ end
+
+ it 'has preloaded project' do
+ expect(created_object.project).to equal(project)
+ end
+
+ context 'computing relative position' do
+ context 'when max relative position in the hierarchy is not cached' do
+ it 'has computed new relative_position' do
+ expect(created_object.relative_position).to equal(1026) # 513*2 - ideal distance
+ end
+ end
+
+ context 'when max relative position in the hierarchy is cached' do
+ before do
+ Rails.cache.write("import:#{project.model_name.plural}:#{project.id}:hierarchy_max_issues_relative_position", 10000)
+ end
+
+ it 'has computed new relative_position' do
+ expect(created_object.relative_position).to equal(10000 + 1026) # 513*2 - ideal distance
+ end
+ end
+ end
+ end
+
context 'label object' do
let(:relation_sym) { :labels }
let(:relation_hash) do
diff --git a/spec/lib/gitlab/import_export/project/sample/relation_factory_spec.rb b/spec/lib/gitlab/import_export/project/sample/relation_factory_spec.rb
index 86d5f2402f8..9dde09a7602 100644
--- a/spec/lib/gitlab/import_export/project/sample/relation_factory_spec.rb
+++ b/spec/lib/gitlab/import_export/project/sample/relation_factory_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe Gitlab::ImportExport::Project::Sample::RelationFactory do
described_class.create( # rubocop:disable Rails/SaveBang
relation_sym: relation_sym,
relation_hash: relation_hash,
+ relation_index: 1,
object_builder: Gitlab::ImportExport::Project::ObjectBuilder,
members_mapper: members_mapper,
user: importer_user,
diff --git a/spec/lib/gitlab/import_export/references_configuration_spec.rb b/spec/lib/gitlab/import_export/references_configuration_spec.rb
index 2934d0059ee..6320fbed975 100644
--- a/spec/lib/gitlab/import_export/references_configuration_spec.rb
+++ b/spec/lib/gitlab/import_export/references_configuration_spec.rb
@@ -38,8 +38,9 @@ RSpec.describe 'Import/Export Project configuration' do
<<-MSG
It looks like #{relation_class}, which is exported using the project Import/Export, has references: #{prohibited_keys.join(',')}
- Please replace it with actual relation in IMPORT_EXPORT_CONFIG if you consider this can be exported.
- Please blacklist the attribute(s) in IMPORT_EXPORT_CONFIG by adding it to its correspondent
+ Please replace it with actual relation in IMPORT_EXPORT_CONFIG if they can be exported.
+
+ Please denylist the attribute(s) in IMPORT_EXPORT_CONFIG by adding it to its corresponding
model in the +excluded_attributes+ section.
IMPORT_EXPORT_CONFIG: #{Gitlab::ImportExport.config_file}
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index b159d0cfc76..70ebff2a54e 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -102,6 +102,7 @@ ProjectLabel:
- template
- description
- priority
+- remove_on_close
Milestone:
- id
- title
@@ -208,6 +209,7 @@ MergeRequest:
- discussion_locked
- allow_maintainer_to_push
- merge_ref_sha
+- draft
MergeRequestDiff:
- id
- state
@@ -640,6 +642,7 @@ Timelog:
- time_spent
- merge_request_id
- user_id
+- project_id
- spent_at
- created_at
- updated_at
diff --git a/spec/lib/gitlab/integrations/sti_type_spec.rb b/spec/lib/gitlab/integrations/sti_type_spec.rb
new file mode 100644
index 00000000000..3154872ed04
--- /dev/null
+++ b/spec/lib/gitlab/integrations/sti_type_spec.rb
@@ -0,0 +1,116 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Integrations::StiType do
+ let(:types) { ['AsanaService', 'Integrations::Asana', Integrations::Asana] }
+
+ describe '#serialize' do
+ context 'SQL SELECT' do
+ let(:expected_sql) do
+ <<~SQL.strip
+ SELECT "services".* FROM "services" WHERE "services"."type" = 'AsanaService'
+ SQL
+ end
+
+ it 'forms SQL SELECT statements correctly' do
+ sql_statements = types.map do |type|
+ Integration.where(type: type).to_sql
+ end
+
+ expect(sql_statements).to all(eq(expected_sql))
+ end
+ end
+
+ context 'SQL CREATE' do
+ let(:expected_sql) do
+ <<~SQL.strip
+ INSERT INTO "services" ("type") VALUES ('AsanaService')
+ SQL
+ end
+
+ it 'forms SQL CREATE statements correctly' do
+ sql_statements = types.map do |type|
+ record = ActiveRecord::QueryRecorder.new { Integration.insert({ type: type }) }
+ record.log.first
+ end
+
+ expect(sql_statements).to all(include(expected_sql))
+ end
+ end
+
+ context 'SQL UPDATE' do
+ let(:expected_sql) do
+ <<~SQL.strip
+ UPDATE "services" SET "type" = 'AsanaService'
+ SQL
+ end
+
+ let_it_be(:service) { create(:service) }
+
+ it 'forms SQL UPDATE statements correctly' do
+ sql_statements = types.map do |type|
+ record = ActiveRecord::QueryRecorder.new { service.update_column(:type, type) }
+ record.log.first
+ end
+
+ expect(sql_statements).to all(include(expected_sql))
+ end
+ end
+
+ context 'SQL DELETE' do
+ let(:expected_sql) do
+ <<~SQL.strip
+ DELETE FROM "services" WHERE "services"."type" = 'AsanaService'
+ SQL
+ end
+
+ let(:service) { create(:service) }
+
+ it 'forms SQL DELETE statements correctly' do
+ sql_statements = types.map do |type|
+ record = ActiveRecord::QueryRecorder.new { Integration.delete_by(type: type) }
+ record.log.first
+ end
+
+ expect(sql_statements).to all(match(expected_sql))
+ end
+ end
+ end
+
+ describe '#deserialize' do
+ specify 'it deserializes type correctly', :aggregate_failures do
+ types.each do |type|
+ service = create(:service, type: type)
+
+ expect(service.type).to eq('AsanaService')
+ end
+ end
+ end
+
+ describe '#cast' do
+ it 'casts type as model correctly', :aggregate_failures do
+ create(:service, type: 'AsanaService')
+
+ types.each do |type|
+ expect(Integration.find_by(type: type)).to be_kind_of(Integrations::Asana)
+ end
+ end
+ end
+
+ describe '#changed?' do
+ it 'detects changes correctly', :aggregate_failures do
+ service = create(:service, type: 'AsanaService')
+
+ types.each do |type|
+ service.type = type
+
+ expect(service).not_to be_changed
+ end
+
+ service.type = 'NewType'
+
+ expect(service).to be_changed
+ end
+ end
+end
diff --git a/spec/lib/gitlab/jwt_token_spec.rb b/spec/lib/gitlab/jwt_token_spec.rb
new file mode 100644
index 00000000000..d89ca127393
--- /dev/null
+++ b/spec/lib/gitlab/jwt_token_spec.rb
@@ -0,0 +1,6 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Gitlab::JWTToken do
+ it_behaves_like 'a gitlab jwt token'
+end
diff --git a/spec/lib/gitlab/kas_spec.rb b/spec/lib/gitlab/kas_spec.rb
index 01ced407883..e323f76b42e 100644
--- a/spec/lib/gitlab/kas_spec.rb
+++ b/spec/lib/gitlab/kas_spec.rb
@@ -33,6 +33,46 @@ RSpec.describe Gitlab::Kas do
end
end
+ describe '.enabled?' do
+ before do
+ allow(Gitlab).to receive(:config).and_return(gitlab_config)
+ end
+
+ subject { described_class.enabled? }
+
+ context 'gitlab_config is not enabled' do
+ let(:gitlab_config) { { 'gitlab_kas' => { 'enabled' => false } } }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'gitlab_config is enabled' do
+ let(:gitlab_config) { { 'gitlab_kas' => { 'enabled' => true } } }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'enabled is unset' do
+ let(:gitlab_config) { { 'gitlab_kas' => {} } }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ describe '.external_url' do
+ it 'returns gitlab_kas external_url config' do
+ expect(described_class.external_url).to eq(Gitlab.config.gitlab_kas.external_url)
+ end
+ end
+
+ describe '.version' do
+ it 'returns gitlab_kas version config' do
+ version_file = Rails.root.join(described_class::VERSION_FILE)
+
+ expect(described_class.version).to eq(version_file.read.chomp)
+ end
+ end
+
describe '.ensure_secret!' do
context 'secret file exists' do
before do
diff --git a/spec/lib/gitlab/lfs/client_spec.rb b/spec/lib/gitlab/lfs/client_spec.rb
index 1c50a2a7500..0f9637e8ca4 100644
--- a/spec/lib/gitlab/lfs/client_spec.rb
+++ b/spec/lib/gitlab/lfs/client_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::Lfs::Client do
let(:base_url) { "https://example.com" }
let(:username) { 'user' }
let(:password) { 'password' }
- let(:credentials) { { user: username, password: password, auth_method: 'password' } }
+ let(:credentials) { { user: username, password: password } }
let(:git_lfs_content_type) { 'application/vnd.git-lfs+json' }
let(:git_lfs_user_agent) { "GitLab #{Gitlab::VERSION} LFS client" }
diff --git a/spec/lib/gitlab/memory/instrumentation_spec.rb b/spec/lib/gitlab/memory/instrumentation_spec.rb
index 6b53550a3d0..0dbe9a8e275 100644
--- a/spec/lib/gitlab/memory/instrumentation_spec.rb
+++ b/spec/lib/gitlab/memory/instrumentation_spec.rb
@@ -69,10 +69,12 @@ RSpec.describe Gitlab::Memory::Instrumentation do
end
it 'a hash is returned' do
- is_expected.to include(
+ result = subject
+ expect(result).to include(
mem_objects: be > 1000,
mem_mallocs: be > 1000,
- mem_bytes: be > 100_000 # 100 items * 100 bytes each
+ mem_bytes: be > 100_000, # 100 items * 100 bytes each
+ mem_total_bytes: eq(result[:mem_bytes] + 40 * result[:mem_objects])
)
end
end
diff --git a/spec/lib/gitlab/middleware/rack_multipart_tempfile_factory_spec.rb b/spec/lib/gitlab/middleware/rack_multipart_tempfile_factory_spec.rb
index b9d00b556c5..b868207e67c 100644
--- a/spec/lib/gitlab/middleware/rack_multipart_tempfile_factory_spec.rb
+++ b/spec/lib/gitlab/middleware/rack_multipart_tempfile_factory_spec.rb
@@ -42,44 +42,20 @@ RSpec.describe Gitlab::Middleware::RackMultipartTempfileFactory do
context 'for a multipart request' do
let(:env) { Rack::MockRequest.env_for('/', multipart_fixture) }
- context 'when the environment variable is enabled' do
- before do
- stub_env('GITLAB_TEMPFILE_IMMEDIATE_UNLINK', '1')
- end
-
- it 'immediately unlinks the temporary file' do
- tempfile = Tempfile.new('foo')
-
- expect(tempfile.path).not_to be(nil)
- expect(Rack::Multipart::Parser::TEMPFILE_FACTORY).to receive(:call).and_return(tempfile)
- expect(tempfile).to receive(:unlink).and_call_original
+ it 'immediately unlinks the temporary file' do
+ tempfile = Tempfile.new('foo')
- subject.call(env)
+ expect(tempfile.path).not_to be(nil)
+ expect(Rack::Multipart::Parser::TEMPFILE_FACTORY).to receive(:call).and_return(tempfile)
+ expect(tempfile).to receive(:unlink).and_call_original
- expect(tempfile.path).to be(nil)
- end
+ subject.call(env)
- it 'processes the request as normal' do
- expect(subject.call(env)).to eq([200, { 'Content-Type' => 'image/jpeg' }, [file_contents]])
- end
+ expect(tempfile.path).to be(nil)
end
- context 'when the environment variable is disabled' do
- it 'does not immediately unlink the temporary file' do
- tempfile = Tempfile.new('foo')
-
- expect(tempfile.path).not_to be(nil)
- expect(Rack::Multipart::Parser::TEMPFILE_FACTORY).to receive(:call).and_return(tempfile)
- expect(tempfile).not_to receive(:unlink).and_call_original
-
- subject.call(env)
-
- expect(tempfile.path).not_to be(nil)
- end
-
- it 'processes the request as normal' do
- expect(subject.call(env)).to eq([200, { 'Content-Type' => 'image/jpeg' }, [file_contents]])
- end
+ it 'processes the request as normal' do
+ expect(subject.call(env)).to eq([200, { 'Content-Type' => 'image/jpeg' }, [file_contents]])
end
end
diff --git a/spec/lib/gitlab/middleware/speedscope_spec.rb b/spec/lib/gitlab/middleware/speedscope_spec.rb
new file mode 100644
index 00000000000..bb830a2fbda
--- /dev/null
+++ b/spec/lib/gitlab/middleware/speedscope_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require 'stackprof'
+
+RSpec.describe Gitlab::Middleware::Speedscope do
+ let(:app) { proc { |env| [200, { 'Content-Type' => 'text/plain' }, ['Hello world!']] } }
+ let(:middleware) { described_class.new(app) }
+
+ describe '#call' do
+ shared_examples 'returns original response' do
+ it 'returns original response' do
+ expect(StackProf).not_to receive(:run)
+
+ status, headers, body = middleware.call(env)
+
+ expect(status).to eq(200)
+ expect(headers).to eq({ 'Content-Type' => 'text/plain' })
+ expect(body.first).to eq('Hello world!')
+ end
+ end
+
+ context 'when flamegraph is not requested' do
+ let(:env) { Rack::MockRequest.env_for('/') }
+
+ it_behaves_like 'returns original response'
+ end
+
+ context 'when flamegraph requested' do
+ let(:env) { Rack::MockRequest.env_for('/', params: { 'performance_bar' => 'flamegraph' }) }
+
+ before do
+ allow(env).to receive(:[]).and_call_original
+ end
+
+ context 'when user is not allowed' do
+ before do
+ allow(env).to receive(:[]).with('warden').and_return(double('Warden', user: create(:user)))
+ end
+
+ it_behaves_like 'returns original response'
+ end
+
+ context 'when user is allowed' do
+ before do
+ allow(env).to receive(:[]).with('warden').and_return(double('Warden', user: create(:admin)))
+ end
+
+ it 'runs StackProf and returns a flamegraph' do
+ expect(StackProf).to receive(:run).and_call_original
+
+ status, headers, body = middleware.call(env)
+
+ expect(status).to eq(200)
+ expect(headers).to eq({ 'Content-Type' => 'text/html' })
+ expect(body.first).to include('speedscope-iframe')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/nav/top_nav_menu_item_spec.rb b/spec/lib/gitlab/nav/top_nav_menu_item_spec.rb
new file mode 100644
index 00000000000..26f9ea3a637
--- /dev/null
+++ b/spec/lib/gitlab/nav/top_nav_menu_item_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Gitlab::Nav::TopNavMenuItem do
+ describe '.build' do
+ it 'builds a hash from the given args' do
+ item = {
+ id: 'id',
+ title: 'Title',
+ active: true,
+ icon: 'icon',
+ href: 'href',
+ method: 'method',
+ view: 'view',
+ css_class: 'css_class',
+ data: {}
+ }
+
+ expect(described_class.build(**item)).to eq(item)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/object_hierarchy_spec.rb b/spec/lib/gitlab/object_hierarchy_spec.rb
index eebd67695e0..7615b37521a 100644
--- a/spec/lib/gitlab/object_hierarchy_spec.rb
+++ b/spec/lib/gitlab/object_hierarchy_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::ObjectHierarchy do
- let_it_be(:parent) { create(:group) }
+ let_it_be(:parent, reload: true) { create(:group) }
let_it_be(:child1) { create(:group, parent: parent) }
let_it_be(:child2) { create(:group, parent: child1) }
diff --git a/spec/lib/gitlab/pages/settings_spec.rb b/spec/lib/gitlab/pages/settings_spec.rb
index c89bf9ff206..1a7c808d1bf 100644
--- a/spec/lib/gitlab/pages/settings_spec.rb
+++ b/spec/lib/gitlab/pages/settings_spec.rb
@@ -47,12 +47,4 @@ RSpec.describe Gitlab::Pages::Settings do
end
end
end
-
- describe '#local_store' do
- subject(:local_store) { described_class.new(settings).local_store }
-
- it 'is an instance of Gitlab::Pages::Stores::LocalStore' do
- expect(local_store).to be_a(Gitlab::Pages::Stores::LocalStore)
- end
- end
end
diff --git a/spec/lib/gitlab/pages/stores/local_store_spec.rb b/spec/lib/gitlab/pages/stores/local_store_spec.rb
deleted file mode 100644
index adab81b2589..00000000000
--- a/spec/lib/gitlab/pages/stores/local_store_spec.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Pages::Stores::LocalStore do
- describe '#enabled' do
- let(:local_store) { double(enabled: true) }
-
- subject(:local_store_enabled) { described_class.new(local_store).enabled }
-
- context 'when the pages_update_legacy_storage FF is disabled' do
- before do
- stub_feature_flags(pages_update_legacy_storage: false)
- end
-
- it { is_expected.to be_falsey }
- end
-
- context 'when the pages_update_legacy_storage FF is enabled' do
- it 'is equal to the original value' do
- expect(local_store_enabled).to eq(local_store.enabled)
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/pagination/keyset/iterator_spec.rb b/spec/lib/gitlab/pagination/keyset/iterator_spec.rb
new file mode 100644
index 00000000000..656ae73945e
--- /dev/null
+++ b/spec/lib/gitlab/pagination/keyset/iterator_spec.rb
@@ -0,0 +1,127 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Pagination::Keyset::Iterator do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:issue_list_with_same_pos) { create_list(:issue, 3, project: project, relative_position: 100, updated_at: 1.day.ago) }
+ let_it_be(:issue_list_with_null_pos) { create_list(:issue, 3, project: project, relative_position: nil, updated_at: 1.day.ago) }
+ let_it_be(:issue_list_with_asc_pos) { create_list(:issue, 3, :with_asc_relative_position, project: project, updated_at: 1.day.ago) }
+
+ let(:klass) { Issue }
+ let(:column) { 'relative_position' }
+ let(:direction) { :asc }
+ let(:reverse_direction) { ::Gitlab::Pagination::Keyset::ColumnOrderDefinition::REVERSED_ORDER_DIRECTIONS[direction] }
+ let(:nulls_position) { :nulls_last }
+ let(:reverse_nulls_position) { ::Gitlab::Pagination::Keyset::ColumnOrderDefinition::REVERSED_NULL_POSITIONS[nulls_position] }
+ let(:custom_reorder) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: column,
+ column_expression: klass.arel_table[column],
+ order_expression: ::Gitlab::Database.nulls_order(column, direction, nulls_position),
+ reversed_order_expression: ::Gitlab::Database.nulls_order(column, reverse_direction, reverse_nulls_position),
+ order_direction: direction,
+ nullable: nulls_position,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ order_expression: klass.arel_table[:id].send(direction),
+ add_to_projections: true
+ )
+ ])
+ end
+
+ let(:scope) { project.issues.reorder(custom_reorder) }
+
+ subject { described_class.new(scope: scope) }
+
+ describe '.each_batch' do
+ it 'yields an ActiveRecord::Relation when a block is given' do
+ subject.each_batch(of: 1) do |relation|
+ expect(relation).to be_a_kind_of(ActiveRecord::Relation)
+ end
+ end
+
+ it 'accepts a custom batch size' do
+ count = 0
+
+ subject.each_batch(of: 2) { |relation| count += relation.count(:all) }
+
+ expect(count).to eq(9)
+ end
+
+ it 'allows updating of the yielded relations' do
+ time = Time.current
+
+ subject.each_batch(of: 2) do |relation|
+ relation.update_all(updated_at: time)
+ end
+
+ expect(Issue.where(updated_at: time).count).to eq(9)
+ end
+
+ context 'with ordering direction' do
+ context 'when ordering asc' do
+ it 'orders ascending by default, including secondary order column' do
+ positions = []
+
+ subject.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:relative_position, :id)) }
+
+ expect(positions).to eq(project.issues.order_relative_position_asc.order(id: :asc).pluck(:relative_position, :id))
+ end
+ end
+
+ context 'when reversing asc order' do
+ let(:scope) { project.issues.order(custom_reorder.reversed_order) }
+
+ it 'orders in reverse of ascending' do
+ positions = []
+
+ subject.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:relative_position, :id)) }
+
+ expect(positions).to eq(project.issues.order_relative_position_desc.order(id: :desc).pluck(:relative_position, :id))
+ end
+ end
+
+ context 'when asc order, with nulls first' do
+ let(:nulls_position) { :nulls_first }
+
+ it 'orders ascending with nulls first' do
+ positions = []
+
+ subject.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:relative_position, :id)) }
+
+ expect(positions).to eq(project.issues.reorder(::Gitlab::Database.nulls_first_order('relative_position', 'ASC')).order(id: :asc).pluck(:relative_position, :id))
+ end
+ end
+
+ context 'when ordering desc' do
+ let(:direction) { :desc }
+ let(:nulls_position) { :nulls_last }
+
+ it 'orders descending' do
+ positions = []
+
+ subject.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:relative_position, :id)) }
+
+ expect(positions).to eq(project.issues.reorder(::Gitlab::Database.nulls_last_order('relative_position', 'DESC')).order(id: :desc).pluck(:relative_position, :id))
+ end
+ end
+
+ context 'when ordering by columns are repeated twice' do
+ let(:direction) { :desc }
+ let(:column) { :id }
+
+ it 'orders descending' do
+ positions = []
+
+ subject.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:id)) }
+
+ expect(positions).to eq(project.issues.reorder(id: :desc).pluck(:id))
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/pagination/keyset/order_spec.rb b/spec/lib/gitlab/pagination/keyset/order_spec.rb
index 06a8aee1048..26f52745b54 100644
--- a/spec/lib/gitlab/pagination/keyset/order_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset/order_spec.rb
@@ -3,76 +3,77 @@
require 'spec_helper'
RSpec.describe Gitlab::Pagination::Keyset::Order do
- let(:table) { Arel::Table.new(:my_table) }
- let(:order) { nil }
+ describe 'paginate over items correctly' do
+ let(:table) { Arel::Table.new(:my_table) }
+ let(:order) { nil }
- def run_query(query)
- ActiveRecord::Base.connection.execute(query).to_a
- end
+ def run_query(query)
+ ActiveRecord::Base.connection.execute(query).to_a
+ end
- def build_query(order:, where_conditions: nil, limit: nil)
- <<-SQL
+ def build_query(order:, where_conditions: nil, limit: nil)
+ <<-SQL
SELECT id, year, month
FROM (#{table_data}) my_table (id, year, month)
WHERE #{where_conditions || '1=1'}
ORDER BY #{order}
LIMIT #{limit || 999};
- SQL
- end
-
- def iterate_and_collect(order:, page_size:, where_conditions: nil)
- all_items = []
-
- loop do
- paginated_items = run_query(build_query(order: order, where_conditions: where_conditions, limit: page_size))
- break if paginated_items.empty?
-
- all_items.concat(paginated_items)
- last_item = paginated_items.last
- cursor_attributes = order.cursor_attributes_for_node(last_item)
- where_conditions = order.build_where_values(cursor_attributes).to_sql
+ SQL
end
- all_items
- end
+ def iterate_and_collect(order:, page_size:, where_conditions: nil)
+ all_items = []
- subject do
- run_query(build_query(order: order))
- end
+ loop do
+ paginated_items = run_query(build_query(order: order, where_conditions: where_conditions, limit: page_size))
+ break if paginated_items.empty?
+
+ all_items.concat(paginated_items)
+ last_item = paginated_items.last
+ cursor_attributes = order.cursor_attributes_for_node(last_item)
+ where_conditions = order.where_values_with_or_query(cursor_attributes).to_sql
+ end
- shared_examples 'order examples' do
- it { expect(subject).to eq(expected) }
+ all_items
+ end
- context 'when paginating forwards' do
- subject { iterate_and_collect(order: order, page_size: 2) }
+ subject do
+ run_query(build_query(order: order))
+ end
+ shared_examples 'order examples' do
it { expect(subject).to eq(expected) }
- context 'with different page size' do
- subject { iterate_and_collect(order: order, page_size: 5) }
+ context 'when paginating forwards' do
+ subject { iterate_and_collect(order: order, page_size: 2) }
it { expect(subject).to eq(expected) }
- end
- end
- context 'when paginating backwards' do
- subject do
- last_item = expected.last
- cursor_attributes = order.cursor_attributes_for_node(last_item)
- where_conditions = order.reversed_order.build_where_values(cursor_attributes)
+ context 'with different page size' do
+ subject { iterate_and_collect(order: order, page_size: 5) }
- iterate_and_collect(order: order.reversed_order, page_size: 2, where_conditions: where_conditions.to_sql)
+ it { expect(subject).to eq(expected) }
+ end
end
- it do
- expect(subject).to eq(expected.reverse[1..-1]) # removing one item because we used it to calculate cursor data for the "last" page in subject
+ context 'when paginating backwards' do
+ subject do
+ last_item = expected.last
+ cursor_attributes = order.cursor_attributes_for_node(last_item)
+ where_conditions = order.reversed_order.where_values_with_or_query(cursor_attributes)
+
+ iterate_and_collect(order: order.reversed_order, page_size: 2, where_conditions: where_conditions.to_sql)
+ end
+
+ it do
+ expect(subject).to eq(expected.reverse[1..-1]) # removing one item because we used it to calculate cursor data for the "last" page in subject
+ end
end
end
- end
- context 'when ordering by a distinct column' do
- let(:table_data) do
- <<-SQL
+ context 'when ordering by a distinct column' do
+ let(:table_data) do
+ <<-SQL
VALUES (1, 0, 0),
(2, 0, 0),
(3, 0, 0),
@@ -82,41 +83,41 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
(7, 0, 0),
(8, 0, 0),
(9, 0, 0)
- SQL
- end
+ SQL
+ end
- let(:order) do
- Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'id',
- column_expression: table['id'],
- order_expression: table['id'].desc,
- nullable: :not_nullable,
- distinct: true
- )
- ])
- end
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ column_expression: table['id'],
+ order_expression: table['id'].desc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
+ end
- let(:expected) do
- [
- { "id" => 9, "year" => 0, "month" => 0 },
- { "id" => 8, "year" => 0, "month" => 0 },
- { "id" => 7, "year" => 0, "month" => 0 },
- { "id" => 6, "year" => 0, "month" => 0 },
- { "id" => 5, "year" => 0, "month" => 0 },
- { "id" => 4, "year" => 0, "month" => 0 },
- { "id" => 3, "year" => 0, "month" => 0 },
- { "id" => 2, "year" => 0, "month" => 0 },
- { "id" => 1, "year" => 0, "month" => 0 }
- ]
- end
+ let(:expected) do
+ [
+ { "id" => 9, "year" => 0, "month" => 0 },
+ { "id" => 8, "year" => 0, "month" => 0 },
+ { "id" => 7, "year" => 0, "month" => 0 },
+ { "id" => 6, "year" => 0, "month" => 0 },
+ { "id" => 5, "year" => 0, "month" => 0 },
+ { "id" => 4, "year" => 0, "month" => 0 },
+ { "id" => 3, "year" => 0, "month" => 0 },
+ { "id" => 2, "year" => 0, "month" => 0 },
+ { "id" => 1, "year" => 0, "month" => 0 }
+ ]
+ end
- it_behaves_like 'order examples'
- end
+ it_behaves_like 'order examples'
+ end
- context 'when ordering by two non-nullable columns and a distinct column' do
- let(:table_data) do
- <<-SQL
+ context 'when ordering by two non-nullable columns and a distinct column' do
+ let(:table_data) do
+ <<-SQL
VALUES (1, 2010, 2),
(2, 2011, 1),
(3, 2009, 2),
@@ -126,55 +127,55 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
(7, 2010, 3),
(8, 2012, 4),
(9, 2013, 5)
- SQL
- end
+ SQL
+ end
- let(:order) do
- Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'year',
- column_expression: table['year'],
- order_expression: table['year'].asc,
- nullable: :not_nullable,
- distinct: false
- ),
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'month',
- column_expression: table['month'],
- order_expression: table['month'].asc,
- nullable: :not_nullable,
- distinct: false
- ),
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'id',
- column_expression: table['id'],
- order_expression: table['id'].asc,
- nullable: :not_nullable,
- distinct: true
- )
- ])
- end
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'year',
+ column_expression: table['year'],
+ order_expression: table['year'].asc,
+ nullable: :not_nullable,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'month',
+ column_expression: table['month'],
+ order_expression: table['month'].asc,
+ nullable: :not_nullable,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ column_expression: table['id'],
+ order_expression: table['id'].asc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
+ end
- let(:expected) do
- [
- { 'year' => 2009, 'month' => 2, 'id' => 3 },
- { 'year' => 2009, 'month' => 2, 'id' => 6 },
- { 'year' => 2010, 'month' => 2, 'id' => 1 },
- { 'year' => 2010, 'month' => 3, 'id' => 7 },
- { 'year' => 2011, 'month' => 1, 'id' => 2 },
- { 'year' => 2011, 'month' => 1, 'id' => 4 },
- { 'year' => 2011, 'month' => 1, 'id' => 5 },
- { 'year' => 2012, 'month' => 4, 'id' => 8 },
- { 'year' => 2013, 'month' => 5, 'id' => 9 }
- ]
- end
+ let(:expected) do
+ [
+ { 'year' => 2009, 'month' => 2, 'id' => 3 },
+ { 'year' => 2009, 'month' => 2, 'id' => 6 },
+ { 'year' => 2010, 'month' => 2, 'id' => 1 },
+ { 'year' => 2010, 'month' => 3, 'id' => 7 },
+ { 'year' => 2011, 'month' => 1, 'id' => 2 },
+ { 'year' => 2011, 'month' => 1, 'id' => 4 },
+ { 'year' => 2011, 'month' => 1, 'id' => 5 },
+ { 'year' => 2012, 'month' => 4, 'id' => 8 },
+ { 'year' => 2013, 'month' => 5, 'id' => 9 }
+ ]
+ end
- it_behaves_like 'order examples'
- end
+ it_behaves_like 'order examples'
+ end
- context 'when ordering by nullable columns and a distinct column' do
- let(:table_data) do
- <<-SQL
+ context 'when ordering by nullable columns and a distinct column' do
+ let(:table_data) do
+ <<-SQL
VALUES (1, 2010, null),
(2, 2011, 2),
(3, null, null),
@@ -186,61 +187,61 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
(9, null, 2),
(10, null, null),
(11, 2010, 2)
- SQL
- end
+ SQL
+ end
- let(:order) do
- Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'year',
- column_expression: table['year'],
- order_expression: Gitlab::Database.nulls_last_order('year', :asc),
- reversed_order_expression: Gitlab::Database.nulls_first_order('year', :desc),
- order_direction: :asc,
- nullable: :nulls_last,
- distinct: false
- ),
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'month',
- column_expression: table['month'],
- order_expression: Gitlab::Database.nulls_last_order('month', :asc),
- reversed_order_expression: Gitlab::Database.nulls_first_order('month', :desc),
- order_direction: :asc,
- nullable: :nulls_last,
- distinct: false
- ),
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'id',
- column_expression: table['id'],
- order_expression: table['id'].asc,
- nullable: :not_nullable,
- distinct: true
- )
- ])
- end
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'year',
+ column_expression: table['year'],
+ order_expression: Gitlab::Database.nulls_last_order('year', :asc),
+ reversed_order_expression: Gitlab::Database.nulls_first_order('year', :desc),
+ order_direction: :asc,
+ nullable: :nulls_last,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'month',
+ column_expression: table['month'],
+ order_expression: Gitlab::Database.nulls_last_order('month', :asc),
+ reversed_order_expression: Gitlab::Database.nulls_first_order('month', :desc),
+ order_direction: :asc,
+ nullable: :nulls_last,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ column_expression: table['id'],
+ order_expression: table['id'].asc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
+ end
- let(:expected) do
- [
- { "id" => 7, "year" => 2010, "month" => 2 },
- { "id" => 11, "year" => 2010, "month" => 2 },
- { "id" => 1, "year" => 2010, "month" => nil },
- { "id" => 5, "year" => 2010, "month" => nil },
- { "id" => 2, "year" => 2011, "month" => 2 },
- { "id" => 6, "year" => 2011, "month" => 2 },
- { "id" => 8, "year" => 2012, "month" => 2 },
- { "id" => 9, "year" => nil, "month" => 2 },
- { "id" => 4, "year" => nil, "month" => 5 },
- { "id" => 3, "year" => nil, "month" => nil },
- { "id" => 10, "year" => nil, "month" => nil }
- ]
- end
+ let(:expected) do
+ [
+ { "id" => 7, "year" => 2010, "month" => 2 },
+ { "id" => 11, "year" => 2010, "month" => 2 },
+ { "id" => 1, "year" => 2010, "month" => nil },
+ { "id" => 5, "year" => 2010, "month" => nil },
+ { "id" => 2, "year" => 2011, "month" => 2 },
+ { "id" => 6, "year" => 2011, "month" => 2 },
+ { "id" => 8, "year" => 2012, "month" => 2 },
+ { "id" => 9, "year" => nil, "month" => 2 },
+ { "id" => 4, "year" => nil, "month" => 5 },
+ { "id" => 3, "year" => nil, "month" => nil },
+ { "id" => 10, "year" => nil, "month" => nil }
+ ]
+ end
- it_behaves_like 'order examples'
- end
+ it_behaves_like 'order examples'
+ end
- context 'when ordering by nullable columns with nulls first ordering and a distinct column' do
- let(:table_data) do
- <<-SQL
+ context 'when ordering by nullable columns with nulls first ordering and a distinct column' do
+ let(:table_data) do
+ <<-SQL
VALUES (1, 2010, null),
(2, 2011, 2),
(3, null, null),
@@ -252,61 +253,61 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
(9, null, 2),
(10, null, null),
(11, 2010, 2)
- SQL
- end
+ SQL
+ end
- let(:order) do
- Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'year',
- column_expression: table['year'],
- order_expression: Gitlab::Database.nulls_first_order('year', :asc),
- reversed_order_expression: Gitlab::Database.nulls_last_order('year', :desc),
- order_direction: :asc,
- nullable: :nulls_first,
- distinct: false
- ),
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'month',
- column_expression: table['month'],
- order_expression: Gitlab::Database.nulls_first_order('month', :asc),
- order_direction: :asc,
- reversed_order_expression: Gitlab::Database.nulls_last_order('month', :desc),
- nullable: :nulls_first,
- distinct: false
- ),
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'id',
- column_expression: table['id'],
- order_expression: table['id'].asc,
- nullable: :not_nullable,
- distinct: true
- )
- ])
- end
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'year',
+ column_expression: table['year'],
+ order_expression: Gitlab::Database.nulls_first_order('year', :asc),
+ reversed_order_expression: Gitlab::Database.nulls_last_order('year', :desc),
+ order_direction: :asc,
+ nullable: :nulls_first,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'month',
+ column_expression: table['month'],
+ order_expression: Gitlab::Database.nulls_first_order('month', :asc),
+ order_direction: :asc,
+ reversed_order_expression: Gitlab::Database.nulls_last_order('month', :desc),
+ nullable: :nulls_first,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ column_expression: table['id'],
+ order_expression: table['id'].asc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
+ end
- let(:expected) do
- [
- { "id" => 3, "year" => nil, "month" => nil },
- { "id" => 10, "year" => nil, "month" => nil },
- { "id" => 9, "year" => nil, "month" => 2 },
- { "id" => 4, "year" => nil, "month" => 5 },
- { "id" => 1, "year" => 2010, "month" => nil },
- { "id" => 5, "year" => 2010, "month" => nil },
- { "id" => 7, "year" => 2010, "month" => 2 },
- { "id" => 11, "year" => 2010, "month" => 2 },
- { "id" => 2, "year" => 2011, "month" => 2 },
- { "id" => 6, "year" => 2011, "month" => 2 },
- { "id" => 8, "year" => 2012, "month" => 2 }
- ]
- end
+ let(:expected) do
+ [
+ { "id" => 3, "year" => nil, "month" => nil },
+ { "id" => 10, "year" => nil, "month" => nil },
+ { "id" => 9, "year" => nil, "month" => 2 },
+ { "id" => 4, "year" => nil, "month" => 5 },
+ { "id" => 1, "year" => 2010, "month" => nil },
+ { "id" => 5, "year" => 2010, "month" => nil },
+ { "id" => 7, "year" => 2010, "month" => 2 },
+ { "id" => 11, "year" => 2010, "month" => 2 },
+ { "id" => 2, "year" => 2011, "month" => 2 },
+ { "id" => 6, "year" => 2011, "month" => 2 },
+ { "id" => 8, "year" => 2012, "month" => 2 }
+ ]
+ end
- it_behaves_like 'order examples'
- end
+ it_behaves_like 'order examples'
+ end
- context 'when ordering by non-nullable columns with mixed directions and a distinct column' do
- let(:table_data) do
- <<-SQL
+ context 'when ordering by non-nullable columns with mixed directions and a distinct column' do
+ let(:table_data) do
+ <<-SQL
VALUES (1, 2010, 0),
(2, 2011, 0),
(3, 2010, 0),
@@ -318,158 +319,216 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
(9, 2013, 0),
(10, 2014, 0),
(11, 2013, 0)
- SQL
- end
+ SQL
+ end
- let(:order) do
- Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'year',
- column_expression: table['year'],
- order_expression: table['year'].asc,
- nullable: :not_nullable,
- distinct: false
- ),
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'id',
- column_expression: table['id'],
- order_expression: table['id'].desc,
- nullable: :not_nullable,
- distinct: true
- )
- ])
- end
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'year',
+ column_expression: table['year'],
+ order_expression: table['year'].asc,
+ nullable: :not_nullable,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ column_expression: table['id'],
+ order_expression: table['id'].desc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
+ end
- let(:expected) do
- [
- { "id" => 7, "year" => 2010, "month" => 0 },
- { "id" => 4, "year" => 2010, "month" => 0 },
- { "id" => 3, "year" => 2010, "month" => 0 },
- { "id" => 1, "year" => 2010, "month" => 0 },
- { "id" => 8, "year" => 2011, "month" => 0 },
- { "id" => 2, "year" => 2011, "month" => 0 },
- { "id" => 6, "year" => 2012, "month" => 0 },
- { "id" => 5, "year" => 2012, "month" => 0 },
- { "id" => 11, "year" => 2013, "month" => 0 },
- { "id" => 9, "year" => 2013, "month" => 0 },
- { "id" => 10, "year" => 2014, "month" => 0 }
- ]
- end
+ let(:expected) do
+ [
+ { "id" => 7, "year" => 2010, "month" => 0 },
+ { "id" => 4, "year" => 2010, "month" => 0 },
+ { "id" => 3, "year" => 2010, "month" => 0 },
+ { "id" => 1, "year" => 2010, "month" => 0 },
+ { "id" => 8, "year" => 2011, "month" => 0 },
+ { "id" => 2, "year" => 2011, "month" => 0 },
+ { "id" => 6, "year" => 2012, "month" => 0 },
+ { "id" => 5, "year" => 2012, "month" => 0 },
+ { "id" => 11, "year" => 2013, "month" => 0 },
+ { "id" => 9, "year" => 2013, "month" => 0 },
+ { "id" => 10, "year" => 2014, "month" => 0 }
+ ]
+ end
- it 'takes out a slice between two cursors' do
- after_cursor = { "id" => 8, "year" => 2011 }
- before_cursor = { "id" => 5, "year" => 2012 }
+ it 'takes out a slice between two cursors' do
+ after_cursor = { "id" => 8, "year" => 2011 }
+ before_cursor = { "id" => 5, "year" => 2012 }
- after_conditions = order.build_where_values(after_cursor)
- reversed = order.reversed_order
- before_conditions = reversed.build_where_values(before_cursor)
+ after_conditions = order.where_values_with_or_query(after_cursor)
+ reversed = order.reversed_order
+ before_conditions = reversed.where_values_with_or_query(before_cursor)
- query = build_query(order: order, where_conditions: "(#{after_conditions.to_sql}) AND (#{before_conditions.to_sql})", limit: 100)
+ query = build_query(order: order, where_conditions: "(#{after_conditions.to_sql}) AND (#{before_conditions.to_sql})", limit: 100)
- expect(run_query(query)).to eq([
- { "id" => 2, "year" => 2011, "month" => 0 },
- { "id" => 6, "year" => 2012, "month" => 0 }
- ])
+ expect(run_query(query)).to eq([
+ { "id" => 2, "year" => 2011, "month" => 0 },
+ { "id" => 6, "year" => 2012, "month" => 0 }
+ ])
+ end
end
- end
- context 'when the passed cursor values do not match with the order definition' do
- let(:order) do
- Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'year',
- column_expression: table['year'],
- order_expression: table['year'].asc,
- nullable: :not_nullable,
- distinct: false
- ),
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'id',
- column_expression: table['id'],
- order_expression: table['id'].desc,
- nullable: :not_nullable,
- distinct: true
- )
- ])
- end
+ context 'when the passed cursor values do not match with the order definition' do
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'year',
+ column_expression: table['year'],
+ order_expression: table['year'].asc,
+ nullable: :not_nullable,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ column_expression: table['id'],
+ order_expression: table['id'].desc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
+ end
- context 'when values are missing' do
- it 'raises error' do
- expect { order.build_where_values(id: 1) }.to raise_error(/Missing items: year/)
+ context 'when values are missing' do
+ it 'raises error' do
+ expect { order.build_where_values(id: 1) }.to raise_error(/Missing items: year/)
+ end
end
- end
- context 'when extra values are present' do
- it 'raises error' do
- expect { order.build_where_values(id: 1, year: 2, foo: 3) }.to raise_error(/Extra items: foo/)
+ context 'when extra values are present' do
+ it 'raises error' do
+ expect { order.build_where_values(id: 1, year: 2, foo: 3) }.to raise_error(/Extra items: foo/)
+ end
end
- end
- context 'when values are missing and extra values are present' do
- it 'raises error' do
- expect { order.build_where_values(year: 2, foo: 3) }.to raise_error(/Extra items: foo\. Missing items: id/)
+ context 'when values are missing and extra values are present' do
+ it 'raises error' do
+ expect { order.build_where_values(year: 2, foo: 3) }.to raise_error(/Extra items: foo\. Missing items: id/)
+ end
end
- end
- context 'when no values are passed' do
- it 'returns nil' do
- expect(order.build_where_values({})).to eq(nil)
+ context 'when no values are passed' do
+ it 'returns empty array' do
+ expect(order.build_where_values({})).to eq([])
+ end
end
end
- end
- context 'extract and apply cursor attributes' do
- let(:model) { Project.new(id: 100) }
- let(:scope) { Project.all }
+ context 'extract and apply cursor attributes' do
+ let(:model) { Project.new(id: 100) }
+ let(:scope) { Project.all }
- shared_examples 'cursor attribute examples' do
- describe '#cursor_attributes_for_node' do
- it { expect(order.cursor_attributes_for_node(model)).to eq({ id: '100' }.with_indifferent_access) }
- end
+ shared_examples 'cursor attribute examples' do
+ describe '#cursor_attributes_for_node' do
+ it { expect(order.cursor_attributes_for_node(model)).to eq({ id: '100' }.with_indifferent_access) }
+ end
+
+ describe '#apply_cursor_conditions' do
+ context 'when params with string keys are passed' do
+ subject(:sql) { order.apply_cursor_conditions(scope, { 'id' => '100' }).to_sql }
- describe '#apply_cursor_conditions' do
- context 'when params with string keys are passed' do
- subject(:sql) { order.apply_cursor_conditions(scope, { 'id' => '100' }).to_sql }
+ it { is_expected.to include('"projects"."id" < 100)') }
+ end
- it { is_expected.to include('"projects"."id" < 100)') }
+ context 'when params with symbol keys are passed' do
+ subject(:sql) { order.apply_cursor_conditions(scope, { id: '100' }).to_sql }
+
+ it { is_expected.to include('"projects"."id" < 100)') }
+ end
end
+ end
- context 'when params with symbol keys are passed' do
- subject(:sql) { order.apply_cursor_conditions(scope, { id: '100' }).to_sql }
+ context 'when string attribute name is given' do
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ order_expression: Project.arel_table['id'].desc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
+ end
- it { is_expected.to include('"projects"."id" < 100)') }
+ it_behaves_like 'cursor attribute examples'
+ end
+
+ context 'when symbol attribute name is given' do
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: :id,
+ order_expression: Project.arel_table['id'].desc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
end
+
+ it_behaves_like 'cursor attribute examples'
end
end
+ end
- context 'when string attribute name is given' do
- let(:order) do
- Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'id',
- order_expression: Project.arel_table['id'].desc,
- nullable: :not_nullable,
- distinct: true
- )
- ])
+ describe 'UNION optimization' do
+ let_it_be(:five_months_ago) { 5.months.ago }
+
+ let_it_be(:user_1) { create(:user, created_at: five_months_ago) }
+ let_it_be(:user_2) { create(:user, created_at: five_months_ago) }
+ let_it_be(:user_3) { create(:user, created_at: 1.month.ago) }
+ let_it_be(:user_4) { create(:user, created_at: 2.months.ago) }
+
+ let(:expected_results) { [user_3, user_4, user_2, user_1] }
+ let(:scope) { User.order(created_at: :desc, id: :desc) }
+ let(:keyset_aware_scope) { Gitlab::Pagination::Keyset::SimpleOrderBuilder.build(scope).first }
+ let(:iterator_options) { { scope: keyset_aware_scope } }
+
+ subject(:items) do
+ [].tap do |collector|
+ Gitlab::Pagination::Keyset::Iterator.new(**iterator_options).each_batch(of: 2) do |models|
+ collector.concat(models)
+ end
end
+ end
- it_behaves_like 'cursor attribute examples'
+ context 'when UNION optimization is off' do
+ it 'returns items in the correct order' do
+ iterator_options[:use_union_optimization] = false
+
+ expect(items).to eq(expected_results)
+ end
end
- context 'when symbol attribute name is given' do
- let(:order) do
- Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: :id,
- order_expression: Project.arel_table['id'].desc,
- nullable: :not_nullable,
- distinct: true
- )
- ])
+ context 'when UNION optimization is on' do
+ before do
+ iterator_options[:use_union_optimization] = true
end
- it_behaves_like 'cursor attribute examples'
+ it 'returns items in the correct order' do
+ expect(items).to eq(expected_results)
+ end
+
+ it 'calls Gitlab::SQL::Union' do
+ expect_next_instances_of(Gitlab::SQL::Union, 2) do |instance|
+ expect(instance.send(:remove_order)).to eq(false) # Do not remove order from the queries
+ expect(instance.send(:remove_duplicates)).to eq(false) # Do not deduplicate the results
+ end
+
+ items
+ end
+
+ it 'builds UNION query' do
+ cursor_attributes = { created_at: five_months_ago, id: user_2.id }
+ order = Gitlab::Pagination::Keyset::Order.extract_keyset_order_object(keyset_aware_scope)
+
+ query = order.apply_cursor_conditions(scope, cursor_attributes, use_union_optimization: true).to_sql
+ expect(query).to include('UNION ALL')
+ end
end
end
end
diff --git a/spec/lib/gitlab/pagination/keyset/simple_order_builder_spec.rb b/spec/lib/gitlab/pagination/keyset/simple_order_builder_spec.rb
new file mode 100644
index 00000000000..5af86cb2dc0
--- /dev/null
+++ b/spec/lib/gitlab/pagination/keyset/simple_order_builder_spec.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Pagination::Keyset::SimpleOrderBuilder do
+ let(:ordered_scope) { described_class.build(scope).first }
+ let(:order_object) { Gitlab::Pagination::Keyset::Order.extract_keyset_order_object(ordered_scope) }
+
+ subject(:sql_with_order) { ordered_scope.to_sql }
+
+ context 'when no order present' do
+ let(:scope) { Project.where(id: [1, 2, 3]) }
+
+ it 'orders by primary key' do
+ expect(sql_with_order).to end_with('ORDER BY "projects"."id" DESC')
+ end
+
+ it 'sets the column definition distinct and not nullable' do
+ column_definition = order_object.column_definitions.first
+
+ expect(column_definition).to be_not_nullable
+ expect(column_definition).to be_distinct
+ end
+ end
+
+ context 'when primary key order present' do
+ let(:scope) { Project.where(id: [1, 2, 3]).order(id: :asc) }
+
+ it 'orders by primary key without altering the direction' do
+ expect(sql_with_order).to end_with('ORDER BY "projects"."id" ASC')
+ end
+ end
+
+ context 'when ordered by other column' do
+ let(:scope) { Project.where(id: [1, 2, 3]).order(created_at: :asc) }
+
+ it 'adds extra primary key order as tie-breaker' do
+ expect(sql_with_order).to end_with('ORDER BY "projects"."created_at" ASC, "projects"."id" DESC')
+ end
+
+ it 'sets the column definition for created_at non-distinct and nullable' do
+ column_definition = order_object.column_definitions.first
+
+ expect(column_definition.attribute_name).to eq('created_at')
+ expect(column_definition.nullable?).to eq(true) # be_nullable calls non_null? method for some reason
+ expect(column_definition).not_to be_distinct
+ end
+ end
+
+ context 'when ordered by two columns where the last one is the tie breaker' do
+ let(:scope) { Project.where(id: [1, 2, 3]).order(created_at: :asc, id: :asc) }
+
+ it 'preserves the order' do
+ expect(sql_with_order).to end_with('ORDER BY "projects"."created_at" ASC, "projects"."id" ASC')
+ end
+ end
+
+ context 'when non-nullable column is given' do
+ let(:scope) { Project.where(id: [1, 2, 3]).order(namespace_id: :asc, id: :asc) }
+
+ it 'sets the column definition for namespace_id non-distinct and non-nullable' do
+ column_definition = order_object.column_definitions.first
+
+ expect(column_definition.attribute_name).to eq('namespace_id')
+ expect(column_definition).to be_not_nullable
+ expect(column_definition).not_to be_distinct
+ end
+ end
+
+ context 'return :unable_to_order symbol when order cannot be built' do
+ subject(:success) { described_class.build(scope).last }
+
+ context 'when raw SQL order is given' do
+ let(:scope) { Project.order('id DESC') }
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when NULLS LAST order is given' do
+ let(:scope) { Project.order(::Gitlab::Database.nulls_last_order('created_at', 'ASC')) }
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when more than 2 columns are given for the order' do
+ let(:scope) { Project.order(created_at: :asc, updated_at: :desc, id: :asc) }
+
+ it { is_expected.to eq(false) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/performance_bar_spec.rb b/spec/lib/gitlab/performance_bar_spec.rb
index 12916c41f0f..b7564dc95f8 100644
--- a/spec/lib/gitlab/performance_bar_spec.rb
+++ b/spec/lib/gitlab/performance_bar_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::PerformanceBar do
it { expect(described_class.l1_cache_backend).to eq(Gitlab::ProcessMemoryCache.cache_backend) }
it { expect(described_class.l2_cache_backend).to eq(Rails.cache) }
- describe '.enabled_for_user?' do
+ describe '.allowed_for_user?' do
let(:user) { create(:user) }
before do
@@ -14,24 +14,24 @@ RSpec.describe Gitlab::PerformanceBar do
end
it 'returns false when given user is nil' do
- expect(described_class.enabled_for_user?(nil)).to be_falsy
+ expect(described_class.allowed_for_user?(nil)).to be_falsy
end
it 'returns true when given user is an admin' do
user = build_stubbed(:user, :admin)
- expect(described_class.enabled_for_user?(user)).to be_truthy
+ expect(described_class.allowed_for_user?(user)).to be_truthy
end
it 'returns false when allowed_group_id is nil' do
expect(described_class).to receive(:allowed_group_id).and_return(nil)
- expect(described_class.enabled_for_user?(user)).to be_falsy
+ expect(described_class.allowed_for_user?(user)).to be_falsy
end
context 'when allowed group ID does not exist' do
it 'returns false' do
- expect(described_class.enabled_for_user?(user)).to be_falsy
+ expect(described_class.allowed_for_user?(user)).to be_falsy
end
end
@@ -44,15 +44,15 @@ RSpec.describe Gitlab::PerformanceBar do
context 'when user is not a member of the allowed group' do
it 'returns false' do
- expect(described_class.enabled_for_user?(user)).to be_falsy
+ expect(described_class.allowed_for_user?(user)).to be_falsy
end
context 'caching of allowed user IDs' do
- subject { described_class.enabled_for_user?(user) }
+ subject { described_class.allowed_for_user?(user) }
before do
# Warm the caches
- described_class.enabled_for_user?(user)
+ described_class.allowed_for_user?(user)
end
it_behaves_like 'allowed user IDs are cached'
@@ -65,15 +65,15 @@ RSpec.describe Gitlab::PerformanceBar do
end
it 'returns true' do
- expect(described_class.enabled_for_user?(user)).to be_truthy
+ expect(described_class.allowed_for_user?(user)).to be_truthy
end
context 'caching of allowed user IDs' do
- subject { described_class.enabled_for_user?(user) }
+ subject { described_class.allowed_for_user?(user) }
before do
# Warm the caches
- described_class.enabled_for_user?(user)
+ described_class.allowed_for_user?(user)
end
it_behaves_like 'allowed user IDs are cached'
@@ -91,7 +91,7 @@ RSpec.describe Gitlab::PerformanceBar do
end
it 'returns the nested group' do
- expect(described_class.enabled_for_user?(user)).to be_truthy
+ expect(described_class.allowed_for_user?(user)).to be_truthy
end
end
@@ -101,7 +101,7 @@ RSpec.describe Gitlab::PerformanceBar do
end
it 'returns false' do
- expect(described_class.enabled_for_user?(user)).to be_falsy
+ expect(described_class.allowed_for_user?(user)).to be_falsy
end
end
end
diff --git a/spec/lib/gitlab/prometheus/adapter_spec.rb b/spec/lib/gitlab/prometheus/adapter_spec.rb
index 9d4806ea73b..259202178a2 100644
--- a/spec/lib/gitlab/prometheus/adapter_spec.rb
+++ b/spec/lib/gitlab/prometheus/adapter_spec.rb
@@ -58,8 +58,8 @@ RSpec.describe Gitlab::Prometheus::Adapter do
context 'with cluster with prometheus integration' do
let!(:prometheus_integration) { create(:clusters_integrations_prometheus, cluster: cluster) }
- it 'returns the integration instead' do
- expect(subject.prometheus_adapter).to eq(prometheus_integration)
+ it 'returns the application' do
+ expect(subject.prometheus_adapter).to eq(prometheus)
end
end
end
diff --git a/spec/lib/gitlab/prometheus/additional_metrics_parser_spec.rb b/spec/lib/gitlab/prometheus/additional_metrics_parser_spec.rb
index 3c7496cabd0..559557f9313 100644
--- a/spec/lib/gitlab/prometheus/additional_metrics_parser_spec.rb
+++ b/spec/lib/gitlab/prometheus/additional_metrics_parser_spec.rb
@@ -35,7 +35,7 @@ RSpec.describe Gitlab::Prometheus::AdditionalMetricsParser do
end
before do
- allow(described_class).to receive(:load_yaml_file) { YAML.load(sample_yaml) }
+ allow(described_class).to receive(:load_yaml_file) { YAML.safe_load(sample_yaml) }
end
it 'parses to two metric groups with 2 and 1 metric respectively' do
@@ -71,7 +71,7 @@ RSpec.describe Gitlab::Prometheus::AdditionalMetricsParser do
shared_examples 'required field' do |field_name|
context "when #{field_name} is nil" do
before do
- allow(described_class).to receive(:load_yaml_file) { YAML.load(field_missing) }
+ allow(described_class).to receive(:load_yaml_file) { YAML.safe_load(field_missing) }
end
it 'throws parsing error' do
@@ -81,7 +81,7 @@ RSpec.describe Gitlab::Prometheus::AdditionalMetricsParser do
context "when #{field_name} are not specified" do
before do
- allow(described_class).to receive(:load_yaml_file) { YAML.load(field_nil) }
+ allow(described_class).to receive(:load_yaml_file) { YAML.safe_load(field_nil) }
end
it 'throws parsing error' do
diff --git a/spec/lib/gitlab/quick_actions/spend_time_and_date_separator_spec.rb b/spec/lib/gitlab/quick_actions/spend_time_and_date_separator_spec.rb
index 0b012bfd970..7e28649e634 100644
--- a/spec/lib/gitlab/quick_actions/spend_time_and_date_separator_spec.rb
+++ b/spec/lib/gitlab/quick_actions/spend_time_and_date_separator_spec.rb
@@ -13,7 +13,9 @@ RSpec.describe Gitlab::QuickActions::SpendTimeAndDateSeparator do
shared_examples 'arg line with valid parameters' do
it 'return time and date array' do
- expect(subject.new(valid_arg).execute).to eq(expected_response)
+ freeze_time do
+ expect(subject.new(valid_arg).execute).to eq(expected_response)
+ end
end
end
@@ -53,7 +55,7 @@ RSpec.describe Gitlab::QuickActions::SpendTimeAndDateSeparator do
it_behaves_like 'arg line with valid parameters' do
let(:valid_arg) { '2m 3m 5m 1h' }
let(:time) { Gitlab::TimeTrackingFormatter.parse(valid_arg) }
- let(:date) { DateTime.now.to_date }
+ let(:date) { DateTime.current }
let(:expected_response) { [time, date] }
end
end
diff --git a/spec/lib/gitlab/rack_attack/instrumented_cache_store_spec.rb b/spec/lib/gitlab/rack_attack/instrumented_cache_store_spec.rb
index 2cb31b00f39..bd167ee2e3e 100644
--- a/spec/lib/gitlab/rack_attack/instrumented_cache_store_spec.rb
+++ b/spec/lib/gitlab/rack_attack/instrumented_cache_store_spec.rb
@@ -58,7 +58,7 @@ RSpec.describe Gitlab::RackAttack::InstrumentedCacheStore do
begin
test_proc.call(subject)
- rescue => e
+ rescue StandardError => e
exception = e
end
ensure
diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb
index f62a3c74005..28447d5c2a9 100644
--- a/spec/lib/gitlab/regex_spec.rb
+++ b/spec/lib/gitlab/regex_spec.rb
@@ -427,6 +427,19 @@ RSpec.describe Gitlab::Regex do
it { is_expected.not_to match('%2e%2e%2fmy_package') }
end
+ describe '.terraform_module_package_name_regex' do
+ subject { described_class.terraform_module_package_name_regex }
+
+ it { is_expected.to match('my-module/my-system') }
+ it { is_expected.to match('my/module') }
+ it { is_expected.not_to match('my-module') }
+ it { is_expected.not_to match('My-Module') }
+ it { is_expected.not_to match('my_module') }
+ it { is_expected.not_to match('my.module') }
+ it { is_expected.not_to match('../../../my-module') }
+ it { is_expected.not_to match('%2e%2e%2fmy-module') }
+ end
+
describe '.pypi_version_regex' do
subject { described_class.pypi_version_regex }
@@ -628,6 +641,50 @@ RSpec.describe Gitlab::Regex do
it { is_expected.not_to match('hé') }
end
+ describe '.helm_channel_regex' do
+ subject { described_class.helm_channel_regex }
+
+ it { is_expected.to match('release') }
+ it { is_expected.to match('my-repo') }
+ it { is_expected.to match('my-repo42') }
+
+ # Do not allow empty
+ it { is_expected.not_to match('') }
+
+ # Do not allow Unicode
+ it { is_expected.not_to match('hé') }
+ end
+
+ describe '.helm_package_regex' do
+ subject { described_class.helm_package_regex }
+
+ it { is_expected.to match('release') }
+ it { is_expected.to match('my-repo') }
+ it { is_expected.to match('my-repo42') }
+
+ # Do not allow empty
+ it { is_expected.not_to match('') }
+
+ # Do not allow Unicode
+ it { is_expected.not_to match('hé') }
+
+ it { is_expected.not_to match('my/../repo') }
+ it { is_expected.not_to match('me%2f%2e%2e%2f') }
+ end
+
+ describe '.helm_version_regex' do
+ subject { described_class.helm_version_regex }
+
+ it { is_expected.to match('v1.2.3') }
+ it { is_expected.to match('v1.2.3-beta') }
+ it { is_expected.to match('v1.2.3-alpha.3') }
+ it { is_expected.not_to match('v1') }
+ it { is_expected.not_to match('v1.2') }
+ it { is_expected.not_to match('v1./2.3') }
+ it { is_expected.not_to match('v../../../../../1.2.3') }
+ it { is_expected.not_to match('v%2e%2e%2f1.2.3') }
+ end
+
describe '.semver_regex' do
subject { described_class.semver_regex }
@@ -726,4 +783,134 @@ RSpec.describe Gitlab::Regex do
it { is_expected.not_to match('v../../../../../1.2.3') }
it { is_expected.not_to match('v%2e%2e%2f1.2.3') }
end
+
+ describe 'Packages::API_PATH_REGEX' do
+ subject { described_class::Packages::API_PATH_REGEX }
+
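+    # Routes for the supported package manager APIs (Composer, Conan, Debian, generic,
+    # Go, Maven, npm, NuGet, PyPI, RubyGems) should match; other API paths should not.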
+ it { is_expected.to match('/api/v4/group/12345/-/packages/composer/p/123456789') }
+ it { is_expected.to match('/api/v4/group/12345/-/packages/composer/p2/pkg_name') }
+ it { is_expected.to match('/api/v4/group/12345/-/packages/composer/packages') }
+ it { is_expected.to match('/api/v4/group/12345/-/packages/composer/pkg_name') }
+ it { is_expected.to match('/api/v4/groups/1234/-/packages/maven/a/path/file.jar') }
+ it { is_expected.to match('/api/v4/groups/1234/-/packages/nuget/index') }
+ it { is_expected.to match('/api/v4/groups/1234/-/packages/nuget/metadata/pkg_name/1.3.4') }
+ it { is_expected.to match('/api/v4/groups/1234/-/packages/nuget/metadata/pkg_name/index') }
+ it { is_expected.to match('/api/v4/groups/1234/-/packages/nuget/query') }
+ it { is_expected.to match('/api/v4/packages/conan/v1/conans/pkg_name/1.2.3/username/stable') }
+ it { is_expected.to match('/api/v4/packages/conan/v1/conans/pkg_name/1.2.3/username/stable/digest') }
+ it { is_expected.to match('/api/v4/packages/conan/v1/conans/pkg_name/1.2.3/username/stable/download_urls') }
+ it { is_expected.to match('/api/v4/packages/conan/v1/conans/pkg_name/1.2.3/username/stable/packages/pkg_ref') }
+ it { is_expected.to match('/api/v4/packages/conan/v1/conans/pkg_name/1.2.3/username/stable/packages/pkg_ref/digest') }
+ it { is_expected.to match('/api/v4/packages/conan/v1/conans/pkg_name/1.2.3/username/stable/packages/pkg_ref/download_urls') }
+ it { is_expected.to match('/api/v4/packages/conan/v1/conans/pkg_name/1.2.3/username/stable/packages/pkg_ref/upload_urls') }
+ it { is_expected.to match('/api/v4/packages/conan/v1/conans/pkg_name/1.2.3/username/stable/upload_urls') }
+ it { is_expected.to match('/api/v4/packages/conan/v1/conans/search') }
+ it { is_expected.to match('/api/v4/packages/conan/v1/files/pkg_name/1.2.3/username/stable/2.3/export/file.name') }
+ it { is_expected.to match('/api/v4/packages/conan/v1/files/pkg_name/1.2.3/username/stable/2.3/export/file.name/authorize') }
+ it { is_expected.to match('/api/v4/packages/conan/v1/files/pkg_name/1.2.3/username/stable/2.3/package/pkg_ref/pkg_revision/file.name') }
+ it { is_expected.to match('/api/v4/packages/conan/v1/files/pkg_name/1.2.3/username/stable/2.3/package/pkg_ref/pkg_revision/file.name/authorize') }
+ it { is_expected.to match('/api/v4/packages/conan/v1/ping') }
+ it { is_expected.to match('/api/v4/packages/conan/v1/users/authenticate') }
+ it { is_expected.to match('/api/v4/packages/conan/v1/users/check_credentials') }
+ it { is_expected.to match('/api/v4/packages/maven/a/path/file.jar') }
+ it { is_expected.to match('/api/v4/packages/npm/-/package/pkg_name/dist-tags') }
+ it { is_expected.to match('/api/v4/packages/npm/-/package/pkg_name/dist-tags/tag') }
+ it { is_expected.to match('/api/v4/packages/npm/pkg_name') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/composer') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/composer/archives/pkg_name') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/conan/v1/conans/pkg_name/1.2.3/username/stable') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/conan/v1/conans/pkg_name/1.2.3/username/stable/digest') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/conan/v1/conans/pkg_name/1.2.3/username/stable/download_urls') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/conan/v1/conans/pkg_name/1.2.3/username/stable/packages/pkg_ref') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/conan/v1/conans/pkg_name/1.2.3/username/stable/packages/pkg_ref/digest') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/conan/v1/conans/pkg_name/1.2.3/username/stable/packages/pkg_ref/download_urls') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/conan/v1/conans/pkg_name/1.2.3/username/stable/packages/pkg_ref/upload_urls') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/conan/v1/conans/pkg_name/1.2.3/username/stable/upload_urls') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/conan/v1/conans/search') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/conan/v1/files/pkg_name/1.2.3/username/stable/2.3/export/file.name') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/conan/v1/files/pkg_name/1.2.3/username/stable/2.3/export/file.name/authorize') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/conan/v1/files/pkg_name/1.2.3/username/stable/2.3/package/pkg_ref/pkg_revision/file.name') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/conan/v1/files/pkg_name/1.2.3/username/stable/2.3/package/pkg_ref/pkg_revision/file.name/authorize') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/conan/v1/ping') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/conan/v1/users/authenticate') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/conan/v1/users/check_credentials') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/debian/dists/stable/compon/binary-x64/Packages') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/debian/dists/stable/InRelease') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/debian/dists/stable/Release') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/debian/dists/stable/Release.gpg') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/debian/file.name') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/debian/file.name/authorize') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/debian/pool/compon/e/pkg/file.name') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/generic/pkg_name/1.3.4/myfile.txt') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/generic/pkg_name/1.3.4/myfile.txt/authorize') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/go/my_module/@v/11.2.3.info') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/go/my_module/@v/11.2.3.mod') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/go/my_module/@v/11.2.3.zip') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/go/my_module/@v/list') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/maven/a/path/file.jar') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/maven/a/path/file.jar/authorize') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/npm/-/package/pkg_name/dist-tags') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/npm/-/package/pkg_name/dist-tags/tag') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/npm/pkg_name') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/npm/pkg_name/-/tarball.tgz') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/nuget') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/nuget/authorize') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/nuget/download/pkg_name/1.3.4/pkg.npkg') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/nuget/download/pkg_name/index') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/nuget/index') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/nuget/metadata/pkg_name/1.3.4') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/nuget/metadata/pkg_name/index') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/nuget/query') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/pypi') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/pypi/authorize') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/pypi/files/1234567890/file.identifier') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/pypi/simple/pkg_name') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/rubygems/api/v1/dependencies') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/rubygems/api/v1/gems') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/rubygems/api/v1/gems/authorize') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/rubygems/gems/pkg') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/rubygems/pkg') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/rubygems/quick/Marshal.4.8/pkg') }
+ it { is_expected.not_to match('') }
+ it { is_expected.not_to match('foo') }
+ it { is_expected.not_to match('/api/v4') }
+ it { is_expected.not_to match('/api/v4/version') }
+ it { is_expected.not_to match('/api/v4/packages') }
+ it { is_expected.not_to match('/api/v4/packages/') }
+ it { is_expected.not_to match('/api/v4/group') }
+ it { is_expected.not_to match('/api/v4/group/12345') }
+ it { is_expected.not_to match('/api/v4/group/12345/-') }
+ it { is_expected.not_to match('/api/v4/group/12345/-/packages') }
+ it { is_expected.not_to match('/api/v4/group/12345/-/packages/') }
+ it { is_expected.not_to match('/api/v4/group/12345/-/packages/50') }
+ it { is_expected.not_to match('/api/v4/groups') }
+ it { is_expected.not_to match('/api/v4/groups/12345') }
+ it { is_expected.not_to match('/api/v4/groups/12345/-') }
+ it { is_expected.not_to match('/api/v4/groups/12345/-/packages') }
+ it { is_expected.not_to match('/api/v4/groups/12345/-/packages/') }
+ it { is_expected.not_to match('/api/v4/groups/12345/-/packages/50') }
+ it { is_expected.not_to match('/api/v4/groups/12345/packages') }
+ it { is_expected.not_to match('/api/v4/groups/12345/packages/') }
+ it { is_expected.not_to match('/api/v4/groups/12345/badges') }
+ it { is_expected.not_to match('/api/v4/groups/12345/issues') }
+ it { is_expected.not_to match('/api/v4/projects') }
+ it { is_expected.not_to match('/api/v4/projects/1234') }
+ it { is_expected.not_to match('/api/v4/projects/1234/packages') }
+ it { is_expected.not_to match('/api/v4/projects/1234/packages/') }
+ it { is_expected.not_to match('/api/v4/projects/1234/packages/50') }
+ it { is_expected.not_to match('/api/v4/projects/1234/packages/50/package_files') }
+ it { is_expected.not_to match('/api/v4/projects/1234/merge_requests') }
+ it { is_expected.not_to match('/api/v4/projects/1234/registry/repositories') }
+ it { is_expected.not_to match('/api/v4/projects/1234/issues') }
+ it { is_expected.not_to match('/api/v4/projects/1234/members') }
+ it { is_expected.not_to match('/api/v4/projects/1234/milestones') }
+
+    # Group-level Debian API endpoints are not matched because they do not use the expected prefix (groups/:id/-/packages/)
+    # TODO: Update the Debian group-level endpoint URLs and adjust these specs: https://gitlab.com/gitlab-org/gitlab/-/issues/326805
+ it { is_expected.not_to match('/api/v4/groups/1234/packages/debian/dists/stable/compon/binary-compo/Packages') }
+ it { is_expected.not_to match('/api/v4/groups/1234/packages/debian/dists/stable/InRelease') }
+ it { is_expected.not_to match('/api/v4/groups/1234/packages/debian/dists/stable/Release') }
+ it { is_expected.not_to match('/api/v4/groups/1234/packages/debian/dists/stable/Release.gpg') }
+ it { is_expected.not_to match('/api/v4/groups/1234/packages/debian/pool/compon/a/pkg/file.name') }
+ end
end
diff --git a/spec/lib/gitlab/relative_positioning/item_context_spec.rb b/spec/lib/gitlab/relative_positioning/item_context_spec.rb
index daea8d8470d..3a469e53cb1 100644
--- a/spec/lib/gitlab/relative_positioning/item_context_spec.rb
+++ b/spec/lib/gitlab/relative_positioning/item_context_spec.rb
@@ -212,4 +212,20 @@ RSpec.describe Gitlab::RelativePositioning::ItemContext do
end
end
end
+
+ describe '#at_position' do
+ let_it_be(:issue) { create_issue(500) }
+ let_it_be(:issue_2) { create_issue(510) }
+
+ let(:subject) { described_class.new(issue, range) }
+
+ it 'finds the item at the specified position' do
+ expect(subject.at_position(500)).to eq(described_class.new(issue, range))
+ expect(subject.at_position(510)).to eq(described_class.new(issue_2, range))
+ end
+
+ it 'raises InvalidPosition when the item cannot be found' do
+ expect { subject.at_position(501) }.to raise_error Gitlab::RelativePositioning::InvalidPosition
+ end
+ end
end
diff --git a/spec/lib/gitlab/repository_cache_spec.rb b/spec/lib/gitlab/repository_cache_spec.rb
index 80285a6c732..3277135246d 100644
--- a/spec/lib/gitlab/repository_cache_spec.rb
+++ b/spec/lib/gitlab/repository_cache_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::RepositoryCache do
let_it_be(:project) { create(:project) }
+
let(:backend) { double('backend').as_null_object }
let(:repository) { project.repository }
let(:namespace) { "#{repository.full_path}:#{project.id}" }
@@ -38,6 +39,7 @@ RSpec.describe Gitlab::RepositoryCache do
describe 'personal snippet repository' do
let_it_be(:personal_snippet) { create(:personal_snippet) }
+
let(:namespace) { repository.full_path }
it_behaves_like 'cache_key examples' do
diff --git a/spec/lib/gitlab/repository_hash_cache_spec.rb b/spec/lib/gitlab/repository_hash_cache_spec.rb
index 9b4ca3f9dca..6b52c315a70 100644
--- a/spec/lib/gitlab/repository_hash_cache_spec.rb
+++ b/spec/lib/gitlab/repository_hash_cache_spec.rb
@@ -4,6 +4,7 @@ require "spec_helper"
RSpec.describe Gitlab::RepositoryHashCache, :clean_gitlab_redis_cache do
let_it_be(:project) { create(:project) }
+
let(:repository) { project.repository }
let(:namespace) { "#{repository.full_path}:#{project.id}" }
let(:cache) { described_class.new(repository) }
diff --git a/spec/lib/gitlab/repository_set_cache_spec.rb b/spec/lib/gitlab/repository_set_cache_spec.rb
index eaecbb0233d..881591ae805 100644
--- a/spec/lib/gitlab/repository_set_cache_spec.rb
+++ b/spec/lib/gitlab/repository_set_cache_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::RepositorySetCache, :clean_gitlab_redis_cache do
let_it_be(:project) { create(:project) }
+
let(:repository) { project.repository }
let(:namespace) { "#{repository.full_path}:#{project.id}" }
let(:cache) { described_class.new(repository) }
@@ -34,6 +35,7 @@ RSpec.describe Gitlab::RepositorySetCache, :clean_gitlab_redis_cache do
describe 'personal snippet repository' do
let_it_be(:personal_snippet) { create(:personal_snippet) }
+
let(:namespace) { repository.full_path }
it_behaves_like 'cache_key examples' do
diff --git a/spec/lib/gitlab/repository_size_checker_spec.rb b/spec/lib/gitlab/repository_size_checker_spec.rb
index 20c08da6c54..559f5fa66c6 100644
--- a/spec/lib/gitlab/repository_size_checker_spec.rb
+++ b/spec/lib/gitlab/repository_size_checker_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::RepositorySizeChecker do
let_it_be(:namespace) { nil }
+
let(:current_size) { 0 }
let(:limit) { 50 }
let(:enabled) { true }
diff --git a/spec/lib/gitlab/repository_size_error_message_spec.rb b/spec/lib/gitlab/repository_size_error_message_spec.rb
index 78504d201d4..633ec41ab00 100644
--- a/spec/lib/gitlab/repository_size_error_message_spec.rb
+++ b/spec/lib/gitlab/repository_size_error_message_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::RepositorySizeErrorMessage do
let_it_be(:namespace) { build(:namespace) }
+
let(:checker) do
Gitlab::RepositorySizeChecker.new(
current_size_proc: -> { 15.megabytes },
diff --git a/spec/lib/gitlab/runtime_spec.rb b/spec/lib/gitlab/runtime_spec.rb
index 1ec14092c63..0fcb7db7d5f 100644
--- a/spec/lib/gitlab/runtime_spec.rb
+++ b/spec/lib/gitlab/runtime_spec.rb
@@ -42,7 +42,19 @@ RSpec.describe Gitlab::Runtime do
end
end
- context "puma" do
+ # Puma has no cli_config method unless `puma/cli` is required
+ context "puma without cli_config" do
+ let(:puma_type) { double('::Puma') }
+
+ before do
+ stub_const('::Puma', puma_type)
+ stub_env('ACTION_CABLE_IN_APP', 'false')
+ end
+
+ it_behaves_like "valid runtime", :puma, 1
+ end
+
+ context "puma with cli_config" do
let(:puma_type) { double('::Puma') }
let(:max_workers) { 2 }
diff --git a/spec/lib/gitlab/sidekiq_config/worker_matcher_spec.rb b/spec/lib/gitlab/sidekiq_config/worker_matcher_spec.rb
index 75e9c8c100b..dfe9358f70b 100644
--- a/spec/lib/gitlab/sidekiq_config/worker_matcher_spec.rb
+++ b/spec/lib/gitlab/sidekiq_config/worker_matcher_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe Gitlab::SidekiqConfig::WorkerMatcher do
[
{
name: 'a',
+ worker_name: 'WorkerA',
feature_category: :category_a,
has_external_dependencies: false,
urgency: :low,
@@ -19,6 +20,7 @@ RSpec.describe Gitlab::SidekiqConfig::WorkerMatcher do
},
{
name: 'a:2',
+ worker_name: 'WorkerA2',
feature_category: :category_a,
has_external_dependencies: false,
urgency: :high,
@@ -27,6 +29,7 @@ RSpec.describe Gitlab::SidekiqConfig::WorkerMatcher do
},
{
name: 'b',
+ worker_name: 'WorkerB',
feature_category: :category_b,
has_external_dependencies: true,
urgency: :high,
@@ -35,6 +38,7 @@ RSpec.describe Gitlab::SidekiqConfig::WorkerMatcher do
},
{
name: 'c',
+ worker_name: 'WorkerC',
feature_category: :category_c,
has_external_dependencies: false,
urgency: :throttled,
@@ -46,56 +50,62 @@ RSpec.describe Gitlab::SidekiqConfig::WorkerMatcher do
context 'with valid input' do
where(:query, :expected_metadatas) do
+ # worker_name
+ 'worker_name=WorkerA' | %w(WorkerA)
+ 'worker_name=WorkerA2' | %w(WorkerA2)
+ 'worker_name=WorkerB|worker_name=WorkerD' | %w(WorkerB)
+ 'worker_name!=WorkerA' | %w(WorkerA2 WorkerB WorkerC)
+
# feature_category
- 'feature_category=category_a' | %w(a a:2)
- 'feature_category=category_a,category_c' | %w(a a:2 c)
- 'feature_category=category_a|feature_category=category_c' | %w(a a:2 c)
- 'feature_category!=category_a' | %w(b c)
+ 'feature_category=category_a' | %w(WorkerA WorkerA2)
+ 'feature_category=category_a,category_c' | %w(WorkerA WorkerA2 WorkerC)
+ 'feature_category=category_a|feature_category=category_c' | %w(WorkerA WorkerA2 WorkerC)
+ 'feature_category!=category_a' | %w(WorkerB WorkerC)
# has_external_dependencies
- 'has_external_dependencies=true' | %w(b)
- 'has_external_dependencies=false' | %w(a a:2 c)
- 'has_external_dependencies=true,false' | %w(a a:2 b c)
- 'has_external_dependencies=true|has_external_dependencies=false' | %w(a a:2 b c)
- 'has_external_dependencies!=true' | %w(a a:2 c)
+ 'has_external_dependencies=true' | %w(WorkerB)
+ 'has_external_dependencies=false' | %w(WorkerA WorkerA2 WorkerC)
+ 'has_external_dependencies=true,false' | %w(WorkerA WorkerA2 WorkerB WorkerC)
+ 'has_external_dependencies=true|has_external_dependencies=false' | %w(WorkerA WorkerA2 WorkerB WorkerC)
+ 'has_external_dependencies!=true' | %w(WorkerA WorkerA2 WorkerC)
# urgency
- 'urgency=high' | %w(a:2 b)
- 'urgency=low' | %w(a)
- 'urgency=high,low,throttled' | %w(a a:2 b c)
- 'urgency=low|urgency=throttled' | %w(a c)
- 'urgency!=high' | %w(a c)
+ 'urgency=high' | %w(WorkerA2 WorkerB)
+ 'urgency=low' | %w(WorkerA)
+ 'urgency=high,low,throttled' | %w(WorkerA WorkerA2 WorkerB WorkerC)
+ 'urgency=low|urgency=throttled' | %w(WorkerA WorkerC)
+ 'urgency!=high' | %w(WorkerA WorkerC)
# name
- 'name=a' | %w(a)
- 'name=a,b' | %w(a b)
- 'name=a,a:2|name=b' | %w(a a:2 b)
- 'name!=a,a:2' | %w(b c)
+ 'name=a' | %w(WorkerA)
+ 'name=a,b' | %w(WorkerA WorkerB)
+ 'name=a,a:2|name=b' | %w(WorkerA WorkerA2 WorkerB)
+ 'name!=a,a:2' | %w(WorkerB WorkerC)
# resource_boundary
- 'resource_boundary=memory' | %w(b c)
- 'resource_boundary=memory,cpu' | %w(a b c)
- 'resource_boundary=memory|resource_boundary=cpu' | %w(a b c)
- 'resource_boundary!=memory,cpu' | %w(a:2)
+ 'resource_boundary=memory' | %w(WorkerB WorkerC)
+ 'resource_boundary=memory,cpu' | %w(WorkerA WorkerB WorkerC)
+ 'resource_boundary=memory|resource_boundary=cpu' | %w(WorkerA WorkerB WorkerC)
+ 'resource_boundary!=memory,cpu' | %w(WorkerA2)
# tags
- 'tags=no_disk_io' | %w(a b)
- 'tags=no_disk_io,git_access' | %w(a a:2 b)
- 'tags=no_disk_io|tags=git_access' | %w(a a:2 b)
- 'tags=no_disk_io&tags=git_access' | %w(a)
- 'tags!=no_disk_io' | %w(a:2 c)
- 'tags!=no_disk_io,git_access' | %w(c)
+ 'tags=no_disk_io' | %w(WorkerA WorkerB)
+ 'tags=no_disk_io,git_access' | %w(WorkerA WorkerA2 WorkerB)
+ 'tags=no_disk_io|tags=git_access' | %w(WorkerA WorkerA2 WorkerB)
+ 'tags=no_disk_io&tags=git_access' | %w(WorkerA)
+ 'tags!=no_disk_io' | %w(WorkerA2 WorkerC)
+ 'tags!=no_disk_io,git_access' | %w(WorkerC)
'tags=unknown_tag' | []
- 'tags!=no_disk_io' | %w(a:2 c)
- 'tags!=no_disk_io,git_access' | %w(c)
- 'tags!=unknown_tag' | %w(a a:2 b c)
+ 'tags!=no_disk_io' | %w(WorkerA2 WorkerC)
+ 'tags!=no_disk_io,git_access' | %w(WorkerC)
+ 'tags!=unknown_tag' | %w(WorkerA WorkerA2 WorkerB WorkerC)
# combinations
- 'feature_category=category_a&urgency=high' | %w(a:2)
- 'feature_category=category_a&urgency=high|feature_category=category_c' | %w(a:2 c)
+ 'feature_category=category_a&urgency=high' | %w(WorkerA2)
+ 'feature_category=category_a&urgency=high|feature_category=category_c' | %w(WorkerA2 WorkerC)
# Match all
- '*' | %w(a a:2 b c)
+ '*' | %w(WorkerA WorkerA2 WorkerB WorkerC)
end
with_them do
@@ -103,7 +113,7 @@ RSpec.describe Gitlab::SidekiqConfig::WorkerMatcher do
matched_metadatas = worker_metadatas.select do |metadata|
described_class.new(query).match?(metadata)
end
- expect(matched_metadatas.map { |m| m[:name] }).to match_array(expected_metadatas)
+ expect(matched_metadatas.map { |m| m[:worker_name] }).to match_array(expected_metadatas)
end
end
end
@@ -113,7 +123,7 @@ RSpec.describe Gitlab::SidekiqConfig::WorkerMatcher do
'feature_category="category_a"' | described_class::InvalidTerm
'feature_category=' | described_class::InvalidTerm
'feature_category~category_a' | described_class::InvalidTerm
- 'worker_name=a' | described_class::UnknownPredicate
+ 'invalid_term=a' | described_class::UnknownPredicate
end
with_them do
diff --git a/spec/lib/gitlab/sidekiq_config/worker_router_spec.rb b/spec/lib/gitlab/sidekiq_config/worker_router_spec.rb
new file mode 100644
index 00000000000..687e35813b1
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_config/worker_router_spec.rb
@@ -0,0 +1,212 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require 'rspec-parameterized'
+
+RSpec.describe Gitlab::SidekiqConfig::WorkerRouter do
+ describe '.queue_name_from_worker_name' do
+ using RSpec::Parameterized::TableSyntax
+
+ def create_worker(name, namespace = nil)
+ Class.new.tap do |worker|
+ worker.define_singleton_method(:name) { name }
+ worker.define_singleton_method(:queue_namespace) { namespace }
+ end
+ end
+
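+    # As the rows below show, the `Gitlab::` prefix and `Worker` suffix are dropped,
+    # the remaining name is underscored, and the queue namespace (when set) is prepended.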
+ where(:worker, :expected_name) do
+ create_worker('PagesWorker') | 'pages'
+ create_worker('PipelineNotificationWorker') | 'pipeline_notification'
+ create_worker('PostReceive') | 'post_receive'
+ create_worker('PostReceive', :git) | 'git:post_receive'
+ create_worker('PipelineHooksWorker', :pipeline_hooks) | 'pipeline_hooks:pipeline_hooks'
+ create_worker('Gitlab::JiraImport::AdvanceStageWorker') | 'jira_import_advance_stage'
+ create_worker('Gitlab::PhabricatorImport::ImportTasksWorker', :importer) | 'importer:phabricator_import_import_tasks'
+ end
+
+ with_them do
+ it 'generates a valid queue name from worker name' do
+ expect(described_class.queue_name_from_worker_name(worker)).to eql(expected_name)
+ end
+ end
+ end
+
+ shared_context 'router examples setup' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:worker) do
+ Class.new do
+ def self.name
+ 'Gitlab::Foo::BarWorker'
+ end
+
+ include ApplicationWorker
+
+ feature_category :feature_a
+ urgency :low
+ worker_resource_boundary :cpu
+ tags :expensive
+ end
+ end
+
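+    # Each row pairs a routing_rules list of [query, queue] tuples with the queue the
+    # worker defined above should be routed to; the first matching rule wins.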
+ where(:routing_rules, :expected_queue) do
+ # Default, no configuration
+ [] | 'foo_bar'
+ # Does not match, fallback to the named queue
+ [
+ ['feature_category=feature_b|urgency=high', 'queue_a'],
+ ['resource_boundary=memory', 'queue_b'],
+ ['tags=cheap', 'queue_c']
+ ] | 'foo_bar'
+ # Match a nil queue, fallback to named queue
+ [
+ ['feature_category=feature_b|urgency=high', 'queue_a'],
+ ['resource_boundary=cpu', nil],
+ ['tags=cheap', 'queue_c']
+ ] | 'foo_bar'
+ # Match an empty string, fallback to named queue
+ [
+ ['feature_category=feature_b|urgency=high', 'queue_a'],
+ ['resource_boundary=cpu', ''],
+ ['tags=cheap', 'queue_c']
+ ] | 'foo_bar'
+ # Match the first rule
+ [
+ ['feature_category=feature_a|urgency=high', 'queue_a'],
+ ['resource_boundary=cpu', 'queue_b'],
+ ['tags=cheap', 'queue_c']
+ ] | 'queue_a'
+      # Match the first rule (a second example)
+ [
+ ['feature_category=feature_b|urgency=low', 'queue_a'],
+ ['resource_boundary=cpu', 'queue_b'],
+ ['tags=cheap', 'queue_c']
+ ] | 'queue_a'
+ # Match the third rule
+ [
+ ['feature_category=feature_b|urgency=high', 'queue_a'],
+ ['resource_boundary=memory', 'queue_b'],
+ ['tags=expensive', 'queue_c']
+ ] | 'queue_c'
+ # Match all, first match wins
+ [
+ ['feature_category=feature_a|urgency=low', 'queue_a'],
+ ['resource_boundary=cpu', 'queue_b'],
+ ['tags=expensive', 'queue_c']
+ ] | 'queue_a'
+ # Match the same rule multiple times, the first match wins
+ [
+ ['feature_category=feature_a', 'queue_a'],
+ ['feature_category=feature_a', 'queue_b'],
+ ['feature_category=feature_a', 'queue_c']
+ ] | 'queue_a'
+ # Match wildcard
+ [
+ ['feature_category=feature_b|urgency=high', 'queue_a'],
+ ['resource_boundary=memory', 'queue_b'],
+ ['tags=cheap', 'queue_c'],
+ ['*', 'default']
+ ] | 'default'
+ # Match wildcard at the top of the chain. It makes the following rules useless
+ [
+ ['*', 'queue_foo'],
+ ['feature_category=feature_a|urgency=low', 'queue_a'],
+ ['resource_boundary=cpu', 'queue_b'],
+ ['tags=expensive', 'queue_c']
+ ] | 'queue_foo'
+ end
+ end
+
+ describe '.global' do
+ before do
+ described_class.remove_instance_variable(:@global_worker_router) if described_class.instance_variable_defined?(:@global_worker_router)
+ end
+
+ after do
+ described_class.remove_instance_variable(:@global_worker_router)
+ end
+
+ context 'valid routing rules' do
+ include_context 'router examples setup'
+
+ with_them do
+ before do
+ stub_config(sidekiq: { routing_rules: routing_rules })
+ end
+
+ it 'routes the worker to the correct queue' do
+ expect(described_class.global.route(worker)).to eql(expected_queue)
+ end
+ end
+ end
+
+ context 'invalid routing rules' do
+ let(:worker) do
+ Class.new do
+ def self.name
+ 'Gitlab::Foo::BarWorker'
+ end
+
+ include ApplicationWorker
+ end
+ end
+
+ before do
+ stub_config(sidekiq: { routing_rules: routing_rules })
+ end
+
+ context 'invalid routing rules format' do
+ let(:routing_rules) { ['feature_category=a'] }
+
+ it 'captures the error and falls back to an empty route' do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).with(be_a(described_class::InvalidRoutingRuleError))
+
+ expect(described_class.global.route(worker)).to eql('foo_bar')
+ end
+ end
+
+ context 'invalid predicate' do
+ let(:routing_rules) { [['invalid_term=a', 'queue_a']] }
+
+ it 'captures the error and falls back to an empty route' do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).with(
+ be_a(Gitlab::SidekiqConfig::WorkerMatcher::UnknownPredicate)
+ )
+
+ expect(described_class.global.route(worker)).to eql('foo_bar')
+ end
+ end
+ end
+ end
+
+ describe '#route' do
+ context 'valid routing rules' do
+ include_context 'router examples setup'
+
+ with_them do
+ it 'routes the worker to the correct queue' do
+ router = described_class.new(routing_rules)
+
+ expect(router.route(worker)).to eql(expected_queue)
+ end
+ end
+ end
+
+ context 'invalid routing rules' do
+ it 'raises an exception' do
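+        # Invalid inputs: nil rules, rules that are not [query, queue] pairs, and rules
+        # using an unknown predicate.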
+ expect { described_class.new(nil) }.to raise_error(described_class::InvalidRoutingRuleError)
+ expect { described_class.new(['feature_category=a']) }.to raise_error(described_class::InvalidRoutingRuleError)
+ expect { described_class.new([['feature_category=a', 'queue_a', 'queue_b']]) }.to raise_error(described_class::InvalidRoutingRuleError)
+ expect do
+ described_class.new(
+ [
+ ['feature_category=a', 'queue_b'],
+ ['feature_category=b']
+ ]
+ )
+ end.to raise_error(described_class::InvalidRoutingRuleError)
+ expect { described_class.new([['invalid_term=a', 'queue_a']]) }.to raise_error(Gitlab::SidekiqConfig::WorkerMatcher::UnknownPredicate)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_config/worker_spec.rb b/spec/lib/gitlab/sidekiq_config/worker_spec.rb
index 05987f95b33..0c43c33ff8c 100644
--- a/spec/lib/gitlab/sidekiq_config/worker_spec.rb
+++ b/spec/lib/gitlab/sidekiq_config/worker_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Gitlab::SidekiqConfig::Worker do
def create_worker(queue:, **attributes)
namespace = queue.include?(':') && queue.split(':').first
inner_worker = double(
+ name: attributes[:worker_name] || 'Foo::BarWorker',
queue: queue,
queue_namespace: namespace,
get_feature_category: attributes[:feature_category],
@@ -87,6 +88,7 @@ RSpec.describe Gitlab::SidekiqConfig::Worker do
describe 'YAML encoding' do
it 'encodes the worker in YAML as a hash of the queue' do
attributes_a = {
+ worker_name: 'WorkerA',
feature_category: :source_code_management,
has_external_dependencies: false,
urgency: :low,
@@ -97,6 +99,7 @@ RSpec.describe Gitlab::SidekiqConfig::Worker do
}
attributes_b = {
+ worker_name: 'WorkerB',
feature_category: :not_owned,
has_external_dependencies: true,
urgency: :high,
diff --git a/spec/lib/gitlab/sidekiq_daemon/monitor_spec.rb b/spec/lib/gitlab/sidekiq_daemon/monitor_spec.rb
index 749c7af6f59..f93c0e28fc0 100644
--- a/spec/lib/gitlab/sidekiq_daemon/monitor_spec.rb
+++ b/spec/lib/gitlab/sidekiq_daemon/monitor_spec.rb
@@ -225,7 +225,7 @@ RSpec.describe Gitlab::SidekiqDaemon::Monitor do
after do
thread.kill
- rescue
+ rescue StandardError
end
it 'does log cancellation message' do
diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
index 537844df72f..731c509e221 100644
--- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
@@ -69,7 +69,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
expect do
call_subject(job, 'test_queue') do
raise ArgumentError, 'Something went wrong'
- rescue
+ rescue StandardError
raise Sidekiq::JobRetry::Skip
end
end.to raise_error(Sidekiq::JobRetry::Skip)
@@ -86,7 +86,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
expect do
call_subject(job, 'test_queue') do
raise ArgumentError, 'Something went wrong'
- rescue
+ rescue StandardError
raise Sidekiq::JobRetry::Handled
end
end.to raise_error(Sidekiq::JobRetry::Handled)
@@ -293,6 +293,16 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end
end
end
+
+ context 'when instrumentation data is not loaded' do
+ before do
+ allow(logger).to receive(:info)
+ end
+
+ it 'does not raise exception' do
+ expect { subject.call(job.dup, 'test_queue') {} }.not_to raise_error
+ end
+ end
end
describe '#add_time_keys!' do
diff --git a/spec/lib/gitlab/sidekiq_middleware/size_limiter/exceed_limit_error_spec.rb b/spec/lib/gitlab/sidekiq_middleware/size_limiter/exceed_limit_error_spec.rb
index 75b1d9fd87e..7f11e8df97f 100644
--- a/spec/lib/gitlab/sidekiq_middleware/size_limiter/exceed_limit_error_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/size_limiter/exceed_limit_error_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::ExceedLimitError do
it 'encapsulates worker info' do
exception = described_class.new(TestSizeLimiterWorker, 500, 300)
- expect(exception.message).to eql("TestSizeLimiterWorker job exceeds payload size limit (500/300)")
+ expect(exception.message).to eql("TestSizeLimiterWorker job exceeds payload size limit")
expect(exception.worker_class).to eql(TestSizeLimiterWorker)
expect(exception.size).to be(500)
expect(exception.size_limit).to be(300)
diff --git a/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb b/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb
new file mode 100644
index 00000000000..b30143ed196
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb
@@ -0,0 +1,215 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues do
+ def clear_queues
+ Sidekiq::Queue.new('authorized_projects').clear
+ Sidekiq::Queue.new('post_receive').clear
+ Sidekiq::RetrySet.new.clear
+ Sidekiq::ScheduledSet.new.clear
+ end
+
+ around do |example|
+ clear_queues
+ Sidekiq::Testing.disable!(&example)
+ clear_queues
+ end
+
+ describe '#execute', :aggregate_failures do
+ shared_examples 'processing a set' do
+ let(:migrator) { described_class.new(set_name) }
+
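+      # Reads the set back from Redis as [job payload, score] pairs for the assertions below.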
+ let(:set_after) do
+ Sidekiq.redis { |c| c.zrange(set_name, 0, -1, with_scores: true) }
+ .map { |item, score| [Sidekiq.load_json(item), score] }
+ end
+
+ context 'when the set is empty' do
+ it 'returns the number of scanned and migrated jobs' do
+ expect(migrator.execute('AuthorizedProjectsWorker' => 'new_queue')).to eq(scanned: 0, migrated: 0)
+ end
+ end
+
+ context 'when the set is not empty' do
+ it 'returns the number of scanned and migrated jobs' do
+ create_jobs
+
+ expect(migrator.execute({})).to eq(scanned: 4, migrated: 0)
+ end
+ end
+
+ context 'when there are no matching jobs' do
+ it 'does not change any queue names' do
+ create_jobs(include_post_receive: false)
+
+ expect(migrator.execute('PostReceive' => 'new_queue')).to eq(scanned: 3, migrated: 0)
+
+ expect(set_after.length).to eq(3)
+ expect(set_after.map(&:first)).to all(include('queue' => 'authorized_projects',
+ 'class' => 'AuthorizedProjectsWorker'))
+ end
+ end
+
+ context 'when there are matching jobs' do
+ it 'migrates only the workers matching the given worker from the set' do
+ freeze_time do
+ create_jobs
+
+ expect(migrator.execute('AuthorizedProjectsWorker' => 'new_queue')).to eq(scanned: 4, migrated: 3)
+
+ set_after.each.with_index do |(item, score), i|
+ if item['class'] == 'AuthorizedProjectsWorker'
+ expect(item).to include('queue' => 'new_queue', 'args' => [i])
+ else
+ expect(item).to include('queue' => 'post_receive', 'args' => [i])
+ end
+
+ expect(score).to eq(i.succ.hours.from_now.to_i)
+ end
+ end
+ end
+
+ it 'allows migrating multiple workers at once' do
+ freeze_time do
+ create_jobs
+
+ expect(migrator.execute('AuthorizedProjectsWorker' => 'new_queue', 'PostReceive' => 'another_queue'))
+ .to eq(scanned: 4, migrated: 4)
+
+ set_after.each.with_index do |(item, score), i|
+ if item['class'] == 'AuthorizedProjectsWorker'
+ expect(item).to include('queue' => 'new_queue', 'args' => [i])
+ else
+ expect(item).to include('queue' => 'another_queue', 'args' => [i])
+ end
+
+ expect(score).to eq(i.succ.hours.from_now.to_i)
+ end
+ end
+ end
+
+ it 'allows migrating multiple workers to the same queue' do
+ freeze_time do
+ create_jobs
+
+ expect(migrator.execute('AuthorizedProjectsWorker' => 'new_queue', 'PostReceive' => 'new_queue'))
+ .to eq(scanned: 4, migrated: 4)
+
+ set_after.each.with_index do |(item, score), i|
+ expect(item).to include('queue' => 'new_queue', 'args' => [i])
+ expect(score).to eq(i.succ.hours.from_now.to_i)
+ end
+ end
+ end
+
+ it 'does not try to migrate jobs that are removed from the set during the migration' do
+ freeze_time do
+ create_jobs
+
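+          # Remove the job from the set just before it is migrated, simulating a concurrent consumer.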
+ allow(migrator).to receive(:migrate_job).and_wrap_original do |meth, *args|
+ Sidekiq.redis { |c| c.zrem(set_name, args.first) }
+
+ meth.call(*args)
+ end
+
+ expect(migrator.execute('PostReceive' => 'new_queue')).to eq(scanned: 4, migrated: 0)
+
+ expect(set_after.length).to eq(3)
+ expect(set_after.map(&:first)).to all(include('queue' => 'authorized_projects'))
+ end
+ end
+
+ it 'does not try to migrate unmatched jobs that are added to the set during the migration' do
+ create_jobs
+
+ calls = 0
+
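+        # While the first job is migrated, add more non-matching jobs to the set;
+        # they should not be picked up by this run.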
+ allow(migrator).to receive(:migrate_job).and_wrap_original do |meth, *args|
+ if calls == 0
+ travel_to(5.hours.from_now) { create_jobs(include_post_receive: false) }
+ end
+
+ calls += 1
+
+ meth.call(*args)
+ end
+
+ expect(migrator.execute('PostReceive' => 'new_queue')).to eq(scanned: 4, migrated: 1)
+
+ expect(set_after.group_by { |job| job.first['queue'] }.transform_values(&:count))
+ .to eq('authorized_projects' => 6, 'new_queue' => 1)
+ end
+
+ it 'iterates through the entire set of jobs' do
+ 50.times do |i|
+ travel_to(i.hours.from_now) { create_jobs }
+ end
+
+ expect(migrator.execute('NonExistentWorker' => 'new_queue')).to eq(scanned: 200, migrated: 0)
+
+ expect(set_after.length).to eq(200)
+ end
+
+ it 'logs output at the start, finish, and every LOG_FREQUENCY jobs' do
+ freeze_time do
+ create_jobs
+
+ stub_const("#{described_class}::LOG_FREQUENCY", 2)
+
+ logger = Logger.new(StringIO.new)
+ migrator = described_class.new(set_name, logger: logger)
+
+ expect(logger).to receive(:info).with(a_string_matching('Processing')).once.ordered
+ expect(logger).to receive(:info).with(a_string_matching('In progress')).once.ordered
+ expect(logger).to receive(:info).with(a_string_matching('Done')).once.ordered
+
+ expect(migrator.execute('AuthorizedProjectsWorker' => 'new_queue', 'PostReceive' => 'new_queue'))
+ .to eq(scanned: 4, migrated: 4)
+ end
+ end
+ end
+ end
+
+ context 'scheduled jobs' do
+ let(:set_name) { 'schedule' }
+
+ def create_jobs(include_post_receive: true)
+ AuthorizedProjectsWorker.perform_in(1.hour, 0)
+ AuthorizedProjectsWorker.perform_in(2.hours, 1)
+ PostReceive.perform_in(3.hours, 2) if include_post_receive
+ AuthorizedProjectsWorker.perform_in(4.hours, 3)
+ end
+
+ it_behaves_like 'processing a set'
+ end
+
+ context 'retried jobs' do
+ let(:set_name) { 'retry' }
+
+ # Try to mimic as closely as possible what Sidekiq will actually
+ # do to retry a job.
+ def retry_in(klass, time, args)
+ # In Sidekiq 6, this argument will become a JSON string
+ message = { 'class' => klass, 'args' => [args], 'retry' => true }
+
+ allow(klass).to receive(:sidekiq_retry_in_block).and_return(proc { time })
+
+ begin
+ Sidekiq::JobRetry.new.local(klass, message, klass.queue) { raise 'boom' }
+ rescue Sidekiq::JobRetry::Skip
+ # Sidekiq scheduled the retry
+ end
+ end
+
+ def create_jobs(include_post_receive: true)
+ retry_in(AuthorizedProjectsWorker, 1.hour, 0)
+ retry_in(AuthorizedProjectsWorker, 2.hours, 1)
+ retry_in(PostReceive, 3.hours, 2) if include_post_receive
+ retry_in(AuthorizedProjectsWorker, 4.hours, 3)
+ end
+
+ it_behaves_like 'processing a set'
+ end
+ end
+end
diff --git a/spec/lib/gitlab/slash_commands/presenters/issue_move_spec.rb b/spec/lib/gitlab/slash_commands/presenters/issue_move_spec.rb
index 7b3440b40a7..7d36e67ddbf 100644
--- a/spec/lib/gitlab/slash_commands/presenters/issue_move_spec.rb
+++ b/spec/lib/gitlab/slash_commands/presenters/issue_move_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::SlashCommands::Presenters::IssueMove do
let_it_be(:other_project) { create(:project) }
let_it_be(:old_issue, reload: true) { create(:issue, project: project) }
- let(:new_issue) { Issues::MoveService.new(project, user).execute(old_issue, other_project) }
+ let(:new_issue) { Issues::MoveService.new(project: project, current_user: user).execute(old_issue, other_project) }
let(:attachment) { subject[:attachments].first }
subject { described_class.new(new_issue).present(old_issue) }
diff --git a/spec/lib/gitlab/spamcheck/client_spec.rb b/spec/lib/gitlab/spamcheck/client_spec.rb
new file mode 100644
index 00000000000..491e5e9a662
--- /dev/null
+++ b/spec/lib/gitlab/spamcheck/client_spec.rb
@@ -0,0 +1,121 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Spamcheck::Client do
+ include_context 'includes Spam constants'
+
+ let(:endpoint) { 'grpc://grpc.test.url' }
+ let_it_be(:user) { create(:user, organization: 'GitLab') }
+ let(:verdict_value) { nil }
+ let(:error_value) { "" }
+
+ let(:attribs_value) do
+ extra_attributes = Google::Protobuf::Map.new(:string, :string)
+ extra_attributes["monitorMode"] = "false"
+ extra_attributes
+ end
+
+ let_it_be(:issue) { create(:issue, description: 'Test issue description') }
+
+ let(:response) do
+ verdict = ::Spamcheck::SpamVerdict.new
+ verdict.verdict = verdict_value
+ verdict.error = error_value
+ verdict.extra_attributes = attribs_value
+ verdict
+ end
+
+ subject { described_class.new.issue_spam?(spam_issue: issue, user: user) }
+
+ before do
+ stub_application_setting(spam_check_endpoint_url: endpoint)
+ end
+
+ describe '#issue_spam?' do
+ before do
+ allow_next_instance_of(::Spamcheck::SpamcheckService::Stub) do |instance|
+ allow(instance).to receive(:check_for_spam_issue).and_return(response)
+ end
+ end
+
+ using RSpec::Parameterized::TableSyntax
+
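+    # Each Spamcheck gRPC verdict should map to the corresponding application-level spam constant.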
+ where(:verdict, :expected) do
+ ::Spamcheck::SpamVerdict::Verdict::ALLOW | Spam::SpamConstants::ALLOW
+ ::Spamcheck::SpamVerdict::Verdict::CONDITIONAL_ALLOW | Spam::SpamConstants::CONDITIONAL_ALLOW
+ ::Spamcheck::SpamVerdict::Verdict::DISALLOW | Spam::SpamConstants::DISALLOW
+ ::Spamcheck::SpamVerdict::Verdict::BLOCK | Spam::SpamConstants::BLOCK_USER
+ ::Spamcheck::SpamVerdict::Verdict::NOOP | Spam::SpamConstants::NOOP
+ end
+
+ with_them do
+ let(:verdict_value) { verdict }
+
+ it "returns expected spam constant" do
+ expect(subject).to eq([expected, { "monitorMode" => "false" }, ""])
+ end
+ end
+ end
+
+ describe "#build_issue_protobuf", :aggregate_failures do
+ it 'builds the expected protobuf object' do
+ cxt = { action: :create }
+ issue_pb = described_class.new.send(:build_issue_protobuf,
+ issue: issue, user: user,
+ context: cxt)
+ expect(issue_pb.title).to eq issue.title
+ expect(issue_pb.description).to eq issue.description
+      expect(issue_pb.user_in_project).to be false
+ expect(issue_pb.project.project_id).to eq issue.project_id
+ expect(issue_pb.created_at).to eq timestamp_to_protobuf_timestamp(issue.created_at)
+ expect(issue_pb.updated_at).to eq timestamp_to_protobuf_timestamp(issue.updated_at)
+ expect(issue_pb.action).to be ::Spamcheck::Action.lookup(::Spamcheck::Action::CREATE)
+ expect(issue_pb.user.username).to eq user.username
+ end
+ end
+
+ describe '#build_user_proto_buf', :aggregate_failures do
+ it 'builds the expected protobuf object' do
+ user_pb = described_class.new.send(:build_user_protobuf, user)
+ expect(user_pb.username).to eq user.username
+ expect(user_pb.org).to eq user.organization
+ expect(user_pb.created_at).to eq timestamp_to_protobuf_timestamp(user.created_at)
+ expect(user_pb.emails.count).to be 1
+ expect(user_pb.emails.first.email).to eq user.email
+ expect(user_pb.emails.first.verified).to eq user.confirmed?
+ end
+
+ context 'when user has multiple email addresses' do
+      let(:secondary_email) { create(:email, :confirmed, user: user) }
+
+ before do
+ user.emails << secondary_email
+ end
+
+ it 'adds emails to the user pb object' do
+ user_pb = described_class.new.send(:build_user_protobuf, user)
+ expect(user_pb.emails.count).to eq 2
+ expect(user_pb.emails.first.email).to eq user.email
+ expect(user_pb.emails.first.verified).to eq user.confirmed?
+ expect(user_pb.emails.last.email).to eq secondary_email.email
+ expect(user_pb.emails.last.verified).to eq secondary_email.confirmed?
+ end
+ end
+ end
+
+ describe "#build_project_protobuf", :aggregate_failures do
+ it 'builds the expected protobuf object' do
+ project_pb = described_class.new.send(:build_project_protobuf, issue)
+ expect(project_pb.project_id).to eq issue.project_id
+ expect(project_pb.project_path).to eq issue.project.full_path
+ end
+ end
+
+ private
+
+ def timestamp_to_protobuf_timestamp(timestamp)
+ Google::Protobuf::Timestamp.new(seconds: timestamp.to_time.to_i,
+ nanos: timestamp.to_time.nsec)
+ end
+end
diff --git a/spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb b/spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb
index 0b2055d3db5..8cd3feba339 100644
--- a/spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb
+++ b/spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb
@@ -54,13 +54,14 @@ RSpec.describe Gitlab::StaticSiteEditor::Config::GeneratedConfig do
path,
'',
message: 'message',
- branch_name: 'master'
+ branch_name: ref
)
end
+ let(:ref) { 'main' }
let(:path) { 'README.md.erb' }
- it { is_expected.to include(is_supported_content: true) }
+ it { is_expected.to include(branch: ref, is_supported_content: true) }
end
context 'when file path is nested' do
@@ -69,7 +70,7 @@ RSpec.describe Gitlab::StaticSiteEditor::Config::GeneratedConfig do
it { is_expected.to include(base_url: '/namespace/project/-/sse/master%2Flib%2FREADME.md') }
end
- context 'when branch is not master' do
+ context 'when branch is not master or main' do
let(:ref) { 'my-branch' }
it { is_expected.to include(is_supported_content: false) }
diff --git a/spec/lib/gitlab/subscription_portal_spec.rb b/spec/lib/gitlab/subscription_portal_spec.rb
index ad1affdac0b..ed551521b1d 100644
--- a/spec/lib/gitlab/subscription_portal_spec.rb
+++ b/spec/lib/gitlab/subscription_portal_spec.rb
@@ -2,43 +2,26 @@
require 'spec_helper'
-RSpec.describe ::Gitlab::SubscriptionPortal do
- unless Gitlab.jh?
- describe '.default_subscriptions_url' do
- subject { described_class.default_subscriptions_url }
-
- context 'on non test and non dev environments' do
- before do
- allow(Rails).to receive_message_chain(:env, :test?).and_return(false)
- allow(Rails).to receive_message_chain(:env, :development?).and_return(false)
- end
-
- it 'returns production subscriptions app URL' do
- is_expected.to eq('https://customers.gitlab.com')
- end
- end
-
- context 'on dev environment' do
- before do
- allow(Rails).to receive_message_chain(:env, :test?).and_return(false)
- allow(Rails).to receive_message_chain(:env, :development?).and_return(true)
- end
-
- it 'returns staging subscriptions app url' do
- is_expected.to eq('https://customers.stg.gitlab.com')
- end
- end
+RSpec.describe ::Gitlab::SubscriptionPortal, skip: Gitlab.jh? do
+ using RSpec::Parameterized::TableSyntax
+
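+  # The staging URL is expected whenever Rails.env is test or development;
+  # the production URL is used otherwise.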
+ where(:method_name, :test, :development, :result) do
+ :default_subscriptions_url | false | false | 'https://customers.gitlab.com'
+ :default_subscriptions_url | false | true | 'https://customers.stg.gitlab.com'
+ :default_subscriptions_url | true | false | 'https://customers.stg.gitlab.com'
+ :payment_form_url | false | false | 'https://customers.gitlab.com/payment_forms/cc_validation'
+ :payment_form_url | false | true | 'https://customers.stg.gitlab.com/payment_forms/cc_validation'
+ :payment_form_url | true | false | 'https://customers.stg.gitlab.com/payment_forms/cc_validation'
+ end
- context 'on test environment' do
- before do
- allow(Rails).to receive_message_chain(:env, :test?).and_return(true)
- allow(Rails).to receive_message_chain(:env, :development?).and_return(false)
- end
+ with_them do
+ subject { described_class.method(method_name).call }
- it 'returns staging subscriptions app url' do
- is_expected.to eq('https://customers.stg.gitlab.com')
- end
- end
+ before do
+ allow(Rails).to receive_message_chain(:env, :test?).and_return(test)
+ allow(Rails).to receive_message_chain(:env, :development?).and_return(development)
end
+
+ it { is_expected.to eq(result) }
end
end
diff --git a/spec/lib/gitlab/template/gitlab_ci_syntax_yml_template_spec.rb b/spec/lib/gitlab/template/gitlab_ci_syntax_yml_template_spec.rb
deleted file mode 100644
index d1024019a9f..00000000000
--- a/spec/lib/gitlab/template/gitlab_ci_syntax_yml_template_spec.rb
+++ /dev/null
@@ -1,17 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Template::GitlabCiSyntaxYmlTemplate do
- subject { described_class }
-
- describe '#content' do
- it 'loads the full file' do
- template = subject.new(Rails.root.join('lib/gitlab/ci/syntax_templates/Artifacts example.gitlab-ci.yml'))
-
- expect(template.content).to start_with('#')
- end
- end
-
- it_behaves_like 'file template shared examples', 'Artifacts example', '.gitlab-ci.yml'
-end
diff --git a/spec/lib/gitlab/terraform_registry_token_spec.rb b/spec/lib/gitlab/terraform_registry_token_spec.rb
new file mode 100644
index 00000000000..49c1c07e942
--- /dev/null
+++ b/spec/lib/gitlab/terraform_registry_token_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Gitlab::TerraformRegistryToken do
+ let_it_be(:user) { create(:user) }
+
+ describe '.from_token' do
+ let(:jwt_token) { described_class.from_token(token) }
+
+ subject { described_class.decode(jwt_token.encoded) }
+
+ context 'with a deploy token' do
+ let(:deploy_token) { create(:deploy_token, username: 'deployer') }
+ let(:token) { deploy_token }
+
+ it 'returns the correct token' do
+ expect(subject['token']).to eq jwt_token['token']
+ end
+ end
+
+ context 'with a job' do
+ let_it_be(:job) { create(:ci_build) }
+
+ let(:token) { job }
+
+ it 'returns the correct token' do
+ expect(subject['token']).to eq jwt_token['token']
+ end
+ end
+
+ context 'with a personal access token' do
+ let(:token) { create(:personal_access_token) }
+
+ it 'returns the correct token' do
+ expect(subject['token']).to eq jwt_token['token']
+ end
+ end
+ end
+
+ it_behaves_like 'a gitlab jwt token'
+end
diff --git a/spec/lib/gitlab/tracking/docs/helper_spec.rb b/spec/lib/gitlab/tracking/docs/helper_spec.rb
new file mode 100644
index 00000000000..5f7965502f1
--- /dev/null
+++ b/spec/lib/gitlab/tracking/docs/helper_spec.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Tracking::Docs::Helper do
+ let_it_be(:klass) do
+ Class.new do
+ include Gitlab::Tracking::Docs::Helper
+ end
+ end
+
+ describe '#auto_generated_comment' do
+    it 'renders a comment noting the documentation is auto-generated' do
+ expect(klass.new.auto_generated_comment).to match /This documentation is auto generated by a script/
+ end
+ end
+
+ describe '#render_description' do
+ context 'description is empty' do
+ it 'renders information about missing description' do
+ object = double(description: '')
+
+ expect(klass.new.render_description(object)).to eq('Missing description')
+ end
+ end
+
+ context 'description is present' do
+      it 'renders the description' do
+ object = double(description: 'some description')
+
+ expect(klass.new.render_description(object)).to eq('some description')
+ end
+ end
+ end
+
+ describe '#render_event_taxonomy' do
+    it 'renders a table with the event taxonomy' do
+ attributes = {
+ category: 'epics',
+ action: 'promote',
+ label: nil,
+ property_description: 'String with issue id',
+ value_description: 'Integer issue id'
+ }
+ object = double(attributes: attributes)
+ event_taxonomy = <<~MD.chomp
+ | category | action | label | property | value |
+ |---|---|---|---|---|
+ | `epics` | `promote` | `` | `String with issue id` | `Integer issue id` |
+ MD
+
+ expect(klass.new.render_event_taxonomy(object)).to eq(event_taxonomy)
+ end
+ end
+
+ describe '#md_link_to' do
+    it 'renders a link in Markdown format' do
+ expect(klass.new.md_link_to('zelda', 'link')).to eq('[zelda](link)')
+ end
+ end
+
+ describe '#render_owner' do
+    it 'renders information about the group that owns the event' do
+ object = double(product_group: "group::product intelligence")
+
+ expect(klass.new.render_owner(object)).to eq("Owner: `group::product intelligence`")
+ end
+ end
+
+ describe '#render_tiers' do
+    it 'renders information about tiers' do
+ object = double(tiers: %w[bronze silver gold])
+
+ expect(klass.new.render_tiers(object)).to eq("Tiers: `bronze`, `silver`, `gold`")
+ end
+ end
+
+ describe '#render_yaml_definition_path' do
+    it 'renders the relative location of the YAML definition' do
+ object = double(yaml_path: 'config/events/button_click.yaml')
+
+ expect(klass.new.render_yaml_definition_path(object)).to eq("YAML definition: `config/events/button_click.yaml`")
+ end
+ end
+
+ describe '#backtick' do
+    it 'wraps the string in backticks' do
+ expect(klass.new.backtick('test')).to eql("`test`")
+ end
+ end
+end
diff --git a/spec/lib/gitlab/tracking/docs/renderer_spec.rb b/spec/lib/gitlab/tracking/docs/renderer_spec.rb
new file mode 100644
index 00000000000..386aea6c23a
--- /dev/null
+++ b/spec/lib/gitlab/tracking/docs/renderer_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Tracking::Docs::Renderer do
+ describe 'contents' do
+ let(:dictionary_path) { described_class::DICTIONARY_PATH }
+ let(:items) { Gitlab::Tracking::EventDefinition.definitions.first(10).to_h }
+
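+    # The generated dictionary should contain one level-3 heading per event, titled "<category> <action>".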
+ it 'generates dictionary for given items' do
+ generated_dictionary = described_class.new(items).contents
+      table_of_contents_items = items.values.map { |item| "#{item.category} #{item.action}" }
+
+ generated_dictionary_keys = RDoc::Markdown
+ .parse(generated_dictionary)
+ .table_of_contents
+ .select { |metric_doc| metric_doc.level == 3 }
+ .map { |item| item.text.match(%r{<code>(.*)</code>})&.captures&.first }
+
+ expect(generated_dictionary_keys).to match_array(table_of_contents_items)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/tracking/event_definition_spec.rb b/spec/lib/gitlab/tracking/event_definition_spec.rb
new file mode 100644
index 00000000000..51c62840819
--- /dev/null
+++ b/spec/lib/gitlab/tracking/event_definition_spec.rb
@@ -0,0 +1,101 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Tracking::EventDefinition do
+ let(:attributes) do
+ {
+ description: 'Created issues',
+ category: 'issues',
+ action: 'create',
+ label_description: 'API',
+ property_description: 'The string "issue_id"',
+ value_description: 'ID of the issue',
+ extra_properties: { confidential: false },
+ product_category: 'collection',
+ product_stage: 'growth',
+ product_section: 'dev',
+ product_group: 'group::product analytics',
+ distribution: %w(ee ce),
+ tier: %w(free premium ultimate)
+ }
+ end
+
+ let(:path) { File.join('events', 'issues_create.yml') }
+ let(:definition) { described_class.new(path, attributes) }
+ let(:yaml_content) { attributes.deep_stringify_keys.to_yaml }
+
+ def write_metric(metric, path, content)
+ path = File.join(metric, path)
+ dir = File.dirname(path)
+ FileUtils.mkdir_p(dir)
+ File.write(path, content)
+ end
+
+ it 'has all definitions valid' do
+ expect { described_class.definitions }.not_to raise_error(Gitlab::Tracking::InvalidEventError)
+ end
+
+ describe '#validate' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:attribute, :value) do
+ :description | 1
+ :category | nil
+ :action | nil
+ :label_description | 1
+ :property_description | 1
+ :value_description | 1
+ :extra_properties | 'smth'
+ :product_category | 1
+ :product_stage | 1
+ :product_section | nil
+ :product_group | nil
+ :distributions | %[be eb]
+ :tiers | %[pro]
+ end
+
+ with_them do
+ before do
+ attributes[attribute] = value
+ end
+
+      it 'raises an exception' do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Tracking::InvalidEventError))
+
+ described_class.new(path, attributes).validate!
+ end
+ end
+ end
+
+ describe '.definitions' do
+ let(:metric1) { Dir.mktmpdir('metric1') }
+ let(:metric2) { Dir.mktmpdir('metric2') }
+
+ before do
+ allow(described_class).to receive(:paths).and_return(
+ [
+ File.join(metric1, '**', '*.yml'),
+ File.join(metric2, '**', '*.yml')
+ ]
+ )
+ end
+
+ subject { described_class.definitions }
+
+    it 'has an empty list when there are no definition files' do
+ is_expected.to be_empty
+ end
+
+ it 'has one metric when there is one file' do
+ write_metric(metric1, path, yaml_content)
+
+ is_expected.to be_one
+ end
+
+ after do
+ FileUtils.rm_rf(metric1)
+ FileUtils.rm_rf(metric2)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/tracking/standard_context_spec.rb b/spec/lib/gitlab/tracking/standard_context_spec.rb
index dacd08cf12b..289818266bd 100644
--- a/spec/lib/gitlab/tracking/standard_context_spec.rb
+++ b/spec/lib/gitlab/tracking/standard_context_spec.rb
@@ -57,6 +57,22 @@ RSpec.describe Gitlab::Tracking::StandardContext do
expect(snowplow_context.to_json.dig(:data, :source)).to eq(described_class::GITLAB_RAILS_SOURCE)
end
+ context 'plan' do
+ context 'when namespace is not available' do
+ it 'is nil' do
+ expect(snowplow_context.to_json.dig(:data, :plan)).to be_nil
+ end
+ end
+
+ context 'when namespace is available' do
+ subject { described_class.new(namespace: create(:namespace)) }
+
+ it 'contains plan name' do
+ expect(snowplow_context.to_json.dig(:data, :plan)).to eq(Plan::DEFAULT)
+ end
+ end
+ end
+
context 'with extra data' do
subject { described_class.new(extra_key_1: 'extra value 1', extra_key_2: 'extra value 2') }
diff --git a/spec/lib/gitlab/tracking_spec.rb b/spec/lib/gitlab/tracking_spec.rb
index 4d856205609..994316f38ee 100644
--- a/spec/lib/gitlab/tracking_spec.rb
+++ b/spec/lib/gitlab/tracking_spec.rb
@@ -36,6 +36,8 @@ RSpec.describe Gitlab::Tracking do
end
describe '.event' do
+ let(:namespace) { create(:namespace) }
+
shared_examples 'delegates to destination' do |klass|
before do
allow_any_instance_of(Gitlab::Tracking::Destinations::Snowplow).to receive(:event)
@@ -47,7 +49,6 @@ RSpec.describe Gitlab::Tracking do
project = double(:project)
user = double(:user)
- namespace = double(:namespace)
expect(Gitlab::Tracking::StandardContext)
.to receive(:new)
diff --git a/spec/lib/gitlab/tree_summary_spec.rb b/spec/lib/gitlab/tree_summary_spec.rb
index a86afa9cba5..3021d92244e 100644
--- a/spec/lib/gitlab/tree_summary_spec.rb
+++ b/spec/lib/gitlab/tree_summary_spec.rb
@@ -85,7 +85,7 @@ RSpec.describe Gitlab::TreeSummary do
'long.txt',
'',
message: message,
- branch_name: project.default_branch_or_master
+ branch_name: project.default_branch
)
end
diff --git a/spec/lib/gitlab/usage/metric_definition_spec.rb b/spec/lib/gitlab/usage/metric_definition_spec.rb
index e99d720058a..92e51b8ea23 100644
--- a/spec/lib/gitlab/usage/metric_definition_spec.rb
+++ b/spec/lib/gitlab/usage/metric_definition_spec.rb
@@ -25,6 +25,12 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
let(:definition) { described_class.new(path, attributes) }
let(:yaml_content) { attributes.deep_stringify_keys.to_yaml }
+ around do |example|
+ described_class.instance_variable_set(:@definitions, nil)
+ example.run
+ described_class.instance_variable_set(:@definitions, nil)
+ end
+
def write_metric(metric, path, content)
path = File.join(metric, path)
dir = File.dirname(path)
@@ -32,6 +38,11 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
File.write(path, content)
end
+ after do
+ # Reset memoized `definitions` result
+ described_class.instance_variable_set(:@definitions, nil)
+ end
+
it 'has all definitions valid' do
expect { described_class.definitions }.not_to raise_error(Gitlab::Usage::Metric::InvalidMetricError)
end
@@ -62,6 +73,9 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
:distribution | 'test'
:tier | %w(test ee)
:name | 'count_<adjective_describing>_boards'
+
+ :instrumentation_class | 'Metric_Class'
+ :instrumentation_class | 'metricClass'
end
with_them do
@@ -184,8 +198,6 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
File.join(metric2, '**', '*.yml')
]
)
- # Reset memoized `definitions` result
- described_class.instance_variable_set(:@definitions, nil)
end
after do
diff --git a/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb b/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb
index 7d8e3056384..0fb3a69df05 100644
--- a/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb
@@ -65,76 +65,35 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Aggregate, :clean_gitlab_redi
end
context 'there are aggregated metrics defined' do
+ let(:aggregated_metrics) do
+ [
+ aggregated_metric(name: "gmau_1", source: datasource, time_frame: time_frame, operator: operator)
+ ]
+ end
+
+ let(:results) { { 'gmau_1' => 5 } }
+ let(:params) { { start_date: start_date, end_date: end_date, recorded_at: recorded_at } }
+
before do
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:aggregated_metrics).and_return(aggregated_metrics)
end
end
- context 'with AND operator' do
- let(:aggregated_metrics) do
- params = { source: datasource, operator: "AND", time_frame: time_frame }
- [
- aggregated_metric(**params.merge(name: "gmau_1", events: %w[event3 event5])),
- aggregated_metric(**params.merge(name: "gmau_2"))
- ]
- end
-
- it 'returns the number of unique events recorded for every metric in aggregate', :aggregate_failures do
- results = {
- 'gmau_1' => 2,
- 'gmau_2' => 1
- }
- params = { start_date: start_date, end_date: end_date, recorded_at: recorded_at }
-
- # gmau_1 data is as follow
- # |A| => 4
- expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event3')).and_return(4)
- # |B| => 6
- expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event5')).and_return(6)
- # |A + B| => 8
- expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event3 event5])).and_return(8)
- # Exclusion inclusion principle formula to calculate intersection of 2 sets
- # |A & B| = (|A| + |B|) - |A + B| => (4 + 6) - 8 => 2
-
- # gmau_2 data is as follow:
- # |A| => 2
- expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event1')).and_return(2)
- # |B| => 3
- expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event2')).and_return(3)
- # |C| => 5
- expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event3')).and_return(5)
-
- # |A + B| => 4 therefore |A & B| = (|A| + |B|) - |A + B| => 2 + 3 - 4 => 1
- expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event1 event2])).and_return(4)
- # |A + C| => 6 therefore |A & C| = (|A| + |C|) - |A + C| => 2 + 5 - 6 => 1
- expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event1 event3])).and_return(6)
- # |B + C| => 7 therefore |B & C| = (|B| + |C|) - |B + C| => 3 + 5 - 7 => 1
- expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event2 event3])).and_return(7)
- # |A + B + C| => 8
- expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event1 event2 event3])).and_return(8)
- # Exclusion inclusion principle formula to calculate intersection of 3 sets
- # |A & B & C| = (|A & B| + |A & C| + |B & C|) - (|A| + |B| + |C|) + |A + B + C|
- # (1 + 1 + 1) - (2 + 3 + 5) + 8 => 1
+ context 'with OR operator' do
+ let(:operator) { Gitlab::Usage::Metrics::Aggregates::UNION_OF_AGGREGATED_METRICS }
+
+ it 'returns the number of unique events that occurred for any metric in the aggregate', :aggregate_failures do
+ expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event1 event2 event3])).and_return(5)
expect(aggregated_metrics_data).to eq(results)
end
end
- context 'with OR operator' do
- let(:aggregated_metrics) do
- [
- aggregated_metric(name: "gmau_1", source: datasource, time_frame: time_frame, operator: "OR")
- ]
- end
-
- it 'returns the number of unique events occurred for any metric in aggregate', :aggregate_failures do
- results = {
- 'gmau_1' => 5
- }
- params = { start_date: start_date, end_date: end_date, recorded_at: recorded_at }
+ context 'with AND operator' do
+ let(:operator) { Gitlab::Usage::Metrics::Aggregates::INTERSECTION_OF_AGGREGATED_METRICS }
- expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event1 event2 event3])).and_return(5)
+
+ it 'returns the number of unique events that occurred for all metrics in the aggregate', :aggregate_failures do
+ expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_intersections).with(params.merge(metric_names: %w[event1 event2 event3])).and_return(5)
expect(aggregated_metrics_data).to eq(results)
end
end
@@ -331,36 +290,6 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Aggregate, :clean_gitlab_redi
it_behaves_like 'database_sourced_aggregated_metrics'
it_behaves_like 'redis_sourced_aggregated_metrics'
it_behaves_like 'db sourced aggregated metrics without database_sourced_aggregated_metrics feature'
-
- context 'metrics union calls' do
- it 'caches intermediate operations', :aggregate_failures do
- events = %w[event1 event2 event3 event5]
- allow_next_instance_of(described_class) do |instance|
- allow(instance).to receive(:aggregated_metrics)
- .and_return([aggregated_metric(name: 'gmau_1', events: events, operator: "AND", time_frame: time_frame)])
- end
-
- params = { start_date: start_date, end_date: end_date, recorded_at: recorded_at }
-
- events.each do |event|
- expect(sources::RedisHll).to receive(:calculate_metrics_union)
- .with(params.merge(metric_names: event))
- .once
- .and_return(0)
- end
-
- 2.upto(4) do |subset_size|
- events.combination(subset_size).each do |events|
- expect(sources::RedisHll).to receive(:calculate_metrics_union)
- .with(params.merge(metric_names: events))
- .once
- .and_return(0)
- end
- end
-
- aggregated_metrics_data
- end
- end
end
end
end
diff --git a/spec/lib/gitlab/usage/metrics/aggregates/sources/calculations/intersection_spec.rb b/spec/lib/gitlab/usage/metrics/aggregates/sources/calculations/intersection_spec.rb
new file mode 100644
index 00000000000..41cb445155e
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/aggregates/sources/calculations/intersection_spec.rb
@@ -0,0 +1,89 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::Calculations::Intersection do
+ let_it_be(:recorded_at) { Time.current.to_i }
+ let_it_be(:start_date) { 4.weeks.ago.to_date }
+ let_it_be(:end_date) { Date.current }
+
+ shared_examples 'aggregated_metrics_data with source' do
+ context 'with AND operator' do
+ let(:params) { { start_date: start_date, end_date: end_date, recorded_at: recorded_at } }
+
+ context 'with even number of metrics' do
+ it 'calculates intersection correctly', :aggregate_failures do
+ # gmau_1 data is as follows
+ # |A| => 4
+ expect(source).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event3')).and_return(4)
+ # |B| => 6
+ expect(source).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event5')).and_return(6)
+ # |A + B| => 8
+ expect(source).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event3 event5])).and_return(8)
+ # Inclusion-exclusion principle formula to calculate intersection of 2 sets
+ # |A & B| = (|A| + |B|) - |A + B| => (4 + 6) - 8 => 2
+ expect(source.calculate_metrics_intersections(metric_names: %w[event3 event5], start_date: start_date, end_date: end_date, recorded_at: recorded_at)).to eq(2)
+ end
+ end
+
+ context 'with odd number of metrics' do
+ it 'calculates intersection correctly', :aggregate_failures do
+ # gmau_2 data is as follows:
+ # |A| => 2
+ expect(source).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event1')).and_return(2)
+ # |B| => 3
+ expect(source).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event2')).and_return(3)
+ # |C| => 5
+ expect(source).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event3')).and_return(5)
+
+ # |A + B| => 4 therefore |A & B| = (|A| + |B|) - |A + B| => 2 + 3 - 4 => 1
+ expect(source).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event1 event2])).and_return(4)
+ # |A + C| => 6 therefore |A & C| = (|A| + |C|) - |A + C| => 2 + 5 - 6 => 1
+ expect(source).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event1 event3])).and_return(6)
+ # |B + C| => 7 therefore |B & C| = (|B| + |C|) - |B + C| => 3 + 5 - 7 => 1
+ expect(source).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event2 event3])).and_return(7)
+ # |A + B + C| => 8
+ expect(source).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event1 event2 event3])).and_return(8)
+ # Inclusion-exclusion principle formula to calculate intersection of 3 sets
+ # |A & B & C| = (|A & B| + |A & C| + |B & C|) - (|A| + |B| + |C|) + |A + B + C|
+ # (1 + 1 + 1) - (2 + 3 + 5) + 8 => 1
+ expect(source.calculate_metrics_intersections(metric_names: %w[event1 event2 event3], start_date: start_date, end_date: end_date, recorded_at: recorded_at)).to eq(1)
+ end
+ end
+ end
+ end
+
+ describe '.aggregated_metrics_data' do
+ let(:source) do
+ Class.new do
+ extend Gitlab::Usage::Metrics::Aggregates::Sources::Calculations::Intersection
+ end
+ end
+
+ it 'caches intermediate operations', :aggregate_failures do
+ events = %w[event1 event2 event3 event5]
+
+ params = { start_date: start_date, end_date: end_date, recorded_at: recorded_at }
+
+ events.each do |event|
+ expect(source).to receive(:calculate_metrics_union)
+ .with(params.merge(metric_names: event))
+ .once
+ .and_return(0)
+ end
+
+ 2.upto(4) do |subset_size|
+ events.combination(subset_size).each do |events|
+ expect(source).to receive(:calculate_metrics_union)
+ .with(params.merge(metric_names: events))
+ .once
+ .and_return(0)
+ end
+ end
+
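+      # The mocks above allow 4 single-event unions plus C(4,2) + C(4,3) + C(4,4),
+      # that is 6 + 4 + 1 subset unions, for 15 distinct calls in total, each
+      # expected exactly once, so repeated intermediate results must come from the cache.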
+ expect(source.calculate_metrics_intersections(metric_names: events, start_date: start_date, end_date: end_date, recorded_at: recorded_at)).to eq(0)
+ end
+
+ it_behaves_like 'aggregated_metrics_data with source'
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb b/spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb
index db878828cd6..1ae4c9414dd 100644
--- a/spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb
@@ -12,11 +12,7 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::PostgresHll, :clean_
let(:metric_2) { 'metric_2' }
let(:metric_names) { [metric_1, metric_2] }
- describe '.calculate_events_union' do
- subject(:calculate_metrics_union) do
- described_class.calculate_metrics_union(metric_names: metric_names, start_date: start_date, end_date: end_date, recorded_at: recorded_at)
- end
-
+ describe 'metric calculations' do
before do
[
{
@@ -36,23 +32,55 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::PostgresHll, :clean_
end
end
- it 'returns the number of unique events in the union of all metrics' do
- expect(calculate_metrics_union.round(2)).to eq(3.12)
- end
+ describe '.calculate_events_union' do
+ subject(:calculate_metrics_union) do
+ described_class.calculate_metrics_union(metric_names: metric_names, start_date: start_date, end_date: end_date, recorded_at: recorded_at)
+ end
+
+ it 'returns the number of unique events in the union of all metrics' do
+ expect(calculate_metrics_union.round(2)).to eq(3.12)
+ end
+
+ context 'when there is no aggregated data saved' do
+ let(:metric_names) { [metric_1, 'i do not have any records'] }
+
+ it 'raises error when union data is missing' do
+ expect { calculate_metrics_union }.to raise_error Gitlab::Usage::Metrics::Aggregates::Sources::UnionNotAvailable
+ end
+ end
- context 'when there is no aggregated data saved' do
- let(:metric_names) { [metric_1, 'i do not have any records'] }
+ context 'when there is only one metric defined as aggregated' do
+ let(:metric_names) { [metric_1] }
- it 'raises error when union data is missing' do
- expect { calculate_metrics_union }.to raise_error Gitlab::Usage::Metrics::Aggregates::Sources::UnionNotAvailable
+ it 'returns the number of unique events for that metric' do
+ expect(calculate_metrics_union.round(2)).to eq(2.08)
+ end
end
end
- context 'when there is only one metric defined as aggregated' do
- let(:metric_names) { [metric_1] }
+ describe '.calculate_metrics_intersections' do
+ subject(:calculate_metrics_intersections) do
+ described_class.calculate_metrics_intersections(metric_names: metric_names, start_date: start_date, end_date: end_date, recorded_at: recorded_at)
+ end
+
+ it 'returns the number of common events in the intersection of all metrics' do
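+        # Rough arithmetic behind the expectation, assuming metric_2 estimates at
+        # roughly 2.08 like metric_1: |A & B| = (|A| + |B|) - |A + B| => (2.08 + 2.08) - 3.12 => 1.04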
+ expect(calculate_metrics_intersections.round(2)).to eq(1.04)
+ end
+
+ context 'when there is no aggregated data saved' do
+ let(:metric_names) { [metric_1, 'i do not have any records'] }
- it 'returns the number of unique events for that metric' do
- expect(calculate_metrics_union.round(2)).to eq(2.08)
+ it 'raises error when union data is missing' do
+ expect { calculate_metrics_intersections }.to raise_error Gitlab::Usage::Metrics::Aggregates::Sources::UnionNotAvailable
+ end
+ end
+
+ context 'when there is only one metric defined in aggregate' do
+ let(:metric_names) { [metric_1] }
+
+ it 'returns the number of common/unique events for the intersection of that metric' do
+ expect(calculate_metrics_intersections.round(2)).to eq(2.08)
+ end
end
end
end
diff --git a/spec/lib/gitlab/usage/metrics/aggregates/sources/redis_hll_spec.rb b/spec/lib/gitlab/usage/metrics/aggregates/sources/redis_hll_spec.rb
index af2de5ea343..83b155b41b1 100644
--- a/spec/lib/gitlab/usage/metrics/aggregates/sources/redis_hll_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/aggregates/sources/redis_hll_spec.rb
@@ -3,11 +3,12 @@
require 'spec_helper'
RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::RedisHll do
- describe '.calculate_events_union' do
- let(:event_names) { %w[event_a event_b] }
- let(:start_date) { 7.days.ago }
- let(:end_date) { Date.current }
+ let_it_be(:event_names) { %w[event_a event_b] }
+ let_it_be(:start_date) { 7.days.ago }
+ let_it_be(:end_date) { Date.current }
+ let_it_be(:recorded_at) { Time.current }
+ describe '.calculate_events_union' do
subject(:calculate_metrics_union) do
described_class.calculate_metrics_union(metric_names: event_names, start_date: start_date, end_date: end_date, recorded_at: nil)
end
@@ -26,4 +27,30 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::RedisHll do
expect { calculate_metrics_union }.to raise_error Gitlab::Usage::Metrics::Aggregates::Sources::UnionNotAvailable
end
end
+
+ describe '.calculate_metrics_intersections' do
+ subject(:calculate_metrics_intersections) do
+ described_class.calculate_metrics_intersections(metric_names: event_names, start_date: start_date, end_date: end_date, recorded_at: recorded_at)
+ end
+
+ it 'uses values returned by union to compute the intersection' do
+ event_names.each do |event|
+ expect(Gitlab::Usage::Metrics::Aggregates::Sources::RedisHll).to receive(:calculate_metrics_union)
+ .with(metric_names: event, start_date: start_date, end_date: end_date, recorded_at: recorded_at)
+ .and_return(5)
+ end
+
+ expect(Gitlab::Usage::Metrics::Aggregates::Sources::RedisHll).to receive(:calculate_metrics_union)
+ .with(metric_names: event_names, start_date: start_date, end_date: end_date, recorded_at: recorded_at)
+ .and_return(2)
+
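+      # Inclusion-exclusion over the stubbed unions above:
+      # |A & B| = (|A| + |B|) - |A + B| => (5 + 5) - 2 => 8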
+ expect(calculate_metrics_intersections).to eq(8)
+ end
+
+ it 'raises error if union is < 0' do
+ allow(Gitlab::Usage::Metrics::Aggregates::Sources::RedisHll).to receive(:calculate_metrics_union).and_raise(Gitlab::Usage::Metrics::Aggregates::Sources::UnionNotAvailable)
+
+ expect { calculate_metrics_intersections }.to raise_error(Gitlab::Usage::Metrics::Aggregates::Sources::UnionNotAvailable)
+ end
+ end
end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_boards_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_boards_metric_spec.rb
new file mode 100644
index 00000000000..52c1ccdcd47
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_boards_metric_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountBoardsMetric do
+ let_it_be(:board) { create(:board) }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', data_source: 'database' }, 1
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_issues_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_issues_metric_spec.rb
new file mode 100644
index 00000000000..c3b59904f41
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_issues_metric_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountIssuesMetric do
+ let_it_be(:issue) { create(:issue) }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', data_source: 'database' }, 1
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_users_creating_issues_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_users_creating_issues_metric_spec.rb
new file mode 100644
index 00000000000..9f4686ab6cd
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_users_creating_issues_metric_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountUsersCreatingIssuesMetric do
+ let_it_be(:author) { create(:user) }
+ let_it_be(:issues) { create_list(:issue, 2, author: author, created_at: 4.days.ago) }
+ let_it_be(:old_issue) { create(:issue, author: author, created_at: 2.months.ago) }
+
+ context 'with all time frame' do
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', data_source: 'database' }, 1
+ end
+
+ context 'for 28d time frame' do
+ it_behaves_like 'a correct instrumented metric value', { time_frame: '28d', data_source: 'database' }, 1
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_users_using_approve_quick_action_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_users_using_approve_quick_action_metric_spec.rb
new file mode 100644
index 00000000000..7adba825a13
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_users_using_approve_quick_action_metric_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountUsersUsingApproveQuickActionMetric, :clean_gitlab_redis_shared_state do
+ before do
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:i_quickactions_approve, values: 1, time: 1.week.ago)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:i_quickactions_approve, values: 1, time: 2.weeks.ago)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:i_quickactions_approve, values: 2, time: 2.weeks.ago)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:i_quickactions_approve, values: 2, time: 2.months.ago)
+ end
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: '28d', data_source: 'redis_hll' }, 2
+ it_behaves_like 'a correct instrumented metric value', { time_frame: '7d', data_source: 'redis_hll' }, 1
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/hostname_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/hostname_metric_spec.rb
new file mode 100644
index 00000000000..83e07200025
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/hostname_metric_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::HostnameMetric do
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'none', data_source: 'ruby' }, Gitlab.config.gitlab.host
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/uuid_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/uuid_metric_spec.rb
new file mode 100644
index 00000000000..212dd3dc851
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/uuid_metric_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::UuidMetric do
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'none' }, Gitlab::CurrentSettings.uuid
+end
diff --git a/spec/lib/gitlab/usage/metrics/key_path_processor_spec.rb b/spec/lib/gitlab/usage/metrics/key_path_processor_spec.rb
new file mode 100644
index 00000000000..91c27825cce
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/key_path_processor_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::KeyPathProcessor do
+ describe '#unflatten_default_path' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:key_path, :value, :expected_hash) do
+ 'uuid' | nil | { uuid: nil }
+ 'uuid' | '1111' | { uuid: '1111' }
+ 'counts.issues' | nil | { counts: { issues: nil } }
+ 'counts.issues' | 100 | { counts: { issues: 100 } }
+ 'usage_activity_by_stage.verify.ci_builds' | 100 | { usage_activity_by_stage: { verify: { ci_builds: 100 } } }
+ end
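+    # Each dot-separated segment of key_path is expected to become one level of
+    # nesting, with the value stored under the final segment, as the rows above show.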
+
+ with_them do
+ subject { described_class.process(key_path, value) }
+
+ it { is_expected.to eq(expected_hash) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
index 82db3d94493..5f66387c82b 100644
--- a/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
@@ -28,14 +28,6 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red
it 'does not track edit actions if author is not present' do
expect(track_action(author: nil)).to be_nil
end
-
- context 'when feature flag track_editor_edit_actions is disabled' do
- it 'does not track edit actions' do
- stub_feature_flags(track_editor_edit_actions: false)
-
- expect(track_action(author: user1)).to be_nil
- end
- end
end
context 'for web IDE edit actions' do
diff --git a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
index 9fc28f6c4ec..4efacae0a48 100644
--- a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
@@ -45,6 +45,7 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
'quickactions',
'pipeline_authoring',
'epics_usage',
+ 'epic_boards_usage',
'secure'
)
end
diff --git a/spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb
index 8f9a3e0cd9e..e7edb8b9cf1 100644
--- a/spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb
@@ -3,21 +3,57 @@
require 'spec_helper'
RSpec.describe Gitlab::UsageDataCounters::KubernetesAgentCounter do
- it_behaves_like 'a redis usage counter', 'Kubernetes Agent', :gitops_sync
+ described_class::KNOWN_EVENTS.each do |event|
+ it_behaves_like 'a redis usage counter', 'Kubernetes Agent', event
+ it_behaves_like 'a redis usage counter with totals', :kubernetes_agent, event => 1
+ end
+
+ describe '.increment_event_counts' do
+ let(:events) do
+ {
+ 'gitops_sync' => 1,
+ 'k8s_api_proxy_request' => 2
+ }
+ end
- it_behaves_like 'a redis usage counter with totals', :kubernetes_agent, gitops_sync: 1
+ subject { described_class.increment_event_counts(events) }
- describe '.increment_gitops_sync' do
- it 'increments the gtops_sync counter by the new increment amount' do
- described_class.increment_gitops_sync(7)
- described_class.increment_gitops_sync(2)
- described_class.increment_gitops_sync(0)
+ it 'increments the specified counters by the new increment amount' do
+ described_class.increment_event_counts(events)
+ described_class.increment_event_counts(events)
+ described_class.increment_event_counts(events)
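+      # Each call adds the per-event increments, so after three calls the totals
+      # are gitops_sync => 3 * 1 = 3 and k8s_api_proxy_request => 3 * 2 = 6.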
- expect(described_class.totals).to eq(kubernetes_agent_gitops_sync: 9)
+ expect(described_class.totals).to eq(kubernetes_agent_gitops_sync: 3, kubernetes_agent_k8s_api_proxy_request: 6)
end
- it 'raises for negative numbers' do
- expect { described_class.increment_gitops_sync(-1) }.to raise_error(ArgumentError)
+ context 'when the event is unknown' do
+ let(:events) do
+ {
+ 'gitops_sync' => 1,
+ 'other_event' => 2
+ }
+ end
+
+ it 'raises an ArgumentError' do
+ expect(described_class).not_to receive(:increment_by)
+
+ expect { subject }.to raise_error(ArgumentError, 'unknown event other_event')
+ end
+ end
+
+ context 'when an increment is negative' do
+ let(:events) do
+ {
+ 'gitops_sync' => -1,
+ 'k8s_api_proxy_request' => 2
+ }
+ end
+
+ it 'raises an ArgumentError' do
+ expect(described_class).not_to receive(:increment_by)
+
+ expect { subject }.to raise_error(ArgumentError, 'gitops_sync count must be greater than or equal to zero')
+ end
end
end
end
diff --git a/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb
index 1be2a83f98f..c484595ee71 100644
--- a/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Gitlab::UsageDataCounters::PackageEventCounter, :clean_gitlab_red
end
it 'includes the right events' do
- expect(described_class::KNOWN_EVENTS.size).to eq 48
+ expect(described_class::KNOWN_EVENTS.size).to eq 51
end
described_class::KNOWN_EVENTS.each do |event|
diff --git a/spec/lib/gitlab/usage_data_metrics_spec.rb b/spec/lib/gitlab/usage_data_metrics_spec.rb
new file mode 100644
index 00000000000..18acd767c6d
--- /dev/null
+++ b/spec/lib/gitlab/usage_data_metrics_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::UsageDataMetrics do
+ describe '.uncached_data' do
+ subject { described_class.uncached_data }
+
+ around do |example|
+ described_class.instance_variable_set(:@definitions, nil)
+ example.run
+ described_class.instance_variable_set(:@definitions, nil)
+ end
+
+ before do
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
+ end
+
+ context 'with instrumentation_class' do
+ it 'includes top level keys' do
+ expect(subject).to include(:uuid)
+ expect(subject).to include(:hostname)
+ end
+
+ it 'includes counts keys for boards' do
+ expect(subject[:counts]).to include(:boards)
+ end
+
+ it 'includes i_quickactions_approve monthly and weekly keys' do
+ expect(subject[:redis_hll_counters][:quickactions]).to include(:i_quickactions_approve_monthly)
+ expect(subject[:redis_hll_counters][:quickactions]).to include(:i_quickactions_approve_weekly)
+ end
+
+ it 'includes counts keys for issues' do
+ expect(subject[:counts]).to include(:issues)
+ end
+
+ it 'includes usage_activity_by_stage keys' do
+ expect(subject[:usage_activity_by_stage][:plan]).to include(:issues)
+ end
+
+ it 'includes usage_activity_by_stage_monthly keys' do
+ expect(subject[:usage_activity_by_stage_monthly][:plan]).to include(:issues)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage_data_queries_spec.rb b/spec/lib/gitlab/usage_data_queries_spec.rb
index 718ab3b2d95..695859c8e6e 100644
--- a/spec/lib/gitlab/usage_data_queries_spec.rb
+++ b/spec/lib/gitlab/usage_data_queries_spec.rb
@@ -59,6 +59,14 @@ RSpec.describe Gitlab::UsageDataQueries do
end
end
+ describe '.histogram' do
+ it 'returns the histogram sql' do
+ expect(described_class.histogram(AlertManagement::HttpIntegration.active,
+ :project_id, buckets: 1..2, bucket_size: 101))
+ .to match(/^WITH "count_cte" AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported}/)
+ end
+ end
+
describe 'min/max methods' do
it 'returns nil' do
# user min/max
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index 01701f7aebd..d4b6ac09261 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -172,6 +172,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
another_project = create(:project, :repository, creator: another_user)
create(:remote_mirror, project: another_project, enabled: false)
create(:snippet, author: user)
+ create(:suggestion, note: create(:note, project: project))
end
expect(described_class.usage_activity_by_stage_create({})).to include(
@@ -181,7 +182,8 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
projects_with_disable_overriding_approvers_per_merge_request: 2,
projects_without_disable_overriding_approvers_per_merge_request: 6,
remote_mirrors: 2,
- snippets: 2
+ snippets: 2,
+ suggestions: 2
)
expect(described_class.usage_activity_by_stage_create(described_class.last_28_days_time_period)).to include(
deploy_keys: 1,
@@ -190,7 +192,8 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
projects_with_disable_overriding_approvers_per_merge_request: 1,
projects_without_disable_overriding_approvers_per_merge_request: 3,
remote_mirrors: 1,
- snippets: 1
+ snippets: 1,
+ suggestions: 1
)
end
end
@@ -571,7 +574,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(count_data[:projects_with_repositories_enabled]).to eq(3)
expect(count_data[:projects_with_error_tracking_enabled]).to eq(1)
expect(count_data[:projects_with_tracing_enabled]).to eq(1)
- expect(count_data[:projects_with_alerts_service_enabled]).to eq(1)
expect(count_data[:projects_with_enabled_alert_integrations]).to eq(1)
expect(count_data[:projects_with_prometheus_alerts]).to eq(2)
expect(count_data[:projects_with_terraform_reports]).to eq(2)
@@ -745,10 +747,34 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
end
+ describe '.runners_usage' do
+ before do
+ project = build(:project)
+ create_list(:ci_runner, 2, :instance_type, :online)
+ create(:ci_runner, :group, :online)
+ create(:ci_runner, :group, :inactive)
+ create_list(:ci_runner, 3, :project_type, :online, projects: [project])
+ end
+
+ subject { described_class.runners_usage }
+
+ it 'gathers runner usage counts correctly' do
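+      # From the setup above: 2 instance + 2 group (1 online, 1 inactive) + 3 project = 7 runners
+      # in total, while the *_active and *_active_online keys are expected to count only
+      # the active runners of each type.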
+ expect(subject[:ci_runners]).to eq(7)
+ expect(subject[:ci_runners_instance_type_active]).to eq(2)
+ expect(subject[:ci_runners_group_type_active]).to eq(1)
+ expect(subject[:ci_runners_project_type_active]).to eq(3)
+
+ expect(subject[:ci_runners_instance_type_active_online]).to eq(2)
+ expect(subject[:ci_runners_group_type_active_online]).to eq(1)
+ expect(subject[:ci_runners_project_type_active_online]).to eq(3)
+ end
+ end
+
describe '.usage_counters' do
subject { described_class.usage_counters }
it { is_expected.to include(:kubernetes_agent_gitops_sync) }
+ it { is_expected.to include(:kubernetes_agent_k8s_api_proxy_request) }
it { is_expected.to include(:static_site_editor_views) }
it { is_expected.to include(:package_events_i_package_pull_package) }
it { is_expected.to include(:package_events_i_package_delete_package_by_user) }
@@ -1158,8 +1184,17 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
describe ".system_usage_data_settings" do
+ let(:prometheus_client) { double(Gitlab::PrometheusClient) }
+
before do
allow(described_class).to receive(:operating_system).and_return('ubuntu-20.04')
+ expect(prometheus_client).to receive(:query).with(/gitlab_usage_ping:gitaly_apdex:ratio_avg_over_time_5m/).and_return([
+ {
+ 'metric' => {},
+ 'value' => [1616016381.473, '0.95']
+ }
+ ])
+ expect(described_class).to receive(:with_prometheus_client).and_yield(prometheus_client)
end
subject { described_class.system_usage_data_settings }
@@ -1171,6 +1206,10 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
it 'populates operating system information' do
expect(subject[:settings][:operating_system]).to eq('ubuntu-20.04')
end
+
+ it 'gathers gitaly apdex', :aggregate_failures do
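+      # The stubbed Prometheus result is a [timestamp, value] sample pair, so the
+      # apdex setting is expected to surface the '0.95' sample as a float.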
+ expect(subject[:settings][:gitaly_apdex]).to be_within(0.001).of(0.95)
+ end
end
end
@@ -1291,10 +1330,10 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
'p_analytics_repo' => 123,
'i_analytics_cohorts' => 123,
'i_analytics_dev_ops_score' => 123,
- 'i_analytics_dev_ops_adoption' => 123,
'i_analytics_instance_statistics' => 123,
'p_analytics_merge_request' => 123,
- 'g_analytics_merge_request' => 123,
+ 'i_analytics_dev_ops_adoption' => 123,
+ 'users_viewing_analytics_group_devops_adoption' => 123,
'analytics_unique_visits_for_any_target' => 543,
'analytics_unique_visits_for_any_target_monthly' => 987
}
@@ -1426,6 +1465,86 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
end
+ describe '.email_campaign_counts' do
+ subject { described_class.send(:email_campaign_counts) }
+
+ context 'when queries time out' do
+ before do
+ allow_any_instance_of(ActiveRecord::Relation)
+ .to receive(:count).and_raise(ActiveRecord::StatementInvalid.new(''))
+ end
+
+ it 'returns -1 for email campaign data' do
+ expected_data = {
+ "in_product_marketing_email_create_0_sent" => -1,
+ "in_product_marketing_email_create_0_cta_clicked" => -1,
+ "in_product_marketing_email_create_1_sent" => -1,
+ "in_product_marketing_email_create_1_cta_clicked" => -1,
+ "in_product_marketing_email_create_2_sent" => -1,
+ "in_product_marketing_email_create_2_cta_clicked" => -1,
+ "in_product_marketing_email_verify_0_sent" => -1,
+ "in_product_marketing_email_verify_0_cta_clicked" => -1,
+ "in_product_marketing_email_verify_1_sent" => -1,
+ "in_product_marketing_email_verify_1_cta_clicked" => -1,
+ "in_product_marketing_email_verify_2_sent" => -1,
+ "in_product_marketing_email_verify_2_cta_clicked" => -1,
+ "in_product_marketing_email_trial_0_sent" => -1,
+ "in_product_marketing_email_trial_0_cta_clicked" => -1,
+ "in_product_marketing_email_trial_1_sent" => -1,
+ "in_product_marketing_email_trial_1_cta_clicked" => -1,
+ "in_product_marketing_email_trial_2_sent" => -1,
+ "in_product_marketing_email_trial_2_cta_clicked" => -1,
+ "in_product_marketing_email_team_0_sent" => -1,
+ "in_product_marketing_email_team_0_cta_clicked" => -1,
+ "in_product_marketing_email_team_1_sent" => -1,
+ "in_product_marketing_email_team_1_cta_clicked" => -1,
+ "in_product_marketing_email_team_2_sent" => -1,
+ "in_product_marketing_email_team_2_cta_clicked" => -1
+ }
+
+ expect(subject).to eq(expected_data)
+ end
+ end
+
+ context 'when there are entries' do
+ before do
+ create(:in_product_marketing_email, track: :create, series: 0, cta_clicked_at: Time.zone.now)
+ create(:in_product_marketing_email, track: :verify, series: 0)
+ end
+
+ it 'gathers email campaign data' do
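+        # The clicked create-track email above counts toward both its _sent and
+        # _cta_clicked keys, the verify-track email counts toward _sent only, and
+        # every other track/series combination is expected to stay at 0.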
+ expected_data = {
+ "in_product_marketing_email_create_0_sent" => 1,
+ "in_product_marketing_email_create_0_cta_clicked" => 1,
+ "in_product_marketing_email_create_1_sent" => 0,
+ "in_product_marketing_email_create_1_cta_clicked" => 0,
+ "in_product_marketing_email_create_2_sent" => 0,
+ "in_product_marketing_email_create_2_cta_clicked" => 0,
+ "in_product_marketing_email_verify_0_sent" => 1,
+ "in_product_marketing_email_verify_0_cta_clicked" => 0,
+ "in_product_marketing_email_verify_1_sent" => 0,
+ "in_product_marketing_email_verify_1_cta_clicked" => 0,
+ "in_product_marketing_email_verify_2_sent" => 0,
+ "in_product_marketing_email_verify_2_cta_clicked" => 0,
+ "in_product_marketing_email_trial_0_sent" => 0,
+ "in_product_marketing_email_trial_0_cta_clicked" => 0,
+ "in_product_marketing_email_trial_1_sent" => 0,
+ "in_product_marketing_email_trial_1_cta_clicked" => 0,
+ "in_product_marketing_email_trial_2_sent" => 0,
+ "in_product_marketing_email_trial_2_cta_clicked" => 0,
+ "in_product_marketing_email_team_0_sent" => 0,
+ "in_product_marketing_email_team_0_cta_clicked" => 0,
+ "in_product_marketing_email_team_1_sent" => 0,
+ "in_product_marketing_email_team_1_cta_clicked" => 0,
+ "in_product_marketing_email_team_2_sent" => 0,
+ "in_product_marketing_email_team_2_cta_clicked" => 0
+ }
+
+ expect(subject).to eq(expected_data)
+ end
+ end
+ end
+
describe '.snowplow_event_counts' do
let_it_be(:time_period) { { collector_tstamp: 8.days.ago..1.day.ago } }