summaryrefslogtreecommitdiff
path: root/spec/lib
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2022-06-20 11:10:13 +0000
committerGitLab Bot <gitlab-bot@gitlab.com>2022-06-20 11:10:13 +0000
commit0ea3fcec397b69815975647f5e2aa5fe944a8486 (patch)
tree7979381b89d26011bcf9bdc989a40fcc2f1ed4ff /spec/lib
parent72123183a20411a36d607d70b12d57c484394c8e (diff)
downloadgitlab-ce-0ea3fcec397b69815975647f5e2aa5fe944a8486.tar.gz
Add latest changes from gitlab-org/gitlab@15-1-stable-eev15.1.0-rc42
Diffstat (limited to 'spec/lib')
-rw-r--r--spec/lib/api/entities/ci/job_request/image_spec.rb16
-rw-r--r--spec/lib/api/entities/personal_access_token_with_details_spec.rb29
-rw-r--r--spec/lib/api/entities/wiki_page_spec.rb17
-rw-r--r--spec/lib/api/helpers/project_stats_refresh_conflicts_helpers_spec.rb49
-rw-r--r--spec/lib/api/helpers/sse_helpers_spec.rb44
-rw-r--r--spec/lib/api/helpers_spec.rb249
-rw-r--r--spec/lib/api/integrations/slack/events/url_verification_spec.rb11
-rw-r--r--spec/lib/atlassian/jira_connect/client_spec.rb39
-rw-r--r--spec/lib/backup/manager_spec.rb55
-rw-r--r--spec/lib/backup/repositories_spec.rb96
-rw-r--r--spec/lib/banzai/filter/references/issue_reference_filter_spec.rb22
-rw-r--r--spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb11
-rw-r--r--spec/lib/banzai/filter/syntax_highlight_filter_spec.rb10
-rw-r--r--spec/lib/bulk_imports/groups/stage_spec.rb93
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/design_bundle_pipeline_spec.rb171
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb84
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/releases_pipeline_spec.rb40
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/repository_bundle_pipeline_spec.rb169
-rw-r--r--spec/lib/bulk_imports/projects/stage_spec.rb77
-rw-r--r--spec/lib/container_registry/migration_spec.rb61
-rw-r--r--spec/lib/error_tracking/stacktrace_builder_spec.rb95
-rw-r--r--spec/lib/generators/gitlab/usage_metric_generator_spec.rb28
-rw-r--r--spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb20
-rw-r--r--spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb31
-rw-r--r--spec/lib/gitlab/asciidoc_spec.rb6
-rw-r--r--spec/lib/gitlab/audit/unauthenticated_author_spec.rb6
-rw-r--r--spec/lib/gitlab/auth/o_auth/user_spec.rb44
-rw-r--r--spec/lib/gitlab/background_migration/backfill_namespace_id_for_project_route_spec.rb4
-rw-r--r--spec/lib/gitlab/background_migration/backfill_project_feature_package_registry_access_level_spec.rb124
-rw-r--r--spec/lib/gitlab/background_migration/backfill_project_member_namespace_id_spec.rb104
-rw-r--r--spec/lib/gitlab/background_migration/cleanup_orphaned_routes_spec.rb80
-rw-r--r--spec/lib/gitlab/background_migration/encrypt_integration_properties_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/fix_merge_request_diff_commit_users_spec.rb297
-rw-r--r--spec/lib/gitlab/background_migration/migrate_pages_to_zip_storage_spec.rb43
-rw-r--r--spec/lib/gitlab/background_migration/set_legacy_open_source_license_available_for_non_public_projects_spec.rb54
-rw-r--r--spec/lib/gitlab/bitbucket_server_import/importer_spec.rb2
-rw-r--r--spec/lib/gitlab/checks/changes_access_spec.rb40
-rw-r--r--spec/lib/gitlab/checks/single_change_access_spec.rb16
-rw-r--r--spec/lib/gitlab/checks/tag_check_spec.rb29
-rw-r--r--spec/lib/gitlab/ci/build/image_spec.rb9
-rw-r--r--spec/lib/gitlab/ci/config/entry/image_spec.rb66
-rw-r--r--spec/lib/gitlab/ci/config/entry/pull_policy_spec.rb87
-rw-r--r--spec/lib/gitlab/ci/config/entry/rules/rule/changes_spec.rb81
-rw-r--r--spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb10
-rw-r--r--spec/lib/gitlab/ci/config/external/mapper_spec.rb25
-rw-r--r--spec/lib/gitlab/ci/jwt_spec.rb14
-rw-r--r--spec/lib/gitlab/ci/parsers/coverage/sax_document_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/parsers/security/secret_detection_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/limit/rate_limit_spec.rb35
-rw-r--r--spec/lib/gitlab/ci/pipeline/quota/deployments_spec.rb3
-rw-r--r--spec/lib/gitlab/ci/reports/coverage_report_generator_spec.rb104
-rw-r--r--spec/lib/gitlab/ci/reports/coverage_report_spec.rb (renamed from spec/lib/gitlab/ci/reports/coverage_reports_spec.rb)16
-rw-r--r--spec/lib/gitlab/ci/runner_upgrade_check_spec.rb15
-rw-r--r--spec/lib/gitlab/ci/status/build/play_spec.rb7
-rw-r--r--spec/lib/gitlab/ci/status/build/scheduled_spec.rb5
-rw-r--r--spec/lib/gitlab/ci/trace/archive_spec.rb71
-rw-r--r--spec/lib/gitlab/ci/variables/builder_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/yaml_processor_spec.rb47
-rw-r--r--spec/lib/gitlab/content_security_policy/config_loader_spec.rb10
-rw-r--r--spec/lib/gitlab/daemon_spec.rb37
-rw-r--r--spec/lib/gitlab/data_builder/pipeline_spec.rb39
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb7
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_migration_spec.rb85
-rw-r--r--spec/lib/gitlab/database/batch_count_spec.rb54
-rw-r--r--spec/lib/gitlab/database/each_database_spec.rb6
-rw-r--r--spec/lib/gitlab/database/gitlab_schema_spec.rb6
-rw-r--r--spec/lib/gitlab/database/load_balancing/configuration_spec.rb61
-rw-r--r--spec/lib/gitlab/database/load_balancing/connection_proxy_spec.rb55
-rw-r--r--spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb40
-rw-r--r--spec/lib/gitlab/database/load_balancing/setup_spec.rb149
-rw-r--r--spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb4
-rw-r--r--spec/lib/gitlab/database/load_balancing/transaction_leaking_spec.rb81
-rw-r--r--spec/lib/gitlab/database/migration_helpers/announce_database_spec.rb31
-rw-r--r--spec/lib/gitlab/database/migration_helpers_spec.rb46
-rw-r--r--spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb2
-rw-r--r--spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb216
-rw-r--r--spec/lib/gitlab/database/migrations/reestablished_connection_stack_spec.rb2
-rw-r--r--spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb17
-rw-r--r--spec/lib/gitlab/database/partitioning/partition_manager_spec.rb34
-rw-r--r--spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb68
-rw-r--r--spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb73
-rw-r--r--spec/lib/gitlab/database/shared_model_spec.rb13
-rw-r--r--spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb1
-rw-r--r--spec/lib/gitlab/database_spec.rb32
-rw-r--r--spec/lib/gitlab/diff/custom_diff_spec.rb115
-rw-r--r--spec/lib/gitlab/diff/file_spec.rb36
-rw-r--r--spec/lib/gitlab/diff/rendered/notebook/diff_file_helper_spec.rb134
-rw-r--r--spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb29
-rw-r--r--spec/lib/gitlab/email/handler/service_desk_handler_spec.rb10
-rw-r--r--spec/lib/gitlab/email/message/repository_push_spec.rb2
-rw-r--r--spec/lib/gitlab/email/receiver_spec.rb58
-rw-r--r--spec/lib/gitlab/email/reply_parser_spec.rb67
-rw-r--r--spec/lib/gitlab/email/service_desk_receiver_spec.rb12
-rw-r--r--spec/lib/gitlab/error_tracking/logger_spec.rb21
-rw-r--r--spec/lib/gitlab/event_store/store_spec.rb18
-rw-r--r--spec/lib/gitlab/fogbugz_import/project_creator_spec.rb9
-rw-r--r--spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb269
-rw-r--r--spec/lib/gitlab/gfm/reference_rewriter_spec.rb3
-rw-r--r--spec/lib/gitlab/gfm/uploads_rewriter_spec.rb22
-rw-r--r--spec/lib/gitlab/git_access_snippet_spec.rb10
-rw-r--r--spec/lib/gitlab/git_access_spec.rb2
-rw-r--r--spec/lib/gitlab/git_access_wiki_spec.rb36
-rw-r--r--spec/lib/gitlab/gitaly_client/commit_service_spec.rb115
-rw-r--r--spec/lib/gitlab/gitaly_client/operation_service_spec.rb230
-rw-r--r--spec/lib/gitlab/github_import/importer/issue_importer_spec.rb56
-rw-r--r--spec/lib/gitlab/github_import/importer/releases_importer_spec.rb72
-rw-r--r--spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb12
-rw-r--r--spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb32
-rw-r--r--spec/lib/gitlab/graphql/markdown_field_spec.rb40
-rw-r--r--spec/lib/gitlab/graphql/negatable_arguments_spec.rb4
-rw-r--r--spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb4
-rw-r--r--spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb4
-rw-r--r--spec/lib/gitlab/graphql/present/field_extension_spec.rb17
-rw-r--r--spec/lib/gitlab/graphql/query_analyzers/ast/logger_analyzer_spec.rb51
-rw-r--r--spec/lib/gitlab/graphql/query_analyzers/ast/recursion_analyzer_spec.rb72
-rw-r--r--spec/lib/gitlab/graphql/query_analyzers/logger_analyzer_spec.rb49
-rw-r--r--spec/lib/gitlab/graphql/tracers/logger_tracer_spec.rb6
-rw-r--r--spec/lib/gitlab/hash_digest/facade_spec.rb36
-rw-r--r--spec/lib/gitlab/highlight_spec.rb15
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml7
-rw-r--r--spec/lib/gitlab/import_export/lfs_saver_spec.rb12
-rw-r--r--spec/lib/gitlab/import_export/safe_model_attributes.yml5
-rw-r--r--spec/lib/gitlab/import_sources_spec.rb24
-rw-r--r--spec/lib/gitlab/inactive_projects_deletion_warning_tracker_spec.rb46
-rw-r--r--spec/lib/gitlab/instrumentation_helper_spec.rb8
-rw-r--r--spec/lib/gitlab/jira_import/base_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/jira_import/issues_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/jira_import/labels_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/jira_import_spec.rb2
-rw-r--r--spec/lib/gitlab/legacy_github_import/importer_spec.rb22
-rw-r--r--spec/lib/gitlab/mail_room/mail_room_spec.rb2
-rw-r--r--spec/lib/gitlab/mailgun/webhook_processors/failure_logger_spec.rb92
-rw-r--r--spec/lib/gitlab/mailgun/webhook_processors/member_invites_spec.rb74
-rw-r--r--spec/lib/gitlab/memory/jemalloc_spec.rb121
-rw-r--r--spec/lib/gitlab/metrics/dashboard/processor_spec.rb38
-rw-r--r--spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb4
-rw-r--r--spec/lib/gitlab/metrics/sli_spec.rb58
-rw-r--r--spec/lib/gitlab/middleware/compressed_json_spec.rb9
-rw-r--r--spec/lib/gitlab/pages_transfer_spec.rb157
-rw-r--r--spec/lib/gitlab/patch/database_config_spec.rb3
-rw-r--r--spec/lib/gitlab/project_stats_refresh_conflicts_logger_spec.rb67
-rw-r--r--spec/lib/gitlab/project_template_spec.rb13
-rw-r--r--spec/lib/gitlab/protocol_access_spec.rb50
-rw-r--r--spec/lib/gitlab/rack_attack_spec.rb4
-rw-r--r--spec/lib/gitlab/redis/duplicate_jobs_spec.rb94
-rw-r--r--spec/lib/gitlab/redis/multi_store_spec.rb924
-rw-r--r--spec/lib/gitlab/redis/sidekiq_status_spec.rb68
-rw-r--r--spec/lib/gitlab/regex_requires_app_spec.rb90
-rw-r--r--spec/lib/gitlab/regex_spec.rb120
-rw-r--r--spec/lib/gitlab/render_timeout_spec.rb25
-rw-r--r--spec/lib/gitlab/seeder_spec.rb2
-rw-r--r--spec/lib/gitlab/service_desk_email_spec.rb6
-rw-r--r--spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb22
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb649
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executed_spec.rb30
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb22
-rw-r--r--spec/lib/gitlab/sidekiq_status_spec.rb203
-rw-r--r--spec/lib/gitlab/slash_commands/deploy_spec.rb2
-rw-r--r--spec/lib/gitlab/sql/cte_spec.rb19
-rw-r--r--spec/lib/gitlab/ssh/signature_spec.rb227
-rw-r--r--spec/lib/gitlab/ssh_public_key_spec.rb101
-rw-r--r--spec/lib/gitlab/static_site_editor/config/file_config/entry/global_spec.rb245
-rw-r--r--spec/lib/gitlab/static_site_editor/config/file_config/entry/image_upload_path_spec.rb38
-rw-r--r--spec/lib/gitlab/static_site_editor/config/file_config/entry/mount_spec.rb101
-rw-r--r--spec/lib/gitlab/static_site_editor/config/file_config/entry/mounts_spec.rb53
-rw-r--r--spec/lib/gitlab/static_site_editor/config/file_config/entry/static_site_generator_spec.rb50
-rw-r--r--spec/lib/gitlab/static_site_editor/config/file_config_spec.rb87
-rw-r--r--spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb127
-rw-r--r--spec/lib/gitlab/subscription_portal_spec.rb46
-rw-r--r--spec/lib/gitlab/themes_spec.rb4
-rw-r--r--spec/lib/gitlab/tracking/standard_context_spec.rb28
-rw-r--r--spec/lib/gitlab/updated_notes_paginator_spec.rb57
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/count_imported_projects_metric_spec.rb25
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/count_imported_projects_total_metric_spec.rb62
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/jira_imports_total_imported_issues_count_metric_spec.rb15
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/numbers_metric_spec.rb75
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/unique_active_users_metric_spec.rb31
-rw-r--r--spec/lib/gitlab/usage/metrics/name_suggestion_spec.rb12
-rw-r--r--spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb3
-rw-r--r--spec/lib/gitlab/usage/metrics/query_spec.rb6
-rw-r--r--spec/lib/gitlab/usage/service_ping_report_spec.rb5
-rw-r--r--spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb3
-rw-r--r--spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb12
-rw-r--r--spec/lib/gitlab/usage_data_counters/static_site_editor_counter_spec.rb14
-rw-r--r--spec/lib/gitlab/usage_data_counters_spec.rb4
-rw-r--r--spec/lib/gitlab/usage_data_metrics_spec.rb5
-rw-r--r--spec/lib/gitlab/usage_data_queries_spec.rb21
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb122
-rw-r--r--spec/lib/gitlab/utils/usage_data_spec.rb31
-rw-r--r--spec/lib/gitlab/web_hooks/rate_limiter_spec.rb123
-rw-r--r--spec/lib/marginalia_spec.rb2
-rw-r--r--spec/lib/object_storage/direct_upload_spec.rb98
-rw-r--r--spec/lib/security/ci_configuration/sast_build_action_spec.rb80
-rw-r--r--spec/lib/service_ping/build_payload_spec.rb31
-rw-r--r--spec/lib/service_ping/permit_data_categories_spec.rb20
195 files changed, 7320 insertions, 3550 deletions
diff --git a/spec/lib/api/entities/ci/job_request/image_spec.rb b/spec/lib/api/entities/ci/job_request/image_spec.rb
index 55aade03129..3ab14ffc3ae 100644
--- a/spec/lib/api/entities/ci/job_request/image_spec.rb
+++ b/spec/lib/api/entities/ci/job_request/image_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe API::Entities::Ci::JobRequest::Image do
let(:ports) { [{ number: 80, protocol: 'http', name: 'name' }]}
- let(:image) { double(name: 'image_name', entrypoint: ['foo'], ports: ports)}
+ let(:image) { double(name: 'image_name', entrypoint: ['foo'], ports: ports, pull_policy: ['if-not-present']) }
let(:entity) { described_class.new(image) }
subject { entity.as_json }
@@ -28,4 +28,18 @@ RSpec.describe API::Entities::Ci::JobRequest::Image do
expect(subject[:ports]).to be_nil
end
end
+
+ it 'returns the pull policy' do
+ expect(subject[:pull_policy]).to eq(['if-not-present'])
+ end
+
+ context 'when the FF ci_docker_image_pull_policy is disabled' do
+ before do
+ stub_feature_flags(ci_docker_image_pull_policy: false)
+ end
+
+ it 'does not return the pull policy' do
+ expect(subject).not_to have_key(:pull_policy)
+ end
+ end
end
diff --git a/spec/lib/api/entities/personal_access_token_with_details_spec.rb b/spec/lib/api/entities/personal_access_token_with_details_spec.rb
new file mode 100644
index 00000000000..a53d6febba1
--- /dev/null
+++ b/spec/lib/api/entities/personal_access_token_with_details_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::PersonalAccessTokenWithDetails do
+ describe '#as_json' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:token) { create(:personal_access_token, user: user, expires_at: nil) }
+
+ let(:entity) { described_class.new(token) }
+
+ it 'returns token data' do
+ expect(entity.as_json).to eq({
+ id: token.id,
+ name: token.name,
+ revoked: false,
+ created_at: token.created_at,
+ scopes: ['api'],
+ user_id: user.id,
+ last_used_at: nil,
+ active: true,
+ expires_at: nil,
+ expired: false,
+ expires_soon: false,
+ revoke_path: Gitlab::Routing.url_helpers.revoke_profile_personal_access_token_path(token)
+ })
+ end
+ end
+end
diff --git a/spec/lib/api/entities/wiki_page_spec.rb b/spec/lib/api/entities/wiki_page_spec.rb
index 238c8233a14..c75bba12484 100644
--- a/spec/lib/api/entities/wiki_page_spec.rb
+++ b/spec/lib/api/entities/wiki_page_spec.rb
@@ -43,6 +43,23 @@ RSpec.describe API::Entities::WikiPage do
expect(subject[:content]).to eq("<div>&#x000A;<p><strong>Test</strong> <em>content</em></p>&#x000A;</div>")
end
end
+
+ context 'when content contains a reference' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project) }
+ let(:issue) { create(:issue, project: project) }
+ let(:wiki_page) { create(:wiki_page, wiki: project.wiki, title: 'page_with_ref', content: issue.to_reference) }
+ let(:expected_content) { %r{<a href=".*#{issue.iid}".*>#{issue.to_reference}</a>} }
+
+ before do
+ params[:current_user] = user
+ project.add_developer(user)
+ end
+
+ it 'expands the reference in the content' do
+ expect(subject[:content]).to match(expected_content)
+ end
+ end
end
context 'when it is false' do
diff --git a/spec/lib/api/helpers/project_stats_refresh_conflicts_helpers_spec.rb b/spec/lib/api/helpers/project_stats_refresh_conflicts_helpers_spec.rb
new file mode 100644
index 00000000000..ae5c21e01c3
--- /dev/null
+++ b/spec/lib/api/helpers/project_stats_refresh_conflicts_helpers_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Helpers::ProjectStatsRefreshConflictsHelpers do
+ let_it_be(:project) { create(:project) }
+
+ let(:api_class) do
+ Class.new do
+ include API::Helpers::ProjectStatsRefreshConflictsHelpers
+ end
+ end
+
+ let(:api_controller) { api_class.new }
+
+ describe '#reject_if_build_artifacts_size_refreshing!' do
+ let(:entrypoint) { '/some/thing' }
+
+ before do
+ allow(project).to receive(:refreshing_build_artifacts_size?).and_return(refreshing)
+ allow(api_controller).to receive_message_chain(:request, :path).and_return(entrypoint)
+ end
+
+ context 'when project is undergoing stats refresh' do
+ let(:refreshing) { true }
+
+ it 'logs and returns a 409 conflict error' do
+ expect(Gitlab::ProjectStatsRefreshConflictsLogger)
+ .to receive(:warn_request_rejected_during_stats_refresh)
+ .with(project.id)
+
+ expect(api_controller).to receive(:conflict!)
+
+ api_controller.reject_if_build_artifacts_size_refreshing!(project)
+ end
+ end
+
+ context 'when project is not undergoing stats refresh' do
+ let(:refreshing) { false }
+
+ it 'does nothing' do
+ expect(Gitlab::ProjectStatsRefreshConflictsLogger).not_to receive(:warn_request_rejected_during_stats_refresh)
+ expect(api_controller).not_to receive(:conflict)
+
+ api_controller.reject_if_build_artifacts_size_refreshing!(project)
+ end
+ end
+ end
+end
diff --git a/spec/lib/api/helpers/sse_helpers_spec.rb b/spec/lib/api/helpers/sse_helpers_spec.rb
deleted file mode 100644
index 397051d9142..00000000000
--- a/spec/lib/api/helpers/sse_helpers_spec.rb
+++ /dev/null
@@ -1,44 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe API::Helpers::SSEHelpers do
- include Gitlab::Routing
-
- let_it_be(:project) { create(:project) }
-
- subject { Class.new.include(described_class).new }
-
- describe '#request_from_sse?' do
- before do
- allow(subject).to receive(:request).and_return(request)
- end
-
- context 'when referer is nil' do
- let(:request) { double(referer: nil)}
-
- it 'returns false' do
- expect(URI).not_to receive(:parse)
- expect(subject.request_from_sse?(project)).to eq false
- end
- end
-
- context 'when referer is not from SSE' do
- let(:request) { double(referer: 'https://gitlab.com')}
-
- it 'returns false' do
- expect(URI).to receive(:parse).and_call_original
- expect(subject.request_from_sse?(project)).to eq false
- end
- end
-
- context 'when referer is from SSE' do
- let(:request) { double(referer: project_show_sse_path(project, 'master/README.md'))}
-
- it 'returns true' do
- expect(URI).to receive(:parse).and_call_original
- expect(subject.request_from_sse?(project)).to eq true
- end
- end
- end
-end
diff --git a/spec/lib/api/helpers_spec.rb b/spec/lib/api/helpers_spec.rb
index 78ce9642392..23c97e2c0a3 100644
--- a/spec/lib/api/helpers_spec.rb
+++ b/spec/lib/api/helpers_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe API::Helpers do
using RSpec::Parameterized::TableSyntax
- subject { Class.new.include(described_class).new }
+ subject(:helper) { Class.new.include(described_class).new }
describe '#current_user' do
include Rack::Test::Methods
@@ -69,17 +69,17 @@ RSpec.describe API::Helpers do
shared_examples 'project finder' do
context 'when project exists' do
it 'returns requested project' do
- expect(subject.find_project(existing_id)).to eq(project)
+ expect(helper.find_project(existing_id)).to eq(project)
end
it 'returns nil' do
- expect(subject.find_project(non_existing_id)).to be_nil
+ expect(helper.find_project(non_existing_id)).to be_nil
end
end
context 'when project id is not provided' do
it 'returns nil' do
- expect(subject.find_project(nil)).to be_nil
+ expect(helper.find_project(nil)).to be_nil
end
end
end
@@ -105,7 +105,7 @@ RSpec.describe API::Helpers do
it 'does not hit the database' do
expect(Project).not_to receive(:find_by_full_path)
- subject.find_project(non_existing_id)
+ helper.find_project(non_existing_id)
end
end
end
@@ -116,7 +116,7 @@ RSpec.describe API::Helpers do
it 'does not return the project pending delete' do
expect(Project).not_to receive(:find_by_full_path)
- expect(subject.find_project(project_pending_delete.id)).to be_nil
+ expect(helper.find_project(project_pending_delete.id)).to be_nil
end
end
@@ -126,7 +126,7 @@ RSpec.describe API::Helpers do
it 'does not return the hidden project' do
expect(Project).not_to receive(:find_by_full_path)
- expect(subject.find_project(hidden_project.id)).to be_nil
+ expect(helper.find_project(hidden_project.id)).to be_nil
end
end
end
@@ -138,25 +138,25 @@ RSpec.describe API::Helpers do
shared_examples 'private project without access' do
before do
project.update_column(:visibility_level, Gitlab::VisibilityLevel.level_value('private'))
- allow(subject).to receive(:authenticate_non_public?).and_return(false)
+ allow(helper).to receive(:authenticate_non_public?).and_return(false)
end
it 'returns not found' do
- expect(subject).to receive(:not_found!)
+ expect(helper).to receive(:not_found!)
- subject.find_project!(project.id)
+ helper.find_project!(project.id)
end
end
context 'when user is authenticated' do
before do
- allow(subject).to receive(:current_user).and_return(user)
- allow(subject).to receive(:initial_current_user).and_return(user)
+ allow(helper).to receive(:current_user).and_return(user)
+ allow(helper).to receive(:initial_current_user).and_return(user)
end
context 'public project' do
it 'returns requested project' do
- expect(subject.find_project!(project.id)).to eq(project)
+ expect(helper.find_project!(project.id)).to eq(project)
end
end
@@ -167,13 +167,13 @@ RSpec.describe API::Helpers do
context 'when user is not authenticated' do
before do
- allow(subject).to receive(:current_user).and_return(nil)
- allow(subject).to receive(:initial_current_user).and_return(nil)
+ allow(helper).to receive(:current_user).and_return(nil)
+ allow(helper).to receive(:initial_current_user).and_return(nil)
end
context 'public project' do
it 'returns requested project' do
- expect(subject.find_project!(project.id)).to eq(project)
+ expect(helper.find_project!(project.id)).to eq(project)
end
end
@@ -188,21 +188,21 @@ RSpec.describe API::Helpers do
let(:user) { project.first_owner}
before do
- allow(subject).to receive(:current_user).and_return(user)
- allow(subject).to receive(:authorized_project_scope?).and_return(true)
- allow(subject).to receive(:job_token_authentication?).and_return(false)
- allow(subject).to receive(:authenticate_non_public?).and_return(false)
+ allow(helper).to receive(:current_user).and_return(user)
+ allow(helper).to receive(:authorized_project_scope?).and_return(true)
+ allow(helper).to receive(:job_token_authentication?).and_return(false)
+ allow(helper).to receive(:authenticate_non_public?).and_return(false)
end
shared_examples 'project finder' do
context 'when project exists' do
it 'returns requested project' do
- expect(subject.find_project!(existing_id)).to eq(project)
+ expect(helper.find_project!(existing_id)).to eq(project)
end
it 'returns nil' do
- expect(subject).to receive(:render_api_error!).with('404 Project Not Found', 404)
- expect(subject.find_project!(non_existing_id)).to be_nil
+ expect(helper).to receive(:render_api_error!).with('404 Project Not Found', 404)
+ expect(helper.find_project!(non_existing_id)).to be_nil
end
end
end
@@ -227,9 +227,9 @@ RSpec.describe API::Helpers do
it 'does not hit the database' do
expect(Project).not_to receive(:find_by_full_path)
- expect(subject).to receive(:render_api_error!).with('404 Project Not Found', 404)
+ expect(helper).to receive(:render_api_error!).with('404 Project Not Found', 404)
- subject.find_project!(non_existing_id)
+ helper.find_project!(non_existing_id)
end
end
end
@@ -243,25 +243,25 @@ RSpec.describe API::Helpers do
shared_examples 'private group without access' do
before do
group.update_column(:visibility_level, Gitlab::VisibilityLevel.level_value('private'))
- allow(subject).to receive(:authenticate_non_public?).and_return(false)
+ allow(helper).to receive(:authenticate_non_public?).and_return(false)
end
it 'returns not found' do
- expect(subject).to receive(:not_found!)
+ expect(helper).to receive(:not_found!)
- subject.find_group!(group.id)
+ helper.find_group!(group.id)
end
end
context 'when user is authenticated' do
before do
- allow(subject).to receive(:current_user).and_return(user)
- allow(subject).to receive(:initial_current_user).and_return(user)
+ allow(helper).to receive(:current_user).and_return(user)
+ allow(helper).to receive(:initial_current_user).and_return(user)
end
context 'public group' do
it 'returns requested group' do
- expect(subject.find_group!(group.id)).to eq(group)
+ expect(helper.find_group!(group.id)).to eq(group)
end
end
@@ -272,13 +272,13 @@ RSpec.describe API::Helpers do
context 'when user is not authenticated' do
before do
- allow(subject).to receive(:current_user).and_return(nil)
- allow(subject).to receive(:initial_current_user).and_return(nil)
+ allow(helper).to receive(:current_user).and_return(nil)
+ allow(helper).to receive(:initial_current_user).and_return(nil)
end
context 'public group' do
it 'returns requested group' do
- expect(subject.find_group!(group.id)).to eq(group)
+ expect(helper.find_group!(group.id)).to eq(group)
end
end
@@ -293,21 +293,21 @@ RSpec.describe API::Helpers do
let(:user) { group.first_owner }
before do
- allow(subject).to receive(:current_user).and_return(user)
- allow(subject).to receive(:authorized_project_scope?).and_return(true)
- allow(subject).to receive(:job_token_authentication?).and_return(false)
- allow(subject).to receive(:authenticate_non_public?).and_return(false)
+ allow(helper).to receive(:current_user).and_return(user)
+ allow(helper).to receive(:authorized_project_scope?).and_return(true)
+ allow(helper).to receive(:job_token_authentication?).and_return(false)
+ allow(helper).to receive(:authenticate_non_public?).and_return(false)
end
shared_examples 'group finder' do
context 'when group exists' do
it 'returns requested group' do
- expect(subject.find_group!(existing_id)).to eq(group)
+ expect(helper.find_group!(existing_id)).to eq(group)
end
it 'returns nil' do
- expect(subject).to receive(:render_api_error!).with('404 Group Not Found', 404)
- expect(subject.find_group!(non_existing_id)).to be_nil
+ expect(helper).to receive(:render_api_error!).with('404 Group Not Found', 404)
+ expect(helper.find_group!(non_existing_id)).to be_nil
end
end
end
@@ -335,25 +335,25 @@ RSpec.describe API::Helpers do
shared_examples 'private group without access' do
before do
group.update_column(:visibility_level, Gitlab::VisibilityLevel.level_value('private'))
- allow(subject).to receive(:authenticate_non_public?).and_return(false)
+ allow(helper).to receive(:authenticate_non_public?).and_return(false)
end
it 'returns not found' do
- expect(subject).to receive(:not_found!)
+ expect(helper).to receive(:not_found!)
- subject.find_group_by_full_path!(group.full_path)
+ helper.find_group_by_full_path!(group.full_path)
end
end
context 'when user is authenticated' do
before do
- allow(subject).to receive(:current_user).and_return(user)
- allow(subject).to receive(:initial_current_user).and_return(user)
+ allow(helper).to receive(:current_user).and_return(user)
+ allow(helper).to receive(:initial_current_user).and_return(user)
end
context 'public group' do
it 'returns requested group' do
- expect(subject.find_group_by_full_path!(group.full_path)).to eq(group)
+ expect(helper.find_group_by_full_path!(group.full_path)).to eq(group)
end
end
@@ -367,7 +367,7 @@ RSpec.describe API::Helpers do
end
it 'returns requested group with access' do
- expect(subject.find_group_by_full_path!(group.full_path)).to eq(group)
+ expect(helper.find_group_by_full_path!(group.full_path)).to eq(group)
end
end
end
@@ -375,13 +375,13 @@ RSpec.describe API::Helpers do
context 'when user is not authenticated' do
before do
- allow(subject).to receive(:current_user).and_return(nil)
- allow(subject).to receive(:initial_current_user).and_return(nil)
+ allow(helper).to receive(:current_user).and_return(nil)
+ allow(helper).to receive(:initial_current_user).and_return(nil)
end
context 'public group' do
it 'returns requested group' do
- expect(subject.find_group_by_full_path!(group.full_path)).to eq(group)
+ expect(helper.find_group_by_full_path!(group.full_path)).to eq(group)
end
end
@@ -397,13 +397,13 @@ RSpec.describe API::Helpers do
shared_examples 'namespace finder' do
context 'when namespace exists' do
it 'returns requested namespace' do
- expect(subject.find_namespace(existing_id)).to eq(namespace)
+ expect(helper.find_namespace(existing_id)).to eq(namespace)
end
end
context "when namespace doesn't exists" do
it 'returns nil' do
- expect(subject.find_namespace(non_existing_id)).to be_nil
+ expect(helper.find_namespace(non_existing_id)).to be_nil
end
end
end
@@ -427,9 +427,9 @@ RSpec.describe API::Helpers do
let(:user1) { create(:user) }
before do
- allow(subject).to receive(:current_user).and_return(user1)
- allow(subject).to receive(:header).and_return(nil)
- allow(subject).to receive(:not_found!).and_raise('404 Namespace not found')
+ allow(helper).to receive(:current_user).and_return(user1)
+ allow(helper).to receive(:header).and_return(nil)
+ allow(helper).to receive(:not_found!).and_raise('404 Namespace not found')
end
context 'when namespace is group' do
@@ -477,7 +477,7 @@ RSpec.describe API::Helpers do
describe '#find_namespace!' do
let(:namespace_finder) do
- subject.find_namespace!(namespace.id)
+ helper.find_namespace!(namespace.id)
end
it_behaves_like 'user namespace finder'
@@ -488,7 +488,7 @@ RSpec.describe API::Helpers do
let_it_be(:other_project) { create(:project) }
let_it_be(:job) { create(:ci_build) }
- let(:send_authorized_project_scope) { subject.authorized_project_scope?(project) }
+ let(:send_authorized_project_scope) { helper.authorized_project_scope?(project) }
where(:job_token_authentication, :route_setting, :feature_flag, :same_job_project, :expected_result) do
false | false | false | false | true
@@ -511,9 +511,9 @@ RSpec.describe API::Helpers do
with_them do
before do
- allow(subject).to receive(:job_token_authentication?).and_return(job_token_authentication)
- allow(subject).to receive(:route_authentication_setting).and_return(job_token_scope: route_setting ? :project : nil)
- allow(subject).to receive(:current_authenticated_job).and_return(job)
+ allow(helper).to receive(:job_token_authentication?).and_return(job_token_authentication)
+ allow(helper).to receive(:route_authentication_setting).and_return(job_token_scope: route_setting ? :project : nil)
+ allow(helper).to receive(:current_authenticated_job).and_return(job)
allow(job).to receive(:project).and_return(same_job_project ? project : other_project)
stub_feature_flags(ci_job_token_scope: false)
@@ -531,15 +531,15 @@ RSpec.describe API::Helpers do
let(:blob) { double(name: 'foobar') }
let(:send_git_blob) do
- subject.send(:send_git_blob, repository, blob)
- subject.header
+ helper.send(:send_git_blob, repository, blob)
+ helper.header
end
before do
- allow(subject).to receive(:env).and_return({})
- allow(subject).to receive(:content_type)
- allow(subject).to receive(:header).and_return({})
- allow(subject).to receive(:body).and_return('')
+ allow(helper).to receive(:env).and_return({})
+ allow(helper).to receive(:content_type)
+ allow(helper).to receive(:header).and_return({})
+ allow(helper).to receive(:body).and_return('')
allow(Gitlab::Workhorse).to receive(:send_git_blob)
end
@@ -572,19 +572,19 @@ RSpec.describe API::Helpers do
it 'tracks redis hll event' do
expect(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event).with(event_name, values: value)
- subject.increment_unique_values(event_name, value)
+ helper.increment_unique_values(event_name, value)
end
it 'logs an exception for unknown event' do
expect(Gitlab::AppLogger).to receive(:warn).with("Redis tracking event failed for event: #{unknown_event}, message: Unknown event #{unknown_event}")
- subject.increment_unique_values(unknown_event, value)
+ helper.increment_unique_values(unknown_event, value)
end
it 'does not track event for nil values' do
expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
- subject.increment_unique_values(unknown_event, nil)
+ helper.increment_unique_values(unknown_event, nil)
end
end
@@ -639,18 +639,6 @@ RSpec.describe API::Helpers do
it 'converts to id' do
is_expected.to eq({ 'id' => 'asc' })
end
-
- context 'when replace_order_by_created_at_with_id feature flag is disabled' do
- before do
- stub_feature_flags(replace_order_by_created_at_with_id: false)
- end
-
- include_examples '#order_options_with_tie_breaker'
-
- it 'maintains created_at order' do
- is_expected.to eq({ 'created_at' => 'asc', 'id' => 'asc' })
- end
- end
end
end
@@ -659,21 +647,21 @@ RSpec.describe API::Helpers do
context 'when unmodified check passes' do
before do
- allow(subject).to receive(:check_unmodified_since!).with(project.updated_at).and_return(true)
+ allow(helper).to receive(:check_unmodified_since!).with(project.updated_at).and_return(true)
end
it 'destroys given project' do
- allow(subject).to receive(:status).with(204)
- allow(subject).to receive(:body).with(false)
+ allow(helper).to receive(:status).with(204)
+ allow(helper).to receive(:body).with(false)
expect(project).to receive(:destroy).and_call_original
- expect { subject.destroy_conditionally!(project) }.to change(Project, :count).by(-1)
+ expect { helper.destroy_conditionally!(project) }.to change(Project, :count).by(-1)
end
end
context 'when unmodified check fails' do
before do
- allow(subject).to receive(:check_unmodified_since!).with(project.updated_at).and_throw(:error)
+ allow(helper).to receive(:check_unmodified_since!).with(project.updated_at).and_throw(:error)
end
# #destroy_conditionally! uses Grape errors which Ruby-throws a symbol, shifting execution to somewhere else.
@@ -683,7 +671,7 @@ RSpec.describe API::Helpers do
it 'does not destroy given project' do
expect(project).not_to receive(:destroy)
- expect { subject.destroy_conditionally!(project) }.to throw_symbol(:error).and change { Project.count }.by(0)
+ expect { helper.destroy_conditionally!(project) }.to throw_symbol(:error).and change { Project.count }.by(0)
end
end
end
@@ -692,30 +680,30 @@ RSpec.describe API::Helpers do
let(:unmodified_since_header) { Time.now.change(usec: 0) }
before do
- allow(subject).to receive(:headers).and_return('If-Unmodified-Since' => unmodified_since_header.to_s)
+ allow(helper).to receive(:headers).and_return('If-Unmodified-Since' => unmodified_since_header.to_s)
end
context 'when last modified is later than header value' do
it 'renders error' do
- expect(subject).to receive(:render_api_error!)
+ expect(helper).to receive(:render_api_error!)
- subject.check_unmodified_since!(unmodified_since_header + 1.hour)
+ helper.check_unmodified_since!(unmodified_since_header + 1.hour)
end
end
context 'when last modified is earlier than header value' do
it 'does not render error' do
- expect(subject).not_to receive(:render_api_error!)
+ expect(helper).not_to receive(:render_api_error!)
- subject.check_unmodified_since!(unmodified_since_header - 1.hour)
+ helper.check_unmodified_since!(unmodified_since_header - 1.hour)
end
end
context 'when last modified is equal to header value' do
it 'does not render error' do
- expect(subject).not_to receive(:render_api_error!)
+ expect(helper).not_to receive(:render_api_error!)
- subject.check_unmodified_since!(unmodified_since_header)
+ helper.check_unmodified_since!(unmodified_since_header)
end
end
@@ -723,9 +711,9 @@ RSpec.describe API::Helpers do
let(:unmodified_since_header) { nil }
it 'does not render error' do
- expect(subject).not_to receive(:render_api_error!)
+ expect(helper).not_to receive(:render_api_error!)
- subject.check_unmodified_since!(Time.now)
+ helper.check_unmodified_since!(Time.now)
end
end
@@ -733,9 +721,9 @@ RSpec.describe API::Helpers do
let(:unmodified_since_header) { "abcd" }
it 'does not render error' do
- expect(subject).not_to receive(:render_api_error!)
+ expect(helper).not_to receive(:render_api_error!)
- subject.check_unmodified_since!(Time.now)
+ helper.check_unmodified_since!(Time.now)
end
end
end
@@ -810,14 +798,71 @@ RSpec.describe API::Helpers do
before do
u = current_user_set ? user : nil
- subject.instance_variable_set(:@current_user, u)
+ helper.instance_variable_set(:@current_user, u)
- allow(subject).to receive(:params).and_return(params)
+ allow(helper).to receive(:params).and_return(params)
end
it 'returns the expected result' do
- expect(subject.order_by_similarity?(allow_unauthorized: allow_unauthorized)).to eq(expected)
+ expect(helper.order_by_similarity?(allow_unauthorized: allow_unauthorized)).to eq(expected)
end
end
end
+
+ describe '#render_api_error_with_reason!' do
+ before do
+ allow(helper).to receive(:env).and_return({})
+ allow(helper).to receive(:header).and_return({})
+ allow(helper).to receive(:error!)
+ end
+
+ it 'renders error with code' do
+ expect(helper).to receive(:set_status_code_in_env).with(999)
+ expect(helper).to receive(:error!).with({ 'message' => 'a message - good reason' }, 999, {})
+
+ helper.render_api_error_with_reason!(999, 'a message', 'good reason')
+ end
+ end
+
+ describe '#unauthorized!' do
+ it 'renders 401' do
+ expect(helper).to receive(:render_api_error_with_reason!).with(401, '401 Unauthorized', nil)
+
+ helper.unauthorized!
+ end
+
+ it 'renders 401 with a reason' do
+ expect(helper).to receive(:render_api_error_with_reason!).with(401, '401 Unauthorized', 'custom reason')
+
+ helper.unauthorized!('custom reason')
+ end
+ end
+
+ describe '#forbidden!' do
+ it 'renders 403' do
+ expect(helper).to receive(:render_api_error_with_reason!).with(403, '403 Forbidden', nil)
+
+ helper.forbidden!
+ end
+
+ it 'renders 403 with a reason' do
+ expect(helper).to receive(:render_api_error_with_reason!).with(403, '403 Forbidden', 'custom reason')
+
+ helper.forbidden!('custom reason')
+ end
+ end
+
+ describe '#bad_request!' do
+ it 'renders 400' do
+ expect(helper).to receive(:render_api_error_with_reason!).with(400, '400 Bad request', nil)
+
+ helper.bad_request!
+ end
+
+ it 'renders 400 with a reason' do
+ expect(helper).to receive(:render_api_error_with_reason!).with(400, '400 Bad request', 'custom reason')
+
+ helper.bad_request!('custom reason')
+ end
+ end
end
diff --git a/spec/lib/api/integrations/slack/events/url_verification_spec.rb b/spec/lib/api/integrations/slack/events/url_verification_spec.rb
new file mode 100644
index 00000000000..2778f0d708d
--- /dev/null
+++ b/spec/lib/api/integrations/slack/events/url_verification_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Integrations::Slack::Events::UrlVerification do
+ describe '.call' do
+ it 'returns the challenge' do
+ expect(described_class.call({ challenge: 'foo' })).to eq({ challenge: 'foo' })
+ end
+ end
+end
diff --git a/spec/lib/atlassian/jira_connect/client_spec.rb b/spec/lib/atlassian/jira_connect/client_spec.rb
index dd3130c78bf..0ae0f02c46e 100644
--- a/spec/lib/atlassian/jira_connect/client_spec.rb
+++ b/spec/lib/atlassian/jira_connect/client_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Atlassian::JiraConnect::Client do
include StubRequests
- subject { described_class.new('https://gitlab-test.atlassian.net', 'sample_secret') }
+ subject(:client) { described_class.new('https://gitlab-test.atlassian.net', 'sample_secret') }
let_it_be(:project) { create_default(:project, :repository) }
let_it_be(:mrs_by_title) { create_list(:merge_request, 4, :unique_branches, :jira_title) }
@@ -413,4 +413,41 @@ RSpec.describe Atlassian::JiraConnect::Client do
expect { subject.send(:store_dev_info, project: project, merge_requests: merge_requests) }.not_to exceed_query_limit(control_count)
end
end
+
+ describe '#user_info' do
+ let(:account_id) { '12345' }
+ let(:response_body) do
+ {
+ groups: {
+ items: [
+ { name: 'site-admins' }
+ ]
+ }
+ }.to_json
+ end
+
+ before do
+ stub_full_request("https://gitlab-test.atlassian.net/rest/api/3/user?accountId=#{account_id}&expand=groups")
+ .to_return(status: response_status, body: response_body, headers: { 'Content-Type': 'application/json' })
+ end
+
+ context 'with a successful response' do
+ let(:response_status) { 200 }
+
+ it 'returns a JiraUser instance' do
+ jira_user = client.user_info(account_id)
+
+ expect(jira_user).to be_a(Atlassian::JiraConnect::JiraUser)
+ expect(jira_user).to be_site_admin
+ end
+ end
+
+ context 'with a failed response' do
+ let(:response_status) { 401 }
+
+ it 'returns nil' do
+ expect(client.user_info(account_id)).to be_nil
+ end
+ end
+ end
end
diff --git a/spec/lib/backup/manager_spec.rb b/spec/lib/backup/manager_spec.rb
index a2477834dde..519d414f643 100644
--- a/spec/lib/backup/manager_spec.rb
+++ b/spec/lib/backup/manager_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe Backup::Manager do
# is trying to display a diff and `File.exist?` is stubbed. Adding a
# default stub fixes this.
allow(File).to receive(:exist?).and_call_original
+ allow(FileUtils).to receive(:rm_rf).and_call_original
allow(progress).to receive(:puts)
allow(progress).to receive(:print)
@@ -171,12 +172,14 @@ RSpec.describe Backup::Manager do
allow(task2).to receive(:dump).with(File.join(Gitlab.config.backup.path, 'task2.tar.gz'), full_backup_id)
end
- it 'executes tar' do
+ it 'creates a backup tar' do
travel_to(backup_time) do
subject.create # rubocop:disable Rails/SaveBang
-
- expect(Kernel).to have_received(:system).with(*pack_tar_cmdline)
end
+
+ expect(Kernel).to have_received(:system).with(*pack_tar_cmdline)
+ expect(FileUtils).to have_received(:rm_rf).with(File.join(Gitlab.config.backup.path, 'backup_information.yml'))
+ expect(FileUtils).to have_received(:rm_rf).with(File.join(Gitlab.config.backup.path, 'tmp'))
end
context 'when BACKUP is set' do
@@ -203,6 +206,8 @@ RSpec.describe Backup::Manager do
end.to raise_error(Backup::Error, 'Backup failed')
expect(Gitlab::BackupLogger).to have_received(:info).with(message: "Creating archive #{pack_tar_file} failed")
+ expect(FileUtils).to have_received(:rm_rf).with(File.join(Gitlab.config.backup.path, 'backup_information.yml'))
+ expect(FileUtils).to have_received(:rm_rf).with(File.join(Gitlab.config.backup.path, 'tmp'))
end
end
@@ -597,6 +602,7 @@ RSpec.describe Backup::Manager do
skipped: 'tar',
tar_version: be_a(String)
)
+ expect(FileUtils).to have_received(:rm_rf).with(File.join(Gitlab.config.backup.path, 'tmp'))
end
end
@@ -697,6 +703,8 @@ RSpec.describe Backup::Manager do
expect(Kernel).to have_received(:system).with(*unpack_tar_cmdline)
expect(Kernel).to have_received(:system).with(*pack_tar_cmdline)
+ expect(FileUtils).to have_received(:rm_rf).with(File.join(Gitlab.config.backup.path, 'backup_information.yml'))
+ expect(FileUtils).to have_received(:rm_rf).with(File.join(Gitlab.config.backup.path, 'tmp'))
end
context 'untar fails' do
@@ -724,6 +732,8 @@ RSpec.describe Backup::Manager do
end.to raise_error(Backup::Error, 'Backup failed')
expect(Gitlab::BackupLogger).to have_received(:info).with(message: "Creating archive #{pack_tar_file} failed")
+ expect(FileUtils).to have_received(:rm_rf).with(File.join(Gitlab.config.backup.path, 'backup_information.yml'))
+ expect(FileUtils).to have_received(:rm_rf).with(File.join(Gitlab.config.backup.path, 'tmp'))
end
end
@@ -786,6 +796,8 @@ RSpec.describe Backup::Manager do
expect(Kernel).to have_received(:system).with(*unpack_tar_cmdline)
expect(Kernel).to have_received(:system).with(*pack_tar_cmdline)
+ expect(FileUtils).to have_received(:rm_rf).with(File.join(Gitlab.config.backup.path, 'backup_information.yml'))
+ expect(FileUtils).to have_received(:rm_rf).with(File.join(Gitlab.config.backup.path, 'tmp'))
end
context 'untar fails' do
@@ -817,6 +829,8 @@ RSpec.describe Backup::Manager do
end.to raise_error(Backup::Error, 'Backup failed')
expect(Gitlab::BackupLogger).to have_received(:info).with(message: "Creating archive #{pack_tar_file} failed")
+ expect(FileUtils).to have_received(:rm_rf).with(File.join(Gitlab.config.backup.path, 'backup_information.yml'))
+ expect(FileUtils).to have_received(:rm_rf).with(File.join(Gitlab.config.backup.path, 'tmp'))
end
end
@@ -1001,6 +1015,8 @@ RSpec.describe Backup::Manager do
subject.restore
expect(Kernel).to have_received(:system).with(*tar_cmdline)
+ expect(FileUtils).to have_received(:rm_rf).with(File.join(Gitlab.config.backup.path, 'backup_information.yml'))
+ expect(FileUtils).to have_received(:rm_rf).with(File.join(Gitlab.config.backup.path, 'tmp'))
end
context 'tar fails' do
@@ -1031,22 +1047,6 @@ RSpec.describe Backup::Manager do
.with(a_string_matching('GitLab version mismatch'))
end
end
-
- describe 'tmp files' do
- let(:path) { File.join(Gitlab.config.backup.path, 'tmp') }
-
- before do
- allow(FileUtils).to receive(:rm_rf).and_call_original
- end
-
- it 'removes backups/tmp dir' do
- expect(FileUtils).to receive(:rm_rf).with(path).and_call_original
-
- subject.restore
-
- expect(Gitlab::BackupLogger).to have_received(:info).with(message: 'Deleting backups/tmp ... ')
- end
- end
end
context 'when there is a non-tarred backup in the directory' do
@@ -1066,6 +1066,7 @@ RSpec.describe Backup::Manager do
expect(progress).to have_received(:puts)
.with(a_string_matching('Non tarred backup found '))
+ expect(FileUtils).to have_received(:rm_rf).with(File.join(Gitlab.config.backup.path, 'tmp'))
end
context 'on version mismatch' do
@@ -1082,22 +1083,6 @@ RSpec.describe Backup::Manager do
.with(a_string_matching('GitLab version mismatch'))
end
end
-
- describe 'tmp files' do
- let(:path) { File.join(Gitlab.config.backup.path, 'tmp') }
-
- before do
- allow(FileUtils).to receive(:rm_rf).and_call_original
- end
-
- it 'removes backups/tmp dir' do
- expect(FileUtils).to receive(:rm_rf).with(path).and_call_original
-
- subject.restore
-
- expect(Gitlab::BackupLogger).to have_received(:info).with(message: 'Deleting backups/tmp ... ')
- end
- end
end
end
end
diff --git a/spec/lib/backup/repositories_spec.rb b/spec/lib/backup/repositories_spec.rb
index 1581e4793e3..8bcf1e46c33 100644
--- a/spec/lib/backup/repositories_spec.rb
+++ b/spec/lib/backup/repositories_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Backup::Repositories do
let(:progress) { spy(:stdout) }
let(:strategy) { spy(:strategy) }
let(:storages) { [] }
+ let(:paths) { [] }
let(:destination) { 'repositories' }
let(:backup_id) { 'backup_id' }
@@ -13,7 +14,8 @@ RSpec.describe Backup::Repositories do
described_class.new(
progress,
strategy: strategy,
- storages: storages
+ storages: storages,
+ paths: paths
)
end
@@ -107,6 +109,52 @@ RSpec.describe Backup::Repositories do
expect(strategy).to have_received(:finish!)
end
end
+
+ describe 'paths' do
+ let_it_be(:project) { create(:project, :repository) }
+
+ context 'project path' do
+ let(:paths) { [project.full_path] }
+
+ it 'calls enqueue for all repositories on the specified project', :aggregate_failures do
+ excluded_project = create(:project, :repository)
+ excluded_project_snippet = create(:project_snippet, :repository, project: excluded_project)
+ excluded_personal_snippet = create(:personal_snippet, :repository, author: excluded_project.first_owner)
+
+ subject.dump(destination, backup_id)
+
+ expect(strategy).to have_received(:start).with(:create, destination, backup_id: backup_id)
+ expect(strategy).not_to have_received(:enqueue).with(excluded_project, Gitlab::GlRepository::PROJECT)
+ expect(strategy).not_to have_received(:enqueue).with(excluded_project_snippet, Gitlab::GlRepository::SNIPPET)
+ expect(strategy).not_to have_received(:enqueue).with(excluded_personal_snippet, Gitlab::GlRepository::SNIPPET)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:finish!)
+ end
+ end
+
+ context 'group path' do
+ let(:paths) { [project.namespace.full_path] }
+
+ it 'calls enqueue for all repositories on all descendant projects', :aggregate_failures do
+ excluded_project = create(:project, :repository)
+ excluded_project_snippet = create(:project_snippet, :repository, project: excluded_project)
+ excluded_personal_snippet = create(:personal_snippet, :repository, author: excluded_project.first_owner)
+
+ subject.dump(destination, backup_id)
+
+ expect(strategy).to have_received(:start).with(:create, destination, backup_id: backup_id)
+ expect(strategy).not_to have_received(:enqueue).with(excluded_project, Gitlab::GlRepository::PROJECT)
+ expect(strategy).not_to have_received(:enqueue).with(excluded_project_snippet, Gitlab::GlRepository::SNIPPET)
+ expect(strategy).not_to have_received(:enqueue).with(excluded_personal_snippet, Gitlab::GlRepository::SNIPPET)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:finish!)
+ end
+ end
+ end
end
describe '#restore' do
@@ -138,7 +186,7 @@ RSpec.describe Backup::Repositories do
expect(pool_repository.object_pool.exists?).to be(true)
end
- it 'skips pools with no source project, :sidekiq_might_not_need_inline' do
+ it 'skips pools when no source project is found', :sidekiq_might_not_need_inline do
pool_repository = create(:pool_repository, state: :obsolete)
pool_repository.update_column(:source_project_id, nil)
@@ -208,5 +256,49 @@ RSpec.describe Backup::Repositories do
expect(strategy).to have_received(:finish!)
end
end
+
+ context 'paths' do
+ context 'project path' do
+ let(:paths) { [project.full_path] }
+
+ it 'calls enqueue for all repositories on the specified project', :aggregate_failures do
+ excluded_project = create(:project, :repository)
+ excluded_project_snippet = create(:project_snippet, :repository, project: excluded_project)
+ excluded_personal_snippet = create(:personal_snippet, :repository, author: excluded_project.first_owner)
+
+ subject.restore(destination)
+
+ expect(strategy).to have_received(:start).with(:restore, destination)
+ expect(strategy).not_to have_received(:enqueue).with(excluded_project, Gitlab::GlRepository::PROJECT)
+ expect(strategy).not_to have_received(:enqueue).with(excluded_project_snippet, Gitlab::GlRepository::SNIPPET)
+ expect(strategy).not_to have_received(:enqueue).with(excluded_personal_snippet, Gitlab::GlRepository::SNIPPET)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:finish!)
+ end
+ end
+
+ context 'group path' do
+ let(:paths) { [project.namespace.full_path] }
+
+ it 'calls enqueue for all repositories on all descendant projects', :aggregate_failures do
+ excluded_project = create(:project, :repository)
+ excluded_project_snippet = create(:project_snippet, :repository, project: excluded_project)
+ excluded_personal_snippet = create(:personal_snippet, :repository, author: excluded_project.first_owner)
+
+ subject.restore(destination)
+
+ expect(strategy).to have_received(:start).with(:restore, destination)
+ expect(strategy).not_to have_received(:enqueue).with(excluded_project, Gitlab::GlRepository::PROJECT)
+ expect(strategy).not_to have_received(:enqueue).with(excluded_project_snippet, Gitlab::GlRepository::SNIPPET)
+ expect(strategy).not_to have_received(:enqueue).with(excluded_personal_snippet, Gitlab::GlRepository::SNIPPET)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
+ expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:finish!)
+ end
+ end
+ end
end
end
diff --git a/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb b/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb
index c6f0e592cdf..d17deaa4736 100644
--- a/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb
@@ -93,7 +93,7 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter do
it 'includes default classes' do
doc = reference_filter("Issue #{reference}")
- expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue has-tooltip'
+ expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue'
end
it 'includes a data-project attribute' do
@@ -112,6 +112,14 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter do
expect(link.attr('data-issue')).to eq issue.id.to_s
end
+ it 'includes data attributes for issuable popover' do
+ doc = reference_filter("See #{reference}")
+ link = doc.css('a').first
+
+ expect(link.attr('data-project-path')).to eq project.full_path
+ expect(link.attr('data-iid')).to eq issue.iid.to_s
+ end
+
it 'includes a data-original attribute' do
doc = reference_filter("See #{reference}")
link = doc.css('a').first
@@ -201,7 +209,7 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter do
it 'includes default classes' do
doc = reference_filter("Fixed (#{reference}.)")
- expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue has-tooltip'
+ expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue'
end
it 'ignores invalid issue IDs on the referenced project' do
@@ -253,7 +261,7 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter do
it 'includes default classes' do
doc = reference_filter("Fixed (#{reference}.)")
- expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue has-tooltip'
+ expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue'
end
it 'ignores invalid issue IDs on the referenced project' do
@@ -305,7 +313,7 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter do
it 'includes default classes' do
doc = reference_filter("Fixed (#{reference}.)")
- expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue has-tooltip'
+ expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue'
end
it 'ignores invalid issue IDs on the referenced project' do
@@ -347,7 +355,7 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter do
it 'includes default classes' do
doc = reference_filter("Fixed (#{reference}.)")
- expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue has-tooltip'
+ expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue'
end
end
@@ -378,7 +386,7 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter do
it 'includes default classes' do
doc = reference_filter("Fixed (#{reference_link}.)")
- expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue has-tooltip'
+ expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue'
end
end
@@ -409,7 +417,7 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter do
it 'includes default classes' do
doc = reference_filter("Fixed (#{reference_link}.)")
- expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue has-tooltip'
+ expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue'
end
end
diff --git a/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb b/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb
index e5809ac6949..42e8cf1c857 100644
--- a/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb
@@ -77,9 +77,9 @@ RSpec.describe Banzai::Filter::References::MergeRequestReferenceFilter do
expect(reference_filter(act).to_html).to eq exp
end
- it 'has no title' do
+ it 'has the MR title in the title attribute' do
doc = reference_filter("Merge #{reference}")
- expect(doc.css('a').first.attr('title')).to eq ""
+ expect(doc.css('a').first.attr('title')).to eq(merge.title)
end
it 'escapes the title attribute' do
@@ -169,7 +169,6 @@ RSpec.describe Banzai::Filter::References::MergeRequestReferenceFilter do
expect(link.attr('data-project')).to eq project2.id.to_s
expect(link.attr('data-project-path')).to eq project2.full_path
expect(link.attr('data-iid')).to eq merge.iid.to_s
- expect(link.attr('data-mr-title')).to eq merge.title
end
it 'ignores invalid merge IDs on the referenced project' do
@@ -273,12 +272,6 @@ RSpec.describe Banzai::Filter::References::MergeRequestReferenceFilter do
expect(doc.text).to eq("See #{mr.to_reference(full: true)} (#{commit.short_id})")
end
- it 'has valid title attribute' do
- doc = reference_filter("See #{reference}")
-
- expect(doc.css('a').first.attr('title')).to eq(commit.title)
- end
-
it 'ignores invalid commit short_ids on link text' do
invalidate_commit_reference =
urls.project_merge_request_url(mr.project, mr) + "/diffs?commit_id=12345678"
diff --git a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
index 16c958ec10b..33adca0ddfc 100644
--- a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
+++ b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do
it "highlights as plaintext" do
result = filter('<pre><code>def fun end</code></pre>')
- expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">def fun end</span></code></pre><copy-code></copy-code></div>')
+ expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">def fun end</span></code></pre><copy-code></copy-code></div>')
end
include_examples "XSS prevention", ""
@@ -59,7 +59,7 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do
it "highlights as plaintext" do
result = filter('<pre lang="gnuplot"><code>This is a test</code></pre>')
- expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">This is a test</span></code></pre><copy-code></copy-code></div>')
+ expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="gnuplot" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">This is a test</span></code></pre><copy-code></copy-code></div>')
end
include_examples "XSS prevention", "gnuplot"
@@ -130,13 +130,13 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do
it "includes it in the highlighted code block" do
result = filter('<pre data-sourcepos="1:1-3:3"><code lang="plaintext">This is a test</code></pre>')
- expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">This is a test</span></code></pre><copy-code></copy-code></div>')
+ expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">This is a test</span></code></pre><copy-code></copy-code></div>')
end
it "escape sourcepos metadata to prevent XSS" do
result = filter('<pre data-sourcepos="&#34;%22 href=&#34;x&#34;></pre><base href=http://unsafe-website.com/><pre x=&#34;"><code></code></pre>')
- expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre data-sourcepos=\'"%22 href="x"&gt;&lt;/pre&gt;&lt;base href=http://unsafe-website.com/&gt;&lt;pre x="\' class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code></code></pre><copy-code></copy-code></div>')
+ expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre data-sourcepos=\'"%22 href="x"&gt;&lt;/pre&gt;&lt;base href=http://unsafe-website.com/&gt;&lt;pre x="\' class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" data-canonical-lang="" v-pre="true"><code></code></pre><copy-code></copy-code></div>')
end
end
@@ -150,7 +150,7 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do
it "highlights as plaintext" do
result = filter('<pre lang="ruby"><code>This is a test</code></pre>')
- expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight" lang="" v-pre="true"><code><span id="LC1" class="line" lang="">This is a test</span></code></pre><copy-code></copy-code></div>')
+ expect(result.to_html.delete("\n")).to eq('<div class="gl-relative markdown-code-block js-markdown-code"><pre class="code highlight js-syntax-highlight" lang="" data-canonical-lang="ruby" v-pre="true"><code><span id="LC1" class="line" lang="">This is a test</span></code></pre><copy-code></copy-code></div>')
end
include_examples "XSS prevention", "ruby"
diff --git a/spec/lib/bulk_imports/groups/stage_spec.rb b/spec/lib/bulk_imports/groups/stage_spec.rb
index 8ce25ff87d7..528d65615b1 100644
--- a/spec/lib/bulk_imports/groups/stage_spec.rb
+++ b/spec/lib/bulk_imports/groups/stage_spec.rb
@@ -4,47 +4,86 @@ require 'spec_helper'
RSpec.describe BulkImports::Groups::Stage do
let(:ancestor) { create(:group) }
- let(:group) { create(:group, parent: ancestor) }
+ let(:group) { build(:group, parent: ancestor) }
let(:bulk_import) { build(:bulk_import) }
- let(:entity) { build(:bulk_import_entity, bulk_import: bulk_import, group: group, destination_namespace: ancestor.full_path) }
-
- let(:pipelines) do
- [
- [0, BulkImports::Groups::Pipelines::GroupPipeline],
- [1, BulkImports::Groups::Pipelines::GroupAttributesPipeline],
- [1, BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline],
- [1, BulkImports::Groups::Pipelines::NamespaceSettingsPipeline],
- [1, BulkImports::Common::Pipelines::MembersPipeline],
- [1, BulkImports::Common::Pipelines::LabelsPipeline],
- [1, BulkImports::Common::Pipelines::MilestonesPipeline],
- [1, BulkImports::Common::Pipelines::BadgesPipeline],
- [2, BulkImports::Common::Pipelines::BoardsPipeline],
- [2, BulkImports::Common::Pipelines::UploadsPipeline]
- ]
+ let(:entity) do
+ build(:bulk_import_entity, bulk_import: bulk_import, group: group, destination_namespace: ancestor.full_path)
end
it 'raises error when initialized without a BulkImport' do
- expect { described_class.new({}) }.to raise_error(ArgumentError, 'Expected an argument of type ::BulkImports::Entity')
+ expect { described_class.new({}) }.to raise_error(
+ ArgumentError, 'Expected an argument of type ::BulkImports::Entity'
+ )
end
- describe '.pipelines' do
- it 'list all the pipelines with their stage number, ordered by stage' do
- expect(described_class.new(entity).pipelines & pipelines).to contain_exactly(*pipelines)
- expect(described_class.new(entity).pipelines.last.last).to eq(BulkImports::Common::Pipelines::EntityFinisher)
+ describe '#pipelines' do
+ it 'lists all the pipelines' do
+ pipelines = described_class.new(entity).pipelines
+
+ expect(pipelines).to include(
+ hash_including({
+ pipeline: BulkImports::Groups::Pipelines::GroupPipeline,
+ stage: 0
+ }),
+ hash_including({
+ pipeline: BulkImports::Groups::Pipelines::GroupAttributesPipeline,
+ stage: 1
+ })
+ )
+ expect(pipelines.last).to match(hash_including({ pipeline: BulkImports::Common::Pipelines::EntityFinisher }))
+ end
+
+ it 'only has pipelines with valid keys' do
+ pipeline_keys = described_class.new(entity).pipelines.collect(&:keys).flatten.uniq
+ allowed_keys = %i[pipeline stage minimum_source_version maximum_source_version]
+
+ expect(pipeline_keys - allowed_keys).to be_empty
+ end
+
+ it 'only has pipelines with valid versions' do
+ pipelines = described_class.new(entity).pipelines
+ minimum_source_versions = pipelines.collect { _1[:minimum_source_version] }.flatten.compact
+ maximum_source_versions = pipelines.collect { _1[:maximum_source_version] }.flatten.compact
+ version_regex = /^(\d+)\.(\d+)\.0$/
+
+ expect(minimum_source_versions.all? { version_regex =~ _1 }).to eq(true)
+ expect(maximum_source_versions.all? { version_regex =~ _1 }).to eq(true)
+ end
+
+ context 'when stages are out of order in the config hash' do
+ it 'lists all the pipelines ordered by stage' do
+ allow_next_instance_of(BulkImports::Groups::Stage) do |stage|
+ allow(stage).to receive(:config).and_return(
+ {
+ a: { stage: 2 },
+ b: { stage: 1 },
+ c: { stage: 0 },
+ d: { stage: 2 }
+ }
+ )
+ end
+
+ expected_stages = described_class.new(entity).pipelines.collect { _1[:stage] }
+ expect(expected_stages).to eq([0, 1, 2, 2])
+ end
end
context 'when bulk_import_projects feature flag is enabled' do
it 'includes project entities pipeline' do
stub_feature_flags(bulk_import_projects: true)
- expect(described_class.new(entity).pipelines).to include([1, BulkImports::Groups::Pipelines::ProjectEntitiesPipeline])
+ expect(described_class.new(entity).pipelines).to include(
+ hash_including({ pipeline: BulkImports::Groups::Pipelines::ProjectEntitiesPipeline })
+ )
end
context 'when feature flag is enabled on root ancestor level' do
it 'includes project entities pipeline' do
stub_feature_flags(bulk_import_projects: ancestor)
- expect(described_class.new(entity).pipelines).to include([1, BulkImports::Groups::Pipelines::ProjectEntitiesPipeline])
+ expect(described_class.new(entity).pipelines).to include(
+ hash_including({ pipeline: BulkImports::Groups::Pipelines::ProjectEntitiesPipeline })
+ )
end
end
@@ -54,7 +93,9 @@ RSpec.describe BulkImports::Groups::Stage do
entity = create(:bulk_import_entity, destination_namespace: '')
- expect(described_class.new(entity).pipelines).to include([1, BulkImports::Groups::Pipelines::ProjectEntitiesPipeline])
+ expect(described_class.new(entity).pipelines).to include(
+ hash_including({ pipeline: BulkImports::Groups::Pipelines::ProjectEntitiesPipeline })
+ )
end
end
end
@@ -63,7 +104,9 @@ RSpec.describe BulkImports::Groups::Stage do
it 'does not include project entities pipeline' do
stub_feature_flags(bulk_import_projects: false)
- expect(described_class.new(entity).pipelines.flatten).not_to include(BulkImports::Groups::Pipelines::ProjectEntitiesPipeline)
+ expect(described_class.new(entity).pipelines).not_to include(
+ hash_including({ pipeline: BulkImports::Groups::Pipelines::ProjectEntitiesPipeline })
+ )
end
end
end
diff --git a/spec/lib/bulk_imports/projects/pipelines/design_bundle_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/design_bundle_pipeline_spec.rb
new file mode 100644
index 00000000000..39b539ece21
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/design_bundle_pipeline_spec.rb
@@ -0,0 +1,171 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::DesignBundlePipeline do
+ let_it_be(:design) { create(:design, :with_file) }
+
+ let(:portable) { create(:project) }
+ let(:tmpdir) { Dir.mktmpdir }
+ let(:design_bundle_path) { File.join(tmpdir, 'design.bundle') }
+ let(:entity) { create(:bulk_import_entity, :project_entity, project: portable, source_full_path: 'test') }
+ let(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ subject(:pipeline) { described_class.new(context) }
+
+ before do
+ design.repository.bundle_to_disk(design_bundle_path)
+
+ allow(portable).to receive(:lfs_enabled?).and_return(true)
+ allow(Dir).to receive(:mktmpdir).with('bulk_imports').and_return(tmpdir)
+ end
+
+ after do
+ FileUtils.remove_entry(tmpdir) if Dir.exist?(tmpdir)
+ end
+
+ describe '#run' do
+ it 'imports design repository into destination project and removes tmpdir' do
+ allow(pipeline)
+ .to receive(:extract)
+ .and_return(BulkImports::Pipeline::ExtractedData.new(data: [design_bundle_path]))
+
+ expect(portable.design_repository).to receive(:create_from_bundle).with(design_bundle_path).and_call_original
+
+ pipeline.run
+
+ expect(portable.design_repository.exists?).to eq(true)
+ end
+ end
+
+ describe '#extract' do
+ it 'downloads & extracts design bundle filepath' do
+ download_service = instance_double("BulkImports::FileDownloadService")
+ decompression_service = instance_double("BulkImports::FileDecompressionService")
+ extraction_service = instance_double("BulkImports::ArchiveExtractionService")
+
+ expect(BulkImports::FileDownloadService)
+ .to receive(:new)
+ .with(
+ configuration: context.configuration,
+ relative_url: "/#{entity.pluralized_name}/test/export_relations/download?relation=design",
+ tmpdir: tmpdir,
+ filename: 'design.tar.gz')
+ .and_return(download_service)
+ expect(BulkImports::FileDecompressionService)
+ .to receive(:new)
+ .with(tmpdir: tmpdir, filename: 'design.tar.gz')
+ .and_return(decompression_service)
+ expect(BulkImports::ArchiveExtractionService)
+ .to receive(:new)
+ .with(tmpdir: tmpdir, filename: 'design.tar')
+ .and_return(extraction_service)
+
+ expect(download_service).to receive(:execute)
+ expect(decompression_service).to receive(:execute)
+ expect(extraction_service).to receive(:execute)
+
+ extracted_data = pipeline.extract(context)
+
+ expect(extracted_data.data).to contain_exactly(design_bundle_path)
+ end
+ end
+
+ describe '#load' do
+ before do
+ allow(pipeline)
+ .to receive(:extract)
+ .and_return(BulkImports::Pipeline::ExtractedData.new(data: [design_bundle_path]))
+ end
+
+ it 'creates design repository from bundle' do
+ expect(portable.design_repository).to receive(:create_from_bundle).with(design_bundle_path).and_call_original
+
+ pipeline.load(context, design_bundle_path)
+
+ expect(portable.design_repository.exists?).to eq(true)
+ end
+
+ context 'when lfs is disabled' do
+ it 'returns' do
+ allow(portable).to receive(:lfs_enabled?).and_return(false)
+
+ expect(portable.design_repository).not_to receive(:create_from_bundle)
+
+ pipeline.load(context, design_bundle_path)
+
+ expect(portable.design_repository.exists?).to eq(false)
+ end
+ end
+
+ context 'when file does not exist' do
+ it 'returns' do
+ expect(portable.design_repository).not_to receive(:create_from_bundle)
+
+ pipeline.load(context, File.join(tmpdir, 'bogus'))
+
+ expect(portable.design_repository.exists?).to eq(false)
+ end
+ end
+
+ context 'when path is directory' do
+ it 'returns' do
+ expect(portable.design_repository).not_to receive(:create_from_bundle)
+
+ pipeline.load(context, tmpdir)
+
+ expect(portable.design_repository.exists?).to eq(false)
+ end
+ end
+
+ context 'when path is symlink' do
+ it 'returns' do
+ symlink = File.join(tmpdir, 'symlink')
+
+ FileUtils.ln_s(File.join(tmpdir, design_bundle_path), symlink)
+
+ expect(portable.design_repository).not_to receive(:create_from_bundle)
+
+ pipeline.load(context, symlink)
+
+ expect(portable.design_repository.exists?).to eq(false)
+ end
+ end
+
+ context 'when path is not under tmpdir' do
+ it 'returns' do
+ expect { pipeline.load(context, '/home/test.txt') }
+ .to raise_error(StandardError, 'path /home/test.txt is not allowed')
+ end
+ end
+
+ context 'when path is being traversed' do
+ it 'raises an error' do
+ expect { pipeline.load(context, File.join(tmpdir, '..')) }
+ .to raise_error(Gitlab::Utils::PathTraversalAttackError, 'Invalid path')
+ end
+ end
+ end
+
+ describe '#after_run' do
+ it 'removes tmpdir' do
+ allow(FileUtils).to receive(:remove_entry).and_call_original
+ expect(FileUtils).to receive(:remove_entry).with(tmpdir).and_call_original
+
+ pipeline.after_run(nil)
+
+ expect(Dir.exist?(tmpdir)).to eq(false)
+ end
+
+ context 'when tmpdir does not exist' do
+ it 'does not attempt to remove tmpdir' do
+ FileUtils.remove_entry(tmpdir)
+
+ expect(FileUtils).not_to receive(:remove_entry).with(tmpdir)
+
+ pipeline.after_run(nil)
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb
index aa9c7486c27..4320d5dc119 100644
--- a/spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb
@@ -54,17 +54,13 @@ RSpec.describe BulkImports::Projects::Pipelines::ProjectAttributesPipeline do
subject(:pipeline) { described_class.new(context) }
- before do
- allow(Dir).to receive(:mktmpdir).with('bulk_imports').and_return(tmpdir)
- end
-
- after do
- FileUtils.remove_entry(tmpdir) if Dir.exist?(tmpdir)
- end
-
describe '#run' do
before do
- allow(pipeline).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: project_attributes))
+ allow_next_instance_of(BulkImports::Common::Extractors::JsonExtractor) do |extractor|
+ allow(extractor).to receive(:extract).and_return(
+ BulkImports::Pipeline::ExtractedData.new(data: project_attributes)
+ )
+ end
pipeline.run
end
@@ -84,46 +80,6 @@ RSpec.describe BulkImports::Projects::Pipelines::ProjectAttributesPipeline do
end
end
- describe '#extract' do
- before do
- file_download_service = instance_double("BulkImports::FileDownloadService")
- file_decompression_service = instance_double("BulkImports::FileDecompressionService")
-
- expect(BulkImports::FileDownloadService)
- .to receive(:new)
- .with(
- configuration: context.configuration,
- relative_url: "/#{entity.pluralized_name}/#{entity.source_full_path}/export_relations/download?relation=self",
- tmpdir: tmpdir,
- filename: 'self.json.gz')
- .and_return(file_download_service)
-
- expect(BulkImports::FileDecompressionService)
- .to receive(:new)
- .with(tmpdir: tmpdir, filename: 'self.json.gz')
- .and_return(file_decompression_service)
-
- expect(file_download_service).to receive(:execute)
- expect(file_decompression_service).to receive(:execute)
- end
-
- it 'downloads, decompresses & decodes json' do
- allow(pipeline).to receive(:json_attributes).and_return("{\"test\":\"test\"}")
-
- extracted_data = pipeline.extract(context)
-
- expect(extracted_data.data).to match_array([{ 'test' => 'test' }])
- end
-
- context 'when json parsing error occurs' do
- it 'raises an error' do
- allow(pipeline).to receive(:json_attributes).and_return("invalid")
-
- expect { pipeline.extract(context) }.to raise_error(BulkImports::Error)
- end
- end
- end
-
describe '#transform' do
it 'removes prohibited attributes from hash' do
input = { 'description' => 'description', 'issues' => [], 'milestones' => [], 'id' => 5 }
@@ -145,35 +101,13 @@ RSpec.describe BulkImports::Projects::Pipelines::ProjectAttributesPipeline do
end
end
- describe '#json_attributes' do
- it 'reads raw json from file' do
- filepath = File.join(tmpdir, 'self.json')
-
- FileUtils.touch(filepath)
- expect_file_read(filepath)
-
- pipeline.json_attributes
- end
- end
-
describe '#after_run' do
- it 'removes tmp dir' do
- allow(FileUtils).to receive(:remove_entry).and_call_original
- expect(FileUtils).to receive(:remove_entry).with(tmpdir).and_call_original
+ it 'calls extractor#remove_tmpdir' do
+ expect_next_instance_of(BulkImports::Common::Extractors::JsonExtractor) do |extractor|
+ expect(extractor).to receive(:remove_tmpdir)
+ end
pipeline.after_run(nil)
-
- expect(Dir.exist?(tmpdir)).to eq(false)
- end
-
- context 'when dir does not exist' do
- it 'does not attempt to remove tmpdir' do
- FileUtils.remove_entry(tmpdir)
-
- expect(FileUtils).not_to receive(:remove_entry).with(tmpdir)
-
- pipeline.after_run(nil)
- end
end
end
diff --git a/spec/lib/bulk_imports/projects/pipelines/releases_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/releases_pipeline_spec.rb
index 2279e66720e..2633598b48d 100644
--- a/spec/lib/bulk_imports/projects/pipelines/releases_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/releases_pipeline_spec.rb
@@ -31,7 +31,8 @@ RSpec.describe BulkImports::Projects::Pipelines::ReleasesPipeline do
'created_at' => '2019-12-26T10:17:14.621Z',
'updated_at' => '2019-12-26T10:17:14.621Z',
'released_at' => '2019-12-26T10:17:14.615Z',
- 'sha' => '901de3a8bd5573f4a049b1457d28bc1592ba6bf9'
+ 'sha' => '901de3a8bd5573f4a049b1457d28bc1592ba6bf9',
+ 'author_id' => user.id
}.merge(attributes)
end
@@ -45,11 +46,11 @@ RSpec.describe BulkImports::Projects::Pipelines::ReleasesPipeline do
allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [with_index]))
end
-
- pipeline.run
end
it 'imports release into destination project' do
+ pipeline.run
+
expect(project.releases.count).to eq(1)
imported_release = project.releases.last
@@ -62,6 +63,7 @@ RSpec.describe BulkImports::Projects::Pipelines::ReleasesPipeline do
expect(imported_release.updated_at.to_s).to eq('2019-12-26 10:17:14 UTC')
expect(imported_release.released_at.to_s).to eq('2019-12-26 10:17:14 UTC')
expect(imported_release.sha).to eq(release['sha'])
+ expect(imported_release.author_id).to eq(release['author_id'])
end
end
@@ -78,6 +80,8 @@ RSpec.describe BulkImports::Projects::Pipelines::ReleasesPipeline do
let(:attributes) {{ 'links' => [link] }}
it 'restores release links' do
+ pipeline.run
+
release_link = project.releases.last.links.first
aggregate_failures do
@@ -105,6 +109,8 @@ RSpec.describe BulkImports::Projects::Pipelines::ReleasesPipeline do
let(:attributes) {{ 'milestone_releases' => [{ 'milestone' => milestone }] }}
it 'restores release milestone' do
+ pipeline.run
+
release_milestone = project.releases.last.milestone_releases.first.milestone
aggregate_failures do
@@ -118,5 +124,33 @@ RSpec.describe BulkImports::Projects::Pipelines::ReleasesPipeline do
end
end
end
+
+ context 'evidences' do
+ it 'creates release evidence' do
+ expect(::Releases::CreateEvidenceWorker).to receive(:perform_async)
+
+ pipeline.run
+ end
+
+ context 'when release is historical' do
+ let(:attributes) {{ 'released_at' => '2018-12-26T10:17:14.621Z' }}
+
+ it 'does not create release evidence' do
+ expect(::Releases::CreateEvidenceWorker).not_to receive(:perform_async)
+
+ pipeline.run
+ end
+ end
+
+ context 'when release is upcoming' do
+ let(:attributes) {{ 'released_at' => Time.zone.now + 30.days }}
+
+ it 'does not create release evidence' do
+ expect(::Releases::CreateEvidenceWorker).not_to receive(:perform_async)
+
+ pipeline.run
+ end
+ end
+ end
end
end
diff --git a/spec/lib/bulk_imports/projects/pipelines/repository_bundle_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/repository_bundle_pipeline_spec.rb
new file mode 100644
index 00000000000..712c37ee578
--- /dev/null
+++ b/spec/lib/bulk_imports/projects/pipelines/repository_bundle_pipeline_spec.rb
@@ -0,0 +1,169 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Projects::Pipelines::RepositoryBundlePipeline do
+ let_it_be(:source) { create(:project, :repository) }
+
+ let(:portable) { create(:project) }
+ let(:tmpdir) { Dir.mktmpdir }
+ let(:bundle_path) { File.join(tmpdir, 'repository.bundle') }
+ let(:entity) { create(:bulk_import_entity, :project_entity, project: portable, source_full_path: 'test') }
+ let(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ subject(:pipeline) { described_class.new(context) }
+
+ before do
+ source.repository.bundle_to_disk(bundle_path)
+
+ allow(Dir).to receive(:mktmpdir).with('bulk_imports').and_return(tmpdir)
+ end
+
+ after do
+ FileUtils.remove_entry(tmpdir) if Dir.exist?(tmpdir)
+ end
+
+ describe '#run' do
+ before do
+ allow(pipeline).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [bundle_path]))
+ end
+
+ it 'imports repository into destination project and removes tmpdir' do
+ expect(portable.repository).to receive(:create_from_bundle).with(bundle_path).and_call_original
+
+ pipeline.run
+
+ expect(portable.repository.exists?).to eq(true)
+ expect(Dir.exist?(tmpdir)).to eq(false)
+ end
+
+ context 'when something goes wrong during import' do
+ it 'marks entity as failed' do
+ allow(pipeline).to receive(:load).and_raise(StandardError)
+
+ pipeline.run
+
+ expect(entity.failed?).to eq(true)
+ end
+ end
+ end
+
+ describe '#extract' do
+ it 'downloads & extracts repository bundle filepath' do
+ download_service = instance_double("BulkImports::FileDownloadService")
+ decompression_service = instance_double("BulkImports::FileDecompressionService")
+ extraction_service = instance_double("BulkImports::ArchiveExtractionService")
+
+ expect(BulkImports::FileDownloadService)
+ .to receive(:new)
+ .with(
+ configuration: context.configuration,
+ relative_url: "/#{entity.pluralized_name}/test/export_relations/download?relation=repository",
+ tmpdir: tmpdir,
+ filename: 'repository.tar.gz')
+ .and_return(download_service)
+ expect(BulkImports::FileDecompressionService)
+ .to receive(:new)
+ .with(tmpdir: tmpdir, filename: 'repository.tar.gz')
+ .and_return(decompression_service)
+ expect(BulkImports::ArchiveExtractionService)
+ .to receive(:new)
+ .with(tmpdir: tmpdir, filename: 'repository.tar')
+ .and_return(extraction_service)
+
+ expect(download_service).to receive(:execute)
+ expect(decompression_service).to receive(:execute)
+ expect(extraction_service).to receive(:execute)
+
+ extracted_data = pipeline.extract(context)
+
+ expect(extracted_data.data).to contain_exactly(bundle_path)
+ end
+ end
+
+ describe '#load' do
+ before do
+ allow(pipeline)
+ .to receive(:extract)
+ .and_return(BulkImports::Pipeline::ExtractedData.new(data: [bundle_path]))
+ end
+
+ it 'creates repository from bundle' do
+ expect(portable.repository).to receive(:create_from_bundle).with(bundle_path).and_call_original
+
+ pipeline.load(context, bundle_path)
+
+ expect(portable.repository.exists?).to eq(true)
+ end
+
+ context 'when file does not exist' do
+ it 'returns' do
+ expect(portable.repository).not_to receive(:create_from_bundle)
+
+ pipeline.load(context, File.join(tmpdir, 'bogus'))
+
+ expect(portable.repository.exists?).to eq(false)
+ end
+ end
+
+ context 'when path is directory' do
+ it 'returns' do
+ expect(portable.repository).not_to receive(:create_from_bundle)
+
+ pipeline.load(context, tmpdir)
+
+ expect(portable.repository.exists?).to eq(false)
+ end
+ end
+
+ context 'when path is symlink' do
+ it 'returns' do
+ symlink = File.join(tmpdir, 'symlink')
+
+ FileUtils.ln_s(File.join(tmpdir, bundle_path), symlink)
+
+ expect(portable.repository).not_to receive(:create_from_bundle)
+
+ pipeline.load(context, symlink)
+
+ expect(portable.repository.exists?).to eq(false)
+ end
+ end
+
+ context 'when path is not under tmpdir' do
+ it 'returns' do
+ expect { pipeline.load(context, '/home/test.txt') }
+ .to raise_error(StandardError, 'path /home/test.txt is not allowed')
+ end
+ end
+
+ context 'when path is being traversed' do
+ it 'raises an error' do
+ expect { pipeline.load(context, File.join(tmpdir, '..')) }
+ .to raise_error(Gitlab::Utils::PathTraversalAttackError, 'Invalid path')
+ end
+ end
+ end
+
+ describe '#after_run' do
+ it 'removes tmpdir' do
+ allow(FileUtils).to receive(:remove_entry).and_call_original
+ expect(FileUtils).to receive(:remove_entry).with(tmpdir).and_call_original
+
+ pipeline.after_run(nil)
+
+ expect(Dir.exist?(tmpdir)).to eq(false)
+ end
+
+ context 'when tmpdir does not exist' do
+ it 'does not attempt to remove tmpdir' do
+ FileUtils.remove_entry(tmpdir)
+
+ expect(FileUtils).not_to receive(:remove_entry).with(tmpdir)
+
+ pipeline.after_run(nil)
+ end
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/projects/stage_spec.rb b/spec/lib/bulk_imports/projects/stage_spec.rb
index e81d9cc5fb4..fc670d10655 100644
--- a/spec/lib/bulk_imports/projects/stage_spec.rb
+++ b/spec/lib/bulk_imports/projects/stage_spec.rb
@@ -2,38 +2,7 @@
require 'spec_helper'
-# Any new stages must be added to
-# `ee/spec/lib/ee/bulk_imports/projects/stage_spec.rb` as well.
RSpec.describe BulkImports::Projects::Stage do
- let(:pipelines) do
- [
- [0, BulkImports::Projects::Pipelines::ProjectPipeline],
- [1, BulkImports::Projects::Pipelines::RepositoryPipeline],
- [1, BulkImports::Projects::Pipelines::ProjectAttributesPipeline],
- [2, BulkImports::Common::Pipelines::LabelsPipeline],
- [2, BulkImports::Common::Pipelines::MilestonesPipeline],
- [2, BulkImports::Common::Pipelines::BadgesPipeline],
- [3, BulkImports::Projects::Pipelines::IssuesPipeline],
- [3, BulkImports::Projects::Pipelines::SnippetsPipeline],
- [4, BulkImports::Projects::Pipelines::SnippetsRepositoryPipeline],
- [4, BulkImports::Common::Pipelines::BoardsPipeline],
- [4, BulkImports::Projects::Pipelines::MergeRequestsPipeline],
- [4, BulkImports::Projects::Pipelines::ExternalPullRequestsPipeline],
- [4, BulkImports::Projects::Pipelines::ProtectedBranchesPipeline],
- [4, BulkImports::Projects::Pipelines::ProjectFeaturePipeline],
- [4, BulkImports::Projects::Pipelines::ContainerExpirationPolicyPipeline],
- [4, BulkImports::Projects::Pipelines::ServiceDeskSettingPipeline],
- [4, BulkImports::Projects::Pipelines::ReleasesPipeline],
- [5, BulkImports::Projects::Pipelines::CiPipelinesPipeline],
- [5, BulkImports::Common::Pipelines::WikiPipeline],
- [5, BulkImports::Common::Pipelines::UploadsPipeline],
- [5, BulkImports::Common::Pipelines::LfsObjectsPipeline],
- [5, BulkImports::Projects::Pipelines::AutoDevopsPipeline],
- [5, BulkImports::Projects::Pipelines::PipelineSchedulesPipeline],
- [6, BulkImports::Common::Pipelines::EntityFinisher]
- ]
- end
-
subject do
entity = build(:bulk_import_entity, :project_entity)
@@ -41,9 +10,49 @@ RSpec.describe BulkImports::Projects::Stage do
end
describe '#pipelines' do
- it 'list all the pipelines with their stage number, ordered by stage' do
- expect(subject.pipelines & pipelines).to contain_exactly(*pipelines)
- expect(subject.pipelines.last.last).to eq(BulkImports::Common::Pipelines::EntityFinisher)
+ it 'list all the pipelines' do
+ pipelines = subject.pipelines
+
+ expect(pipelines).to include(
+ hash_including({ stage: 0, pipeline: BulkImports::Projects::Pipelines::ProjectPipeline }),
+ hash_including({ stage: 1, pipeline: BulkImports::Projects::Pipelines::RepositoryPipeline })
+ )
+ expect(pipelines.last).to match(hash_including({ pipeline: BulkImports::Common::Pipelines::EntityFinisher }))
+ end
+
+ it 'only have pipelines with valid keys' do
+ pipeline_keys = subject.pipelines.collect(&:keys).flatten.uniq
+ allowed_keys = %i[pipeline stage minimum_source_version maximum_source_version]
+
+ expect(pipeline_keys - allowed_keys).to be_empty
+ end
+
+ it 'only has pipelines with valid versions' do
+ pipelines = subject.pipelines
+ minimum_source_versions = pipelines.collect { _1[:minimum_source_version] }.flatten.compact
+ maximum_source_versions = pipelines.collect { _1[:maximum_source_version] }.flatten.compact
+ version_regex = /^(\d+)\.(\d+)\.0$/
+
+ expect(minimum_source_versions.all? { version_regex =~ _1 }).to eq(true)
+ expect(maximum_source_versions.all? { version_regex =~ _1 }).to eq(true)
+ end
+
+ context 'when stages are out of order in the config hash' do
+ it 'list all the pipelines ordered by stage' do
+ allow_next_instance_of(BulkImports::Projects::Stage) do |stage|
+ allow(stage).to receive(:config).and_return(
+ {
+ a: { stage: 2 },
+ b: { stage: 1 },
+ c: { stage: 0 },
+ d: { stage: 2 }
+ }
+ )
+ end
+
+ expected_stages = subject.pipelines.collect { _1[:stage] }
+ expect(expected_stages).to eq([0, 1, 2, 2])
+ end
end
end
end
diff --git a/spec/lib/container_registry/migration_spec.rb b/spec/lib/container_registry/migration_spec.rb
index 81dac354b8b..fea66d3c8f4 100644
--- a/spec/lib/container_registry/migration_spec.rb
+++ b/spec/lib/container_registry/migration_spec.rb
@@ -58,17 +58,20 @@ RSpec.describe ContainerRegistry::Migration do
describe '.capacity' do
subject { described_class.capacity }
- where(:ff_1_enabled, :ff_2_enabled, :ff_5_enabled, :ff_10_enabled, :ff_25_enabled, :expected_result) do
- false | false | false | false | false | 0
- true | false | false | false | false | 1
- false | true | false | false | false | 2
- true | true | false | false | false | 2
- false | false | true | false | false | 5
- true | true | true | false | false | 5
- false | false | false | true | false | 10
- true | true | true | true | false | 10
- false | false | false | false | true | 25
- true | true | true | true | true | 25
+ where(:ff_1_enabled, :ff_2_enabled, :ff_5_enabled,
+ :ff_10_enabled, :ff_25_enabled, :ff_40_enabled, :expected_result) do
+ false | false | false | false | false | false | 0
+ true | false | false | false | false | false | 1
+ false | true | false | false | false | false | 2
+ true | true | false | false | false | false | 2
+ false | false | true | false | false | false | 5
+ true | true | true | false | false | false | 5
+ false | false | false | true | false | false | 10
+ true | true | true | true | false | false | 10
+ false | false | false | false | true | false | 25
+ true | true | true | true | true | false | 25
+ false | false | false | false | false | true | 40
+ true | true | true | true | true | true | 40
end
with_them do
@@ -78,7 +81,8 @@ RSpec.describe ContainerRegistry::Migration do
container_registry_migration_phase2_capacity_2: ff_2_enabled,
container_registry_migration_phase2_capacity_5: ff_5_enabled,
container_registry_migration_phase2_capacity_10: ff_10_enabled,
- container_registry_migration_phase2_capacity_25: ff_25_enabled
+ container_registry_migration_phase2_capacity_25: ff_25_enabled,
+ container_registry_migration_phase2_capacity_40: ff_40_enabled
)
end
@@ -182,6 +186,18 @@ RSpec.describe ContainerRegistry::Migration do
end
end
+ describe '.pre_import_tags_rate' do
+ let(:value) { 2.5 }
+
+ before do
+ stub_application_setting(container_registry_pre_import_tags_rate: value)
+ end
+
+ it 'returns the matching application_setting' do
+ expect(described_class.pre_import_tags_rate).to eq(value)
+ end
+ end
+
describe '.target_plans' do
subject { described_class.target_plans }
@@ -214,31 +230,30 @@ RSpec.describe ContainerRegistry::Migration do
end
end
- describe '.enqueue_twice?' do
- subject { described_class.enqueue_twice? }
+ describe '.delete_container_repository_worker_support?' do
+ subject { described_class.delete_container_repository_worker_support? }
it { is_expected.to eq(true) }
context 'feature flag disabled' do
before do
- stub_feature_flags(container_registry_migration_phase2_enqueue_twice: false)
+ stub_feature_flags(container_registry_migration_phase2_delete_container_repository_worker_support: false)
end
it { is_expected.to eq(false) }
end
end
- describe '.enqueue_loop?' do
- subject { described_class.enqueuer_loop? }
+ describe '.dynamic_pre_import_timeout_for' do
+ let(:container_repository) { build(:container_repository) }
- it { is_expected.to eq(true) }
+ subject { described_class.dynamic_pre_import_timeout_for(container_repository) }
- context 'feature flag disabled' do
- before do
- stub_feature_flags(container_registry_migration_phase2_enqueuer_loop: false)
- end
+ it 'returns the expected seconds' do
+ stub_application_setting(container_registry_pre_import_tags_rate: 0.6)
+ expect(container_repository).to receive(:tags_count).and_return(50)
- it { is_expected.to eq(false) }
+ expect(subject).to eq((0.6 * 50).seconds)
end
end
end
diff --git a/spec/lib/error_tracking/stacktrace_builder_spec.rb b/spec/lib/error_tracking/stacktrace_builder_spec.rb
new file mode 100644
index 00000000000..46d0bde8122
--- /dev/null
+++ b/spec/lib/error_tracking/stacktrace_builder_spec.rb
@@ -0,0 +1,95 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'support/helpers/fast_rails_root'
+require 'oj'
+
+RSpec.describe ErrorTracking::StacktraceBuilder do
+ include FastRailsRoot
+
+ describe '#stacktrace' do
+ let(:original_payload) { Gitlab::Json.parse(File.read(rails_root_join('spec/fixtures', payload_file))) }
+ let(:payload) { original_payload }
+ let(:payload_file) { 'error_tracking/parsed_event.json' }
+
+ subject(:stacktrace) { described_class.new(payload).stacktrace }
+
+ context 'with full error context' do
+ it 'generates a correct stacktrace in expected format' do
+ expected_context = [
+ [132, " end\n"],
+ [133, "\n"],
+ [134, " begin\n"],
+ [135, " block.call(work, *extra)\n"],
+ [136, " rescue Exception => e\n"],
+ [137, " STDERR.puts \"Error reached top of thread-pool: #\{e.message\} (#\{e.class\})\"\n"],
+ [138, " end\n"]
+ ]
+
+ expected_entry = {
+ 'lineNo' => 135,
+ 'context' => expected_context,
+ 'filename' => 'puma/thread_pool.rb',
+ 'function' => 'block in spawn_thread',
+ 'colNo' => 0
+ }
+
+ expect(stacktrace).to be_kind_of(Array)
+ expect(stacktrace.first).to eq(expected_entry)
+ end
+ end
+
+ context 'when error context is missing' do
+ let(:payload_file) { 'error_tracking/browser_event.json' }
+
+ it 'generates a stacktrace without context' do
+ expected_entry = {
+ 'lineNo' => 6395,
+ 'context' => [],
+ 'filename' => 'webpack-internal:///./node_modules/vue/dist/vue.runtime.esm.js',
+ 'function' => 'hydrate',
+ 'colNo' => 0
+ }
+
+ expect(stacktrace).to be_kind_of(Array)
+ expect(stacktrace.first).to eq(expected_entry)
+ end
+ end
+
+ context 'with empty payload' do
+ let(:payload) { {} }
+
+ it { is_expected.to eq([]) }
+ end
+
+ context 'without exception field' do
+ let(:payload) { original_payload.except('exception') }
+
+ it { is_expected.to eq([]) }
+ end
+
+ context 'without exception.values field' do
+ before do
+ original_payload['exception'].delete('values')
+ end
+
+ it { is_expected.to eq([]) }
+ end
+
+ context 'without any exception.values[].stacktrace fields' do
+ before do
+ original_payload.dig('exception', 'values').each { |value| value['stacktrace'] = '' }
+ end
+
+ it { is_expected.to eq([]) }
+ end
+
+ context 'without any exception.values[].stacktrace.frame fields' do
+ before do
+ original_payload.dig('exception', 'values').each { |value| value['stacktrace'].delete('frames') }
+ end
+
+ it { is_expected.to eq([]) }
+ end
+ end
+end
diff --git a/spec/lib/generators/gitlab/usage_metric_generator_spec.rb b/spec/lib/generators/gitlab/usage_metric_generator_spec.rb
index 207ecb88aad..2bf50a5fa24 100644
--- a/spec/lib/generators/gitlab/usage_metric_generator_spec.rb
+++ b/spec/lib/generators/gitlab/usage_metric_generator_spec.rb
@@ -32,6 +32,7 @@ RSpec.describe Gitlab::UsageMetricGenerator, :silence_stdout do
let(:sample_metric_dir) { 'lib/generators/gitlab/usage_metric_generator' }
let(:generic_sample_metric) { fixture_file(File.join(sample_metric_dir, 'sample_generic_metric.rb')) }
let(:database_sample_metric) { fixture_file(File.join(sample_metric_dir, 'sample_database_metric.rb')) }
+ let(:numbers_sample_metric) { fixture_file(File.join(sample_metric_dir, 'sample_numbers_metric.rb')) }
let(:sample_spec) { fixture_file(File.join(sample_metric_dir, 'sample_metric_test.rb')) }
it 'creates CE metric instrumentation files using the template' do
@@ -63,6 +64,17 @@ RSpec.describe Gitlab::UsageMetricGenerator, :silence_stdout do
end
end
+ context 'for numbers type' do
+ let(:options) { { 'type' => 'numbers', 'operation' => 'add' } }
+
+ it 'creates the metric instrumentation file using the template' do
+ described_class.new(args, options).invoke_all
+
+ expect_generated_file(ce_temp_dir, 'count_foo_metric.rb', numbers_sample_metric)
+ expect_generated_file(spec_ce_temp_dir, 'count_foo_metric_spec.rb', sample_spec)
+ end
+ end
+
context 'with type option missing' do
let(:options) { {} }
@@ -94,5 +106,21 @@ RSpec.describe Gitlab::UsageMetricGenerator, :silence_stdout do
expect { described_class.new(args, options).invoke_all }.to raise_error(ArgumentError, /Unknown operation 'sleep'/)
end
end
+
+ context 'without operation for numbers metric' do
+ let(:options) { { 'type' => 'numbers' } }
+
+ it 'raises an ArgumentError' do
+ expect { described_class.new(args, options).invoke_all }.to raise_error(ArgumentError, /Unknown operation ''/)
+ end
+ end
+
+ context 'with wrong operation for numbers metric' do
+ let(:options) { { 'type' => 'numbers', 'operation' => 'sleep' } }
+
+ it 'raises an ArgumentError' do
+ expect { described_class.new(args, options).invoke_all }.to raise_error(ArgumentError, /Unknown operation 'sleep'/)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb
index 8eb75feaa8d..7e36d89a2a1 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb
@@ -27,6 +27,26 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::Aggregated::RecordsFetcher do
expect(returned_iids).to eq(expected_issue_ids)
end
+
+ it 'passes a hash with all expected attributes to the serializer' do
+ expected_attributes = [
+ 'created_at',
+ 'id',
+ 'iid',
+ 'title',
+ :end_event_timestamp,
+ :start_event_timestamp,
+ :total_time,
+ :author,
+ :namespace_path,
+ :project_path
+ ]
+ serializer = instance_double(records_fetcher.send(:serializer).class.name)
+ allow(records_fetcher).to receive(:serializer).and_return(serializer)
+ expect(serializer).to receive(:represent).at_least(:once).with(hash_including(*expected_attributes)).and_return({})
+
+ records_fetcher.serialized_records
+ end
end
describe '#serialized_records' do
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb
index dc46dade87e..ec394bb9f05 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb
@@ -8,15 +8,18 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::RecordsFetcher do
end
let(:params) { { from: 1.year.ago, current_user: user } }
+ let(:records_fetcher) do
+ Gitlab::Analytics::CycleAnalytics::DataCollector.new(
+ stage: stage,
+ params: params
+ ).records_fetcher
+ end
let_it_be(:project) { create(:project, :empty_repo) }
let_it_be(:user) { create(:user) }
subject do
- Gitlab::Analytics::CycleAnalytics::DataCollector.new(
- stage: stage,
- params: params
- ).records_fetcher.serialized_records
+ records_fetcher.serialized_records
end
describe '#serialized_records' do
@@ -28,6 +31,26 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::RecordsFetcher do
it 'returns all records' do
expect(subject.size).to eq(2)
end
+
+ it 'passes a hash with all expected attributes to the serializer' do
+ expected_attributes = [
+ 'created_at',
+ 'id',
+ 'iid',
+ 'title',
+ 'end_event_timestamp',
+ 'start_event_timestamp',
+ 'total_time',
+ :author,
+ :namespace_path,
+ :project_path
+ ]
+ serializer = instance_double(records_fetcher.send(:serializer).class.name)
+ allow(records_fetcher).to receive(:serializer).and_return(serializer)
+ expect(serializer).to receive(:represent).twice.with(hash_including(*expected_attributes)).and_return({})
+
+ subject
+ end
end
describe 'for issue based stage' do
diff --git a/spec/lib/gitlab/asciidoc_spec.rb b/spec/lib/gitlab/asciidoc_spec.rb
index d86191ca0c2..bfea1315d90 100644
--- a/spec/lib/gitlab/asciidoc_spec.rb
+++ b/spec/lib/gitlab/asciidoc_spec.rb
@@ -94,7 +94,7 @@ module Gitlab
# Move this test back to the items hash when removing `use_cmark_renderer` feature flag.
it "does not convert dangerous fenced code with inline script into HTML" do
input = '```mypre"><script>alert(3)</script>'
- output = "<div>\n<div>\n<div class=\"gl-relative markdown-code-block js-markdown-code\">\n<pre class=\"code highlight js-syntax-highlight language-plaintext\" lang=\"plaintext\" v-pre=\"true\"><code></code></pre>\n<copy-code></copy-code>\n</div>\n</div>\n</div>"
+ output = "<div>\n<div>\n<div class=\"gl-relative markdown-code-block js-markdown-code\">\n<pre class=\"code highlight js-syntax-highlight language-plaintext\" lang=\"plaintext\" data-canonical-lang=\"mypre\" v-pre=\"true\"><code></code></pre>\n<copy-code></copy-code>\n</div>\n</div>\n</div>"
expect(render(input, context)).to include(output)
end
@@ -360,7 +360,7 @@ module Gitlab
<div>
<div>
<div class="gl-relative markdown-code-block js-markdown-code">
- <pre class="code highlight js-syntax-highlight language-javascript" lang="javascript" v-pre="true"><code><span id="LC1" class="line" lang="javascript"><span class="nx">console</span><span class="p">.</span><span class="nx">log</span><span class="p">(</span><span class="dl">'</span><span class="s1">hello world</span><span class="dl">'</span><span class="p">)</span></span></code></pre>
+ <pre class="code highlight js-syntax-highlight language-javascript" lang="javascript" data-canonical-lang="js" v-pre="true"><code><span id="LC1" class="line" lang="javascript"><span class="nx">console</span><span class="p">.</span><span class="nx">log</span><span class="p">(</span><span class="dl">'</span><span class="s1">hello world</span><span class="dl">'</span><span class="p">)</span></span></code></pre>
<copy-code></copy-code>
</div>
</div>
@@ -390,7 +390,7 @@ module Gitlab
<div>class.cpp</div>
<div>
<div class="gl-relative markdown-code-block js-markdown-code">
- <pre class="code highlight js-syntax-highlight language-cpp" lang="cpp" v-pre="true"><code><span id="LC1" class="line" lang="cpp"><span class="cp">#include &lt;stdio.h&gt;</span></span>
+ <pre class="code highlight js-syntax-highlight language-cpp" lang="cpp" data-canonical-lang="c++" v-pre="true"><code><span id="LC1" class="line" lang="cpp"><span class="cp">#include</span> <span class="cpf">&lt;stdio.h&gt;</span></span>
<span id="LC2" class="line" lang="cpp"></span>
<span id="LC3" class="line" lang="cpp"><span class="k">for</span> <span class="p">(</span><span class="kt">int</span> <span class="n">i</span> <span class="o">=</span> <span class="mi">0</span><span class="p">;</span> <span class="n">i</span> <span class="o">&lt;</span> <span class="mi">5</span><span class="p">;</span> <span class="n">i</span><span class="o">++</span><span class="p">)</span> <span class="p">{</span></span>
<span id="LC4" class="line" lang="cpp"> <span class="n">std</span><span class="o">::</span><span class="n">cout</span><span class="o">&lt;&lt;</span><span class="s">"*"</span><span class="o">&lt;&lt;</span><span class="n">std</span><span class="o">::</span><span class="n">endl</span><span class="p">;</span></span>
diff --git a/spec/lib/gitlab/audit/unauthenticated_author_spec.rb b/spec/lib/gitlab/audit/unauthenticated_author_spec.rb
index 4e5c477fc2a..70716ee7f4c 100644
--- a/spec/lib/gitlab/audit/unauthenticated_author_spec.rb
+++ b/spec/lib/gitlab/audit/unauthenticated_author_spec.rb
@@ -13,5 +13,11 @@ RSpec.describe Gitlab::Audit::UnauthenticatedAuthor do
expect(described_class.new)
.to have_attributes(id: -1, name: 'An unauthenticated user')
end
+
+ describe '#impersonated?' do
+ it 'returns false' do
+ expect(described_class.new.impersonated?).to be(false)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/auth/o_auth/user_spec.rb b/spec/lib/gitlab/auth/o_auth/user_spec.rb
index 6cb9085c3ad..5f5e7f211f8 100644
--- a/spec/lib/gitlab/auth/o_auth/user_spec.rb
+++ b/spec/lib/gitlab/auth/o_auth/user_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Auth::OAuth::User do
include LdapHelpers
+ include TermsHelper
let(:oauth_user) { described_class.new(auth_hash) }
let(:oauth_user_2) { described_class.new(auth_hash_2) }
@@ -144,6 +145,49 @@ RSpec.describe Gitlab::Auth::OAuth::User do
expect(gl_user).to be_password_automatically_set
end
+ context 'terms of service' do
+ context 'when terms are enforced' do
+ before do
+ enforce_terms
+ end
+
+ context 'when feature flag update_oauth_registration_flow is enabled' do
+ before do
+ stub_feature_flags(update_oauth_registration_flow: true)
+ end
+
+ it 'creates the user with accepted terms' do
+ oauth_user.save # rubocop:disable Rails/SaveBang
+
+ expect(gl_user).to be_persisted
+ expect(gl_user.terms_accepted?).to be(true)
+ end
+ end
+
+ context 'when feature flag update_oauth_registration_flow is disabled' do
+ before do
+ stub_feature_flags(update_oauth_registration_flow: false)
+ end
+
+ it 'creates the user without accepted terms' do
+ oauth_user.save # rubocop:disable Rails/SaveBang
+
+ expect(gl_user).to be_persisted
+ expect(gl_user.terms_accepted?).to be(false)
+ end
+ end
+ end
+
+ context 'when terms are not enforced' do
+ it 'creates the user without accepted terms' do
+ oauth_user.save # rubocop:disable Rails/SaveBang
+
+ expect(gl_user).to be_persisted
+ expect(gl_user.terms_accepted?).to be(false)
+ end
+ end
+ end
+
shared_examples 'to verify compliance with allow_single_sign_on' do
context 'provider is marked as external' do
it 'marks user as external' do
diff --git a/spec/lib/gitlab/background_migration/backfill_namespace_id_for_project_route_spec.rb b/spec/lib/gitlab/background_migration/backfill_namespace_id_for_project_route_spec.rb
index 2dcd4645c84..2949bc068c8 100644
--- a/spec/lib/gitlab/background_migration/backfill_namespace_id_for_project_route_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_namespace_id_for_project_route_spec.rb
@@ -1,8 +1,8 @@
# frozen_string_literal: true
require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceIdForProjectRoute do
+# this needs the schema to be before we introduce the not null constraint on routes#namespace_id
+RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceIdForProjectRoute, schema: 20220606060825 do
let(:migration) { described_class.new }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
diff --git a/spec/lib/gitlab/background_migration/backfill_project_feature_package_registry_access_level_spec.rb b/spec/lib/gitlab/background_migration/backfill_project_feature_package_registry_access_level_spec.rb
new file mode 100644
index 00000000000..fd6c055b9f6
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_project_feature_package_registry_access_level_spec.rb
@@ -0,0 +1,124 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillProjectFeaturePackageRegistryAccessLevel do
+ let(:non_null_project_features) { { pages_access_level: 20 } }
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:project_features) { table(:project_features) }
+
+ let(:namespace1) { namespaces.create!(name: 'namespace 1', path: 'namespace1') }
+ let(:namespace2) { namespaces.create!(name: 'namespace 2', path: 'namespace2') }
+ let(:namespace3) { namespaces.create!(name: 'namespace 3', path: 'namespace3') }
+ let(:namespace4) { namespaces.create!(name: 'namespace 4', path: 'namespace4') }
+ let(:namespace5) { namespaces.create!(name: 'namespace 5', path: 'namespace5') }
+ let(:namespace6) { namespaces.create!(name: 'namespace 6', path: 'namespace6') }
+
+ let(:project1) do
+ projects.create!(namespace_id: namespace1.id, project_namespace_id: namespace1.id, packages_enabled: false)
+ end
+
+ let(:project2) do
+ projects.create!(namespace_id: namespace2.id, project_namespace_id: namespace2.id, packages_enabled: nil)
+ end
+
+ let(:project3) do
+ projects.create!(
+ namespace_id: namespace3.id,
+ project_namespace_id: namespace3.id,
+ packages_enabled: true,
+ visibility_level: Gitlab::VisibilityLevel::PRIVATE
+ )
+ end
+
+ let(:project4) do
+ projects.create!(
+ namespace_id: namespace4.id,
+ project_namespace_id: namespace4.id,
+ packages_enabled: true, visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+ end
+
+ let(:project5) do
+ projects.create!(
+ namespace_id: namespace5.id,
+ project_namespace_id: namespace5.id,
+ packages_enabled: true,
+ visibility_level: Gitlab::VisibilityLevel::PUBLIC
+ )
+ end
+
+ let(:project6) do
+ projects.create!(namespace_id: namespace6.id, project_namespace_id: namespace6.id, packages_enabled: false)
+ end
+
+ let!(:project_feature1) do
+ project_features.create!(
+ project_id: project1.id,
+ package_registry_access_level: ProjectFeature::ENABLED,
+ **non_null_project_features
+ )
+ end
+
+ let!(:project_feature2) do
+ project_features.create!(
+ project_id: project2.id,
+ package_registry_access_level: ProjectFeature::ENABLED,
+ **non_null_project_features
+ )
+ end
+
+ let!(:project_feature3) do
+ project_features.create!(
+ project_id: project3.id,
+ package_registry_access_level: ProjectFeature::DISABLED,
+ **non_null_project_features
+ )
+ end
+
+ let!(:project_feature4) do
+ project_features.create!(
+ project_id: project4.id,
+ package_registry_access_level: ProjectFeature::DISABLED,
+ **non_null_project_features
+ )
+ end
+
+ let!(:project_feature5) do
+ project_features.create!(
+ project_id: project5.id,
+ package_registry_access_level: ProjectFeature::DISABLED,
+ **non_null_project_features
+ )
+ end
+
+ let!(:project_feature6) do
+ project_features.create!(
+ project_id: project6.id,
+ package_registry_access_level: ProjectFeature::ENABLED,
+ **non_null_project_features
+ )
+ end
+
+ subject(:perform_migration) do
+ described_class.new(start_id: project1.id,
+ end_id: project5.id,
+ batch_table: :projects,
+ batch_column: :id,
+ sub_batch_size: 2,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection)
+ .perform
+ end
+
+ it 'backfills project_features.package_registry_access_level', :aggregate_failures do
+ perform_migration
+
+ expect(project_feature1.reload.package_registry_access_level).to eq(ProjectFeature::DISABLED)
+ expect(project_feature2.reload.package_registry_access_level).to eq(ProjectFeature::DISABLED)
+ expect(project_feature3.reload.package_registry_access_level).to eq(ProjectFeature::PRIVATE)
+ expect(project_feature4.reload.package_registry_access_level).to eq(ProjectFeature::ENABLED)
+ expect(project_feature5.reload.package_registry_access_level).to eq(ProjectFeature::PUBLIC)
+ expect(project_feature6.reload.package_registry_access_level).to eq(ProjectFeature::ENABLED)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_project_member_namespace_id_spec.rb b/spec/lib/gitlab/background_migration/backfill_project_member_namespace_id_spec.rb
new file mode 100644
index 00000000000..ca7ca41a33e
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_project_member_namespace_id_spec.rb
@@ -0,0 +1,104 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillProjectMemberNamespaceId, :migration, schema: 20220516054011 do
+ let(:migration) do
+ described_class.new(start_id: 1, end_id: 10,
+ batch_table: table_name, batch_column: batch_column,
+ sub_batch_size: sub_batch_size, pause_ms: pause_ms,
+ connection: ApplicationRecord.connection)
+ end
+
+ let(:members_table) { table(:members) }
+ let(:projects_table) { table(:projects) }
+ let(:namespaces_table) { table(:namespaces) }
+
+ let(:table_name) { 'members' }
+ let(:batch_column) { :id }
+ let(:sub_batch_size) { 100 }
+ let(:pause_ms) { 0 }
+
+ subject(:perform_migration) do
+ migration.perform
+ end
+
+ before do
+ namespaces_table.create!(id: 201, name: 'group1', path: 'group1', type: 'Group')
+ namespaces_table.create!(id: 202, name: 'group2', path: 'group2', type: 'Group')
+ namespaces_table.create!(id: 300, name: 'project-namespace-1', path: 'project-namespace-1-path', type: 'Project')
+ namespaces_table.create!(id: 301, name: 'project-namespace-2', path: 'project-namespace-2-path', type: 'Project')
+ namespaces_table.create!(id: 302, name: 'project-namespace-3', path: 'project-namespace-3-path', type: 'Project')
+
+ projects_table.create!(id: 100, name: 'project1', path: 'project1', namespace_id: 202, project_namespace_id: 300)
+ projects_table.create!(id: 101, name: 'project2', path: 'project2', namespace_id: 202, project_namespace_id: 301)
+ projects_table.create!(id: 102, name: 'project3', path: 'project3', namespace_id: 202, project_namespace_id: 302)
+
+ # project1, no member namespace (fill in)
+ members_table.create!(id: 1, source_id: 100,
+ source_type: 'Project', type: 'ProjectMember',
+ member_namespace_id: nil, access_level: 10, notification_level: 3)
+ # bogus source id, no member namespace id (do nothing)
+ members_table.create!(id: 2, source_id: non_existing_record_id,
+ source_type: 'Project', type: 'ProjectMember',
+ member_namespace_id: nil, access_level: 10, notification_level: 3)
+ # project3, existing member namespace id (do nothing)
+ members_table.create!(id: 3, source_id: 102,
+ source_type: 'Project', type: 'ProjectMember',
+ member_namespace_id: 300, access_level: 10, notification_level: 3)
+
+ # Group memberships (do not change)
+ # group1, no member namespace (do nothing)
+ members_table.create!(id: 4, source_id: 201,
+ source_type: 'Namespace', type: 'GroupMember',
+ member_namespace_id: nil, access_level: 10, notification_level: 3)
+ # group2, existing member namespace (do nothing)
+ members_table.create!(id: 5, source_id: 202,
+ source_type: 'Namespace', type: 'GroupMember',
+ member_namespace_id: 201, access_level: 10, notification_level: 3)
+
+ # Project Namespace memberships (do not change)
+ # project namespace, existing member namespace (do nothing)
+ members_table.create!(id: 6, source_id: 300,
+ source_type: 'Namespace', type: 'ProjectNamespaceMember',
+ member_namespace_id: 201, access_level: 10, notification_level: 3)
+ # project namespace, not member namespace (do nothing)
+ members_table.create!(id: 7, source_id: 301,
+ source_type: 'Namespace', type: 'ProjectNamespaceMember',
+ member_namespace_id: 201, access_level: 10, notification_level: 3)
+ end
+
+ it 'backfills `member_namespace_id` for the selected records', :aggregate_failures do
+ expect(members_table.where(type: 'ProjectMember', member_namespace_id: nil).count).to eq 2
+ expect(members_table.where(type: 'GroupMember', member_namespace_id: nil).count).to eq 1
+
+ queries = ActiveRecord::QueryRecorder.new do
+ perform_migration
+ end
+
+ # rubocop:disable Layout/LineLength
+ expect(queries.count).to eq(3)
+ expect(members_table.where(type: 'ProjectMember', member_namespace_id: nil).count).to eq 1 # just the bogus one
+ expect(members_table.where(type: 'ProjectMember').pluck(:member_namespace_id)).to match_array([nil, 300, 300])
+ expect(members_table.where(type: 'GroupMember', member_namespace_id: nil).count).to eq 1
+ expect(members_table.where(type: 'GroupMember').pluck(:member_namespace_id)).to match_array([nil, 201])
+ # rubocop:enable Layout/LineLength
+ end
+
+ it 'tracks timings of queries' do
+ expect(migration.batch_metrics.timings).to be_empty
+
+ expect { perform_migration }.to change { migration.batch_metrics.timings }
+ end
+
+ context 'when given a negative pause_ms' do
+ let(:pause_ms) { -9 }
+ let(:sub_batch_size) { 2 }
+
+ it 'uses 0 as a floor for pause_ms' do
+ expect(migration).to receive(:sleep).with(0)
+
+ perform_migration
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/cleanup_orphaned_routes_spec.rb b/spec/lib/gitlab/background_migration/cleanup_orphaned_routes_spec.rb
new file mode 100644
index 00000000000..a09d5559d33
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/cleanup_orphaned_routes_spec.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+# this needs the schema to be before we introduce the not null constraint on routes#namespace_id
+RSpec.describe Gitlab::BackgroundMigration::CleanupOrphanedRoutes, schema: 20220606060825 do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:routes) { table(:routes) }
+
+ let!(:namespace1) { namespaces.create!(name: 'batchtest1', type: 'Group', path: 'space1') }
+ let!(:namespace2) { namespaces.create!(name: 'batchtest2', type: 'Group', parent_id: namespace1.id, path: 'space2') }
+ let!(:namespace3) { namespaces.create!(name: 'batchtest3', type: 'Group', parent_id: namespace2.id, path: 'space3') }
+
+ let!(:proj_namespace1) { namespaces.create!(name: 'proj1', path: 'proj1', type: 'Project', parent_id: namespace1.id) }
+ let!(:proj_namespace2) { namespaces.create!(name: 'proj2', path: 'proj2', type: 'Project', parent_id: namespace2.id) }
+ let!(:proj_namespace3) { namespaces.create!(name: 'proj3', path: 'proj3', type: 'Project', parent_id: namespace3.id) }
+
+ # rubocop:disable Layout/LineLength
+ let!(:proj1) { projects.create!(name: 'proj1', path: 'proj1', namespace_id: namespace1.id, project_namespace_id: proj_namespace1.id) }
+ let!(:proj2) { projects.create!(name: 'proj2', path: 'proj2', namespace_id: namespace2.id, project_namespace_id: proj_namespace2.id) }
+ let!(:proj3) { projects.create!(name: 'proj3', path: 'proj3', namespace_id: namespace3.id, project_namespace_id: proj_namespace3.id) }
+
+ # valid namespace routes with not null namespace_id
+ let!(:namespace_route1) { routes.create!(path: 'space1', source_id: namespace1.id, source_type: 'Namespace', namespace_id: namespace1.id) }
+ # valid namespace routes with null namespace_id
+ let!(:namespace_route2) { routes.create!(path: 'space1/space2', source_id: namespace2.id, source_type: 'Namespace') }
+ let!(:namespace_route3) { routes.create!(path: 'space1/space3', source_id: namespace3.id, source_type: 'Namespace') }
+ # invalid/orphaned namespace route
+ let!(:orphaned_namespace_route_a) { routes.create!(path: 'space1/space4', source_id: non_existing_record_id, source_type: 'Namespace') }
+ let!(:orphaned_namespace_route_b) { routes.create!(path: 'space1/space5', source_id: non_existing_record_id - 1, source_type: 'Namespace') }
+
+ # valid project routes with not null namespace_id
+ let!(:proj_route1) { routes.create!(path: 'space1/proj1', source_id: proj1.id, source_type: 'Project', namespace_id: proj_namespace1.id) }
+ # valid project routes with null namespace_id
+ let!(:proj_route2) { routes.create!(path: 'space1/space2/proj2', source_id: proj2.id, source_type: 'Project') }
+ let!(:proj_route3) { routes.create!(path: 'space1/space3/proj3', source_id: proj3.id, source_type: 'Project') }
+ # invalid/orphaned namespace route
+ let!(:orphaned_project_route_a) { routes.create!(path: 'space1/space3/proj5', source_id: non_existing_record_id, source_type: 'Project') }
+ let!(:orphaned_project_route_b) { routes.create!(path: 'space1/space3/proj6', source_id: non_existing_record_id - 1, source_type: 'Project') }
+ # rubocop:enable Layout/LineLength
+
+ let!(:migration_attrs) do
+ {
+ start_id: Route.minimum(:id),
+ end_id: Route.maximum(:id),
+ batch_table: :routes,
+ batch_column: :id,
+ sub_batch_size: 100,
+ pause_ms: 0,
+ connection: ApplicationRecord.connection
+ }
+ end
+
+ let!(:migration) { described_class.new(**migration_attrs) }
+
+ subject(:perform_migration) { migration.perform }
+
+ it 'cleans orphaned routes', :aggregate_failures do
+ all_route_ids = Route.pluck(:id)
+
+ orphaned_route_ids = [
+ orphaned_namespace_route_a, orphaned_namespace_route_b, orphaned_project_route_a, orphaned_project_route_b
+ ].pluck(:id)
+ remaining_routes = (all_route_ids - orphaned_route_ids).sort
+
+ expect { perform_migration }.to change { Route.pluck(:id) }.to contain_exactly(*remaining_routes)
+ expect(Route.all).to all(have_attributes(namespace_id: be_present))
+
+ # expect that routes that had namespace_id set did not change namespace_id
+ expect(namespace_route1.reload.namespace_id).to eq(namespace1.id)
+ expect(proj_route1.reload.namespace_id).to eq(proj_namespace1.id)
+ end
+
+ it 'tracks timings of queries' do
+ expect(migration.batch_metrics.timings).to be_empty
+
+ expect { perform_migration }.to change { migration.batch_metrics.timings }
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/encrypt_integration_properties_spec.rb b/spec/lib/gitlab/background_migration/encrypt_integration_properties_spec.rb
index 7334867e8fb..38e8b159e63 100644
--- a/spec/lib/gitlab/background_migration/encrypt_integration_properties_spec.rb
+++ b/spec/lib/gitlab/background_migration/encrypt_integration_properties_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::EncryptIntegrationProperties do
+RSpec.describe Gitlab::BackgroundMigration::EncryptIntegrationProperties, schema: 20220415124804 do
let(:integrations) do
table(:integrations) do |integrations|
integrations.send :attr_encrypted, :encrypted_properties_tmp,
diff --git a/spec/lib/gitlab/background_migration/fix_merge_request_diff_commit_users_spec.rb b/spec/lib/gitlab/background_migration/fix_merge_request_diff_commit_users_spec.rb
index c343ee438b8..99df21562b0 100644
--- a/spec/lib/gitlab/background_migration/fix_merge_request_diff_commit_users_spec.rb
+++ b/spec/lib/gitlab/background_migration/fix_merge_request_diff_commit_users_spec.rb
@@ -2,314 +2,23 @@
require 'spec_helper'
-# The underlying migration relies on the global models (e.g. Project). This
-# means we also need to use FactoryBot factories to ensure everything is
-# operating using the same types. If we use `table()` and similar methods we
-# would have to duplicate a lot of logic just for these tests.
-#
# rubocop: disable RSpec/FactoriesInMigrationSpecs
RSpec.describe Gitlab::BackgroundMigration::FixMergeRequestDiffCommitUsers do
let(:migration) { described_class.new }
describe '#perform' do
context 'when the project exists' do
- it 'processes the project' do
+ it 'does nothing' do
project = create(:project)
- expect(migration).to receive(:process).with(project)
- expect(migration).to receive(:schedule_next_job)
-
- migration.perform(project.id)
- end
-
- it 'marks the background job as finished' do
- project = create(:project)
-
- Gitlab::Database::BackgroundMigrationJob.create!(
- class_name: 'FixMergeRequestDiffCommitUsers',
- arguments: [project.id]
- )
-
- migration.perform(project.id)
-
- job = Gitlab::Database::BackgroundMigrationJob
- .find_by(class_name: 'FixMergeRequestDiffCommitUsers')
-
- expect(job.status).to eq('succeeded')
+ expect { migration.perform(project.id) }.not_to raise_error
end
end
context 'when the project does not exist' do
it 'does nothing' do
- expect(migration).not_to receive(:process)
- expect(migration).to receive(:schedule_next_job)
-
- migration.perform(-1)
- end
- end
- end
-
- describe '#process' do
- it 'processes the merge requests of the project' do
- project = create(:project, :repository)
- commit = project.commit
- mr = create(
- :merge_request_with_diffs,
- source_project: project,
- target_project: project
- )
-
- diff = mr.merge_request_diffs.first
-
- create(
- :merge_request_diff_commit,
- merge_request_diff: diff,
- sha: commit.sha,
- relative_order: 9000
- )
-
- migration.process(project)
-
- updated = diff
- .merge_request_diff_commits
- .find_by(sha: commit.sha, relative_order: 9000)
-
- expect(updated.commit_author_id).not_to be_nil
- expect(updated.committer_id).not_to be_nil
- end
- end
-
- describe '#update_commit' do
- let(:project) { create(:project, :repository) }
- let(:mr) do
- create(
- :merge_request_with_diffs,
- source_project: project,
- target_project: project
- )
- end
-
- let(:diff) { mr.merge_request_diffs.first }
- let(:commit) { project.commit }
-
- def update_row(migration, project, diff, row)
- migration.update_commit(project, row)
-
- diff
- .merge_request_diff_commits
- .find_by(sha: row.sha, relative_order: row.relative_order)
- end
-
- it 'populates missing commit authors' do
- commit_row = create(
- :merge_request_diff_commit,
- merge_request_diff: diff,
- sha: commit.sha,
- relative_order: 9000
- )
-
- updated = update_row(migration, project, diff, commit_row)
-
- expect(updated.commit_author.name).to eq(commit.to_hash[:author_name])
- expect(updated.commit_author.email).to eq(commit.to_hash[:author_email])
- end
-
- it 'populates missing committers' do
- commit_row = create(
- :merge_request_diff_commit,
- merge_request_diff: diff,
- sha: commit.sha,
- relative_order: 9000
- )
-
- updated = update_row(migration, project, diff, commit_row)
-
- expect(updated.committer.name).to eq(commit.to_hash[:committer_name])
- expect(updated.committer.email).to eq(commit.to_hash[:committer_email])
- end
-
- it 'leaves existing commit authors as-is' do
- user = create(:merge_request_diff_commit_user)
- commit_row = create(
- :merge_request_diff_commit,
- merge_request_diff: diff,
- sha: commit.sha,
- relative_order: 9000,
- commit_author: user
- )
-
- updated = update_row(migration, project, diff, commit_row)
-
- expect(updated.commit_author).to eq(user)
- end
-
- it 'leaves existing committers as-is' do
- user = create(:merge_request_diff_commit_user)
- commit_row = create(
- :merge_request_diff_commit,
- merge_request_diff: diff,
- sha: commit.sha,
- relative_order: 9000,
- committer: user
- )
-
- updated = update_row(migration, project, diff, commit_row)
-
- expect(updated.committer).to eq(user)
- end
-
- it 'does nothing when both the author and committer are present' do
- user = create(:merge_request_diff_commit_user)
- commit_row = create(
- :merge_request_diff_commit,
- merge_request_diff: diff,
- sha: commit.sha,
- relative_order: 9000,
- committer: user,
- commit_author: user
- )
-
- recorder = ActiveRecord::QueryRecorder.new do
- migration.update_commit(project, commit_row)
- end
-
- expect(recorder.count).to be_zero
- end
-
- it 'does nothing if the commit does not exist in Git' do
- user = create(:merge_request_diff_commit_user)
- commit_row = create(
- :merge_request_diff_commit,
- merge_request_diff: diff,
- sha: 'kittens',
- relative_order: 9000,
- committer: user,
- commit_author: user
- )
-
- recorder = ActiveRecord::QueryRecorder.new do
- migration.update_commit(project, commit_row)
+ expect { migration.perform(-1) }.not_to raise_error
end
-
- expect(recorder.count).to be_zero
- end
-
- it 'does nothing when the committer/author are missing in the Git commit' do
- user = create(:merge_request_diff_commit_user)
- commit_row = create(
- :merge_request_diff_commit,
- merge_request_diff: diff,
- sha: commit.sha,
- relative_order: 9000,
- committer: user,
- commit_author: user
- )
-
- allow(migration).to receive(:find_or_create_user).and_return(nil)
-
- recorder = ActiveRecord::QueryRecorder.new do
- migration.update_commit(project, commit_row)
- end
-
- expect(recorder.count).to be_zero
- end
- end
-
- describe '#schedule_next_job' do
- it 'schedules the next background migration' do
- Gitlab::Database::BackgroundMigrationJob
- .create!(class_name: 'FixMergeRequestDiffCommitUsers', arguments: [42])
-
- expect(BackgroundMigrationWorker)
- .to receive(:perform_in)
- .with(2.minutes, 'FixMergeRequestDiffCommitUsers', [42])
-
- migration.schedule_next_job
- end
-
- it 'does nothing when there are no jobs' do
- expect(BackgroundMigrationWorker)
- .not_to receive(:perform_in)
-
- migration.schedule_next_job
- end
- end
-
- describe '#find_commit' do
- let(:project) { create(:project, :repository) }
-
- it 'finds a commit using Git' do
- commit = project.commit
- found = migration.find_commit(project, commit.sha)
-
- expect(found).to eq(commit.to_hash)
- end
-
- it 'caches the results' do
- commit = project.commit
-
- migration.find_commit(project, commit.sha)
-
- expect { migration.find_commit(project, commit.sha) }
- .not_to change { Gitlab::GitalyClient.get_request_count }
- end
-
- it 'returns an empty hash if the commit does not exist' do
- expect(migration.find_commit(project, 'kittens')).to eq({})
- end
- end
-
- describe '#find_or_create_user' do
- let(:project) { create(:project, :repository) }
-
- it 'creates missing users' do
- commit = project.commit.to_hash
- id = migration.find_or_create_user(commit, :author_name, :author_email)
-
- expect(MergeRequest::DiffCommitUser.count).to eq(1)
-
- created = MergeRequest::DiffCommitUser.first
-
- expect(created.name).to eq(commit[:author_name])
- expect(created.email).to eq(commit[:author_email])
- expect(created.id).to eq(id)
- end
-
- it 'returns users that already exist' do
- commit = project.commit.to_hash
- user1 = migration.find_or_create_user(commit, :author_name, :author_email)
- user2 = migration.find_or_create_user(commit, :author_name, :author_email)
-
- expect(user1).to eq(user2)
- end
-
- it 'caches the results' do
- commit = project.commit.to_hash
-
- migration.find_or_create_user(commit, :author_name, :author_email)
-
- recorder = ActiveRecord::QueryRecorder.new do
- migration.find_or_create_user(commit, :author_name, :author_email)
- end
-
- expect(recorder.count).to be_zero
- end
-
- it 'returns nil if the commit details are missing' do
- id = migration.find_or_create_user({}, :author_name, :author_email)
-
- expect(id).to be_nil
- end
- end
-
- describe '#matches_row' do
- it 'returns the query matches for the composite primary key' do
- row = double(:commit, merge_request_diff_id: 4, relative_order: 5)
- arel = migration.matches_row(row)
-
- expect(arel.to_sql).to eq(
- '("merge_request_diff_commits"."merge_request_diff_id", "merge_request_diff_commits"."relative_order") = (4, 5)'
- )
end
end
end
diff --git a/spec/lib/gitlab/background_migration/migrate_pages_to_zip_storage_spec.rb b/spec/lib/gitlab/background_migration/migrate_pages_to_zip_storage_spec.rb
deleted file mode 100644
index 557dd8ddee6..00000000000
--- a/spec/lib/gitlab/background_migration/migrate_pages_to_zip_storage_spec.rb
+++ /dev/null
@@ -1,43 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::MigratePagesToZipStorage do
- let(:namespace) { create(:group) } # rubocop: disable RSpec/FactoriesInMigrationSpecs
- let(:migration) { described_class.new }
-
- describe '#perform' do
- context 'when there is project to migrate' do
- let!(:project) { create_project('project') }
-
- after do
- FileUtils.rm_rf(project.pages_path)
- end
-
- it 'migrates project to zip storage' do
- expect_next_instance_of(::Pages::MigrateFromLegacyStorageService,
- anything,
- ignore_invalid_entries: false,
- mark_projects_as_not_deployed: false) do |service|
- expect(service).to receive(:execute_for_batch).with(project.id..project.id).and_call_original
- end
-
- migration.perform(project.id, project.id)
-
- expect(project.reload.pages_metadatum.pages_deployment.file.filename).to eq("_migrated.zip")
- end
- end
- end
-
- def create_project(path)
- project = create(:project) # rubocop: disable RSpec/FactoriesInMigrationSpecs
- project.mark_pages_as_deployed
-
- FileUtils.mkdir_p File.join(project.pages_path, "public")
- File.open(File.join(project.pages_path, "public/index.html"), "w") do |f|
- f.write("Hello!")
- end
-
- project
- end
-end
diff --git a/spec/lib/gitlab/background_migration/set_legacy_open_source_license_available_for_non_public_projects_spec.rb b/spec/lib/gitlab/background_migration/set_legacy_open_source_license_available_for_non_public_projects_spec.rb
new file mode 100644
index 00000000000..035ea6eadcf
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/set_legacy_open_source_license_available_for_non_public_projects_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::SetLegacyOpenSourceLicenseAvailableForNonPublicProjects,
+ :migration,
+ schema: 20220520040416 do
+ let(:namespaces_table) { table(:namespaces) }
+ let(:projects_table) { table(:projects) }
+ let(:project_settings_table) { table(:project_settings) }
+
+ subject(:perform_migration) do
+ described_class.new(start_id: 1,
+ end_id: 30,
+ batch_table: :projects,
+ batch_column: :id,
+ sub_batch_size: 2,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection)
+ .perform
+ end
+
+ let(:queries) { ActiveRecord::QueryRecorder.new { perform_migration } }
+
+ before do
+ namespaces_table.create!(id: 1, name: 'namespace', path: 'namespace-path-1')
+ namespaces_table.create!(id: 2, name: 'namespace', path: 'namespace-path-2', type: 'Project')
+ namespaces_table.create!(id: 3, name: 'namespace', path: 'namespace-path-3', type: 'Project')
+ namespaces_table.create!(id: 4, name: 'namespace', path: 'namespace-path-4', type: 'Project')
+
+ projects_table
+ .create!(id: 11, name: 'proj-1', path: 'path-1', namespace_id: 1, project_namespace_id: 2, visibility_level: 0)
+ projects_table
+ .create!(id: 12, name: 'proj-2', path: 'path-2', namespace_id: 1, project_namespace_id: 3, visibility_level: 10)
+ projects_table
+ .create!(id: 13, name: 'proj-3', path: 'path-3', namespace_id: 1, project_namespace_id: 4, visibility_level: 20)
+
+ project_settings_table.create!(project_id: 11, legacy_open_source_license_available: true)
+ project_settings_table.create!(project_id: 12, legacy_open_source_license_available: true)
+ project_settings_table.create!(project_id: 13, legacy_open_source_license_available: true)
+ end
+
+ it 'sets `legacy_open_source_license_available` attribute to false for non-public projects', :aggregate_failures do
+ expect(queries.count).to eq(3)
+
+ expect(migrated_attribute(11)).to be_falsey
+ expect(migrated_attribute(12)).to be_falsey
+ expect(migrated_attribute(13)).to be_truthy
+ end
+
+ def migrated_attribute(project_id)
+ project_settings_table.find(project_id).legacy_open_source_license_available
+ end
+end
diff --git a/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb
index d2abdb740f8..ab4be5a909a 100644
--- a/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Gitlab::BitbucketServerImport::Importer do
let(:project) { create(:project, :repository, import_url: import_url, creator: project_creator) }
let(:now) { Time.now.utc.change(usec: 0) }
let(:project_key) { 'TEST' }
- let(:repo_slug) { 'rouge' }
+ let(:repo_slug) { 'rouge-repo' }
let(:sample) { RepoHelpers.sample_compare }
subject { described_class.new(project, recover_missing_commits: true) }
diff --git a/spec/lib/gitlab/checks/changes_access_spec.rb b/spec/lib/gitlab/checks/changes_access_spec.rb
index 41ec11c1055..60118823b5a 100644
--- a/spec/lib/gitlab/checks/changes_access_spec.rb
+++ b/spec/lib/gitlab/checks/changes_access_spec.rb
@@ -49,56 +49,26 @@ RSpec.describe Gitlab::Checks::ChangesAccess do
context 'when changes contain empty revisions' do
let(:expected_commit) { instance_double(Commit) }
- let(:expected_allow_quarantine) { allow_quarantine }
shared_examples 'returns only commits with non empty revisions' do
- before do
- stub_feature_flags(filter_quarantined_commits: filter_quarantined_commits)
- end
-
specify do
expect(project.repository)
.to receive(:new_commits)
- .with([newrev], allow_quarantine: expected_allow_quarantine) { [expected_commit] }
+ .with([newrev]) { [expected_commit] }
expect(subject.commits).to match_array([expected_commit])
end
end
- it_behaves_like 'returns only commits with non empty revisions' do
+ context 'with oldrev' do
let(:changes) { [{ oldrev: oldrev, newrev: newrev }, { newrev: '' }, { newrev: Gitlab::Git::BLANK_SHA }] }
- let(:allow_quarantine) { true }
- let(:filter_quarantined_commits) { true }
+
+ it_behaves_like 'returns only commits with non empty revisions'
end
context 'without oldrev' do
let(:changes) { [{ newrev: newrev }, { newrev: '' }, { newrev: Gitlab::Git::BLANK_SHA }] }
- context 'with disallowed quarantine' do
- # The quarantine directory should not be used because we're lacking
- # oldrev, and we're not filtering commits.
- let(:allow_quarantine) { false }
- let(:filter_quarantined_commits) { false }
-
- it_behaves_like 'returns only commits with non empty revisions'
- end
-
- context 'with allowed quarantine and :filter_quarantined_commits disabled' do
- # When we allow usage of the quarantine but have no oldrev and we're
- # not filtering commits then results returned by the quarantine aren't
- # accurate. We thus mustn't try using it.
- let(:allow_quarantine) { true }
- let(:filter_quarantined_commits) { false }
- let(:expected_allow_quarantine) { false }
-
- it_behaves_like 'returns only commits with non empty revisions'
- end
-
- context 'with allowed quarantine and :filter_quarantined_commits enabled' do
- let(:allow_quarantine) { true }
- let(:filter_quarantined_commits) { true }
-
- it_behaves_like 'returns only commits with non empty revisions'
- end
+ it_behaves_like 'returns only commits with non empty revisions'
end
end
end
diff --git a/spec/lib/gitlab/checks/single_change_access_spec.rb b/spec/lib/gitlab/checks/single_change_access_spec.rb
index 1b34e58797e..8d9f96dd2b4 100644
--- a/spec/lib/gitlab/checks/single_change_access_spec.rb
+++ b/spec/lib/gitlab/checks/single_change_access_spec.rb
@@ -96,26 +96,14 @@ RSpec.describe Gitlab::Checks::SingleChangeAccess do
let(:provided_commits) { nil }
before do
- stub_feature_flags(filter_quarantined_commits: filter_quarantined_commits)
-
expect(project.repository)
.to receive(:new_commits)
- .with(newrev, allow_quarantine: filter_quarantined_commits)
+ .with(newrev)
.once
.and_return(expected_commits)
end
- context 'with :filter_quarantined_commits disabled' do
- let(:filter_quarantined_commits) { false }
-
- it_behaves_like '#commits'
- end
-
- context 'with :filter_quarantined_commits enabled' do
- let(:filter_quarantined_commits) { true }
-
- it_behaves_like '#commits'
- end
+ it_behaves_like '#commits'
end
end
end
diff --git a/spec/lib/gitlab/checks/tag_check_spec.rb b/spec/lib/gitlab/checks/tag_check_spec.rb
index e2e7d9c9648..6cd3a2d1c07 100644
--- a/spec/lib/gitlab/checks/tag_check_spec.rb
+++ b/spec/lib/gitlab/checks/tag_check_spec.rb
@@ -26,8 +26,18 @@ RSpec.describe Gitlab::Checks::TagCheck do
let(:oldrev) { 'be93687618e4b132087f430a4d8fc3a609c9b77c' }
let(:newrev) { '0000000000000000000000000000000000000000' }
- it 'is prevented' do
- expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, /cannot be deleted/)
+ context 'via web interface' do
+ let(:protocol) { 'web' }
+
+ it 'is allowed' do
+ expect { subject.validate! }.not_to raise_error
+ end
+ end
+
+ context 'via SSH' do
+ it 'is prevented' do
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, /only delete.*web interface/)
+ end
end
end
@@ -41,6 +51,21 @@ RSpec.describe Gitlab::Checks::TagCheck do
end
end
+ context 'as developer' do
+ before do
+ project.add_developer(user)
+ end
+
+ context 'deletion' do
+ let(:oldrev) { 'be93687618e4b132087f430a4d8fc3a609c9b77c' }
+ let(:newrev) { '0000000000000000000000000000000000000000' }
+
+ it 'is prevented' do
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, /not allowed to delete/)
+ end
+ end
+ end
+
context 'creation' do
let(:oldrev) { '0000000000000000000000000000000000000000' }
let(:newrev) { '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51' }
diff --git a/spec/lib/gitlab/ci/build/image_spec.rb b/spec/lib/gitlab/ci/build/image_spec.rb
index 630dfcd06bb..8f77a1f60ad 100644
--- a/spec/lib/gitlab/ci/build/image_spec.rb
+++ b/spec/lib/gitlab/ci/build/image_spec.rb
@@ -28,8 +28,14 @@ RSpec.describe Gitlab::Ci::Build::Image do
context 'when image is defined as hash' do
let(:entrypoint) { '/bin/sh' }
+ let(:pull_policy) { %w[always if-not-present] }
- let(:job) { create(:ci_build, options: { image: { name: image_name, entrypoint: entrypoint, ports: [80] } } ) }
+ let(:job) do
+ create(:ci_build, options: { image: { name: image_name,
+ entrypoint: entrypoint,
+ ports: [80],
+ pull_policy: pull_policy } } )
+ end
it 'fabricates an object of the proper class' do
is_expected.to be_kind_of(described_class)
@@ -38,6 +44,7 @@ RSpec.describe Gitlab::Ci::Build::Image do
it 'populates fabricated object with the proper attributes' do
expect(subject.name).to eq(image_name)
expect(subject.entrypoint).to eq(entrypoint)
+ expect(subject.pull_policy).to eq(pull_policy)
end
it 'populates the ports' do
diff --git a/spec/lib/gitlab/ci/config/entry/image_spec.rb b/spec/lib/gitlab/ci/config/entry/image_spec.rb
index e16a9a7a74a..bd1ab5d8c41 100644
--- a/spec/lib/gitlab/ci/config/entry/image_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/image_spec.rb
@@ -1,8 +1,16 @@
# frozen_string_literal: true
-require 'spec_helper'
+require 'fast_spec_helper'
+require 'support/helpers/stubbed_feature'
+require 'support/helpers/stub_feature_flags'
RSpec.describe Gitlab::Ci::Config::Entry::Image do
+ include StubFeatureFlags
+
+ before do
+ stub_feature_flags(ci_docker_image_pull_policy: true)
+ end
+
let(:entry) { described_class.new(config) }
context 'when configuration is a string' do
@@ -43,6 +51,12 @@ RSpec.describe Gitlab::Ci::Config::Entry::Image do
expect(entry.ports).to be_nil
end
end
+
+ describe '#pull_policy' do
+ it "returns nil" do
+ expect(entry.pull_policy).to be_nil
+ end
+ end
end
context 'when configuration is a hash' do
@@ -109,6 +123,56 @@ RSpec.describe Gitlab::Ci::Config::Entry::Image do
end
end
end
+
+ context 'when configuration has pull_policy' do
+ let(:config) { { name: 'image:1.0', pull_policy: 'if-not-present' } }
+
+ describe '#valid?' do
+ it 'is valid' do
+ entry.compose!
+
+ expect(entry).to be_valid
+ end
+
+ context 'when the feature flag ci_docker_image_pull_policy is disabled' do
+ before do
+ stub_feature_flags(ci_docker_image_pull_policy: false)
+ end
+
+ it 'is not valid' do
+ entry.compose!
+
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include('image config contains unknown keys: pull_policy')
+ end
+ end
+ end
+
+ describe '#value' do
+ it "returns value" do
+ entry.compose!
+
+ expect(entry.value).to eq(
+ name: 'image:1.0',
+ pull_policy: ['if-not-present']
+ )
+ end
+
+ context 'when the feature flag ci_docker_image_pull_policy is disabled' do
+ before do
+ stub_feature_flags(ci_docker_image_pull_policy: false)
+ end
+
+ it 'is not valid' do
+ entry.compose!
+
+ expect(entry.value).to eq(
+ name: 'image:1.0'
+ )
+ end
+ end
+ end
+ end
end
context 'when entry value is not correct' do
diff --git a/spec/lib/gitlab/ci/config/entry/pull_policy_spec.rb b/spec/lib/gitlab/ci/config/entry/pull_policy_spec.rb
new file mode 100644
index 00000000000..c35355b10c6
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/pull_policy_spec.rb
@@ -0,0 +1,87 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Entry::PullPolicy do
+ let(:entry) { described_class.new(config) }
+
+ describe '#value' do
+ subject(:value) { entry.value }
+
+ context 'when config value is nil' do
+ let(:config) { nil }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when retry value is an empty array' do
+ let(:config) { [] }
+
+ it { is_expected.to eq(nil) }
+ end
+
+ context 'when retry value is string' do
+ let(:config) { "always" }
+
+ it { is_expected.to eq(%w[always]) }
+ end
+
+ context 'when retry value is array' do
+ let(:config) { %w[always if-not-present] }
+
+ it { is_expected.to eq(%w[always if-not-present]) }
+ end
+ end
+
+ describe 'validation' do
+ subject(:valid?) { entry.valid? }
+
+ context 'when retry value is nil' do
+ let(:config) { nil }
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when retry value is an empty array' do
+ let(:config) { [] }
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when retry value is a hash' do
+ let(:config) { {} }
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when retry value is string' do
+ let(:config) { "always" }
+
+ it { is_expected.to eq(true) }
+
+ context 'when it is an invalid policy' do
+ let(:config) { "invalid" }
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when it is an empty string' do
+ let(:config) { "" }
+
+ it { is_expected.to eq(false) }
+ end
+ end
+
+ context 'when retry value is array' do
+ let(:config) { %w[always if-not-present] }
+
+ it { is_expected.to eq(true) }
+
+ context 'when config contains an invalid policy' do
+ let(:config) { %w[always invalid] }
+
+ it { is_expected.to eq(false) }
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/entry/rules/rule/changes_spec.rb b/spec/lib/gitlab/ci/config/entry/rules/rule/changes_spec.rb
new file mode 100644
index 00000000000..3ed4a9f263f
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/rules/rule/changes_spec.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule::Changes do
+ let(:factory) do
+ Gitlab::Config::Entry::Factory.new(described_class)
+ .value(config)
+ end
+
+ subject(:entry) { factory.create! }
+
+ before do
+ entry.compose!
+ end
+
+ describe '.new' do
+ context 'when using a string array' do
+ let(:config) { %w[app/ lib/ spec/ other/* paths/**/*.rb] }
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'when using an integer array' do
+ let(:config) { [1, 2] }
+
+ it { is_expected.not_to be_valid }
+
+ it 'returns errors' do
+ expect(entry.errors).to include(/changes config should be an array of strings/)
+ end
+ end
+
+ context 'when using a string' do
+ let(:config) { 'a regular string' }
+
+ it { is_expected.not_to be_valid }
+
+ it 'reports an error about invalid policy' do
+ expect(entry.errors).to include(/should be an array of strings/)
+ end
+ end
+
+ context 'when using a long array' do
+ let(:config) { ['app/'] * 51 }
+
+ it { is_expected.not_to be_valid }
+
+ it 'returns errors' do
+ expect(entry.errors).to include(/has too many entries \(maximum 50\)/)
+ end
+ end
+
+ context 'when clause is empty' do
+ let(:config) {}
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'when policy strategy does not match' do
+ let(:config) { 'string strategy' }
+
+ it { is_expected.not_to be_valid }
+
+ it 'returns information about errors' do
+ expect(entry.errors)
+ .to include(/should be an array of strings/)
+ end
+ end
+ end
+
+ describe '#value' do
+ subject(:value) { entry.value }
+
+ context 'when using a string array' do
+ let(:config) { %w[app/ lib/ spec/ other/* paths/**/*.rb] }
+
+ it { is_expected.to eq(config) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb b/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb
index 86270788431..89d349efe8f 100644
--- a/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb
@@ -18,6 +18,10 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
let(:entry) { factory.create! }
+ before do
+ entry.compose!
+ end
+
describe '.new' do
subject { entry }
@@ -121,7 +125,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
it { is_expected.not_to be_valid }
it 'returns errors' do
- expect(subject.errors).to include(/changes should be an array of strings/)
+ expect(subject.errors).to include(/changes config should be an array of strings/)
end
end
@@ -131,7 +135,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
it { is_expected.not_to be_valid }
it 'returns errors' do
- expect(subject.errors).to include(/changes is too long \(maximum is 50 characters\)/)
+ expect(subject.errors).to include(/changes config has too many entries \(maximum 50\)/)
end
end
@@ -434,6 +438,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
end
describe '.default' do
+ let(:config) {}
+
it 'does not have default value' do
expect(described_class.default).to be_nil
end
diff --git a/spec/lib/gitlab/ci/config/external/mapper_spec.rb b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
index 2d2adf09a42..7e1b31fea6a 100644
--- a/spec/lib/gitlab/ci/config/external/mapper_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
include StubRequests
let_it_be(:project) { create(:project, :repository) }
- let_it_be(:user) { create(:user) }
+ let_it_be(:user) { project.owner }
let(:local_file) { '/lib/gitlab/ci/templates/non-existent-file.yml' }
let(:remote_url) { 'https://gitlab.com/gitlab-org/gitlab-foss/blob/1234/.gitlab-ci-1.yml' }
@@ -34,6 +34,19 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
describe '#process' do
subject(:process) { mapper.process }
+ shared_examples 'logging config file fetch' do |key, count|
+ it 'propagates the pipeline logger' do
+ process
+
+ fetch_content_log_count = mapper
+ .logger
+ .observations_hash
+ .dig(key, 'count')
+
+ expect(fetch_content_log_count).to eq(count)
+ end
+ end
+
context "when single 'include' keyword is defined" do
context 'when the string is a local file' do
let(:values) do
@@ -45,6 +58,8 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
expect(subject).to contain_exactly(
an_instance_of(Gitlab::Ci::Config::External::File::Local))
end
+
+ it_behaves_like 'logging config file fetch', 'config_file_fetch_local_content_duration_s', 1
end
context 'when the key is a local file hash' do
@@ -68,6 +83,8 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
expect(subject).to contain_exactly(
an_instance_of(Gitlab::Ci::Config::External::File::Remote))
end
+
+ it_behaves_like 'logging config file fetch', 'config_file_fetch_remote_content_duration_s', 1
end
context 'when the key is a remote file hash' do
@@ -92,6 +109,8 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
expect(subject).to contain_exactly(
an_instance_of(Gitlab::Ci::Config::External::File::Template))
end
+
+ it_behaves_like 'logging config file fetch', 'config_file_fetch_template_content_duration_s', 1
end
context 'when the key is a hash of file and remote' do
@@ -118,6 +137,8 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
expect(subject).to contain_exactly(
an_instance_of(Gitlab::Ci::Config::External::File::Project))
end
+
+ it_behaves_like 'logging config file fetch', 'config_file_fetch_project_content_duration_s', 1
end
context "when the key is project's files" do
@@ -131,6 +152,8 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
an_instance_of(Gitlab::Ci::Config::External::File::Project),
an_instance_of(Gitlab::Ci::Config::External::File::Project))
end
+
+ it_behaves_like 'logging config file fetch', 'config_file_fetch_project_content_duration_s', 2
end
end
diff --git a/spec/lib/gitlab/ci/jwt_spec.rb b/spec/lib/gitlab/ci/jwt_spec.rb
index b0d6f5adfb1..179e2efc0c7 100644
--- a/spec/lib/gitlab/ci/jwt_spec.rb
+++ b/spec/lib/gitlab/ci/jwt_spec.rb
@@ -160,20 +160,8 @@ RSpec.describe Gitlab::Ci::Jwt do
subject(:jwt) { described_class.for_build(build) }
- context 'when ci_jwt_signing_key feature flag is disabled' do
+ context 'when ci_jwt_signing_key is present' do
before do
- stub_feature_flags(ci_jwt_signing_key: false)
-
- allow(Rails.application.secrets).to receive(:openid_connect_signing_key).and_return(rsa_key_data)
- end
-
- it_behaves_like 'generating JWT for build'
- end
-
- context 'when ci_jwt_signing_key feature flag is enabled' do
- before do
- stub_feature_flags(ci_jwt_signing_key: true)
-
stub_application_setting(ci_jwt_signing_key: rsa_key_data)
end
diff --git a/spec/lib/gitlab/ci/parsers/coverage/sax_document_spec.rb b/spec/lib/gitlab/ci/parsers/coverage/sax_document_spec.rb
index 0580cb9922b..a9851d78f48 100644
--- a/spec/lib/gitlab/ci/parsers/coverage/sax_document_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/coverage/sax_document_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::Ci::Parsers::Coverage::SaxDocument do
subject(:parse_report) { Nokogiri::XML::SAX::Parser.new(described_class.new(coverage_report, project_path, paths)).parse(cobertura) }
describe '#parse!' do
- let(:coverage_report) { Gitlab::Ci::Reports::CoverageReports.new }
+ let(:coverage_report) { Gitlab::Ci::Reports::CoverageReport.new }
let(:project_path) { 'foo/bar' }
let(:paths) { ['app/user.rb'] }
diff --git a/spec/lib/gitlab/ci/parsers/security/secret_detection_spec.rb b/spec/lib/gitlab/ci/parsers/security/secret_detection_spec.rb
index 1d361e16aad..e8f1d617cb7 100644
--- a/spec/lib/gitlab/ci/parsers/security/secret_detection_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/security/secret_detection_spec.rb
@@ -39,7 +39,7 @@ RSpec.describe Gitlab::Ci::Parsers::Security::SecretDetection do
end
it "generates expected metadata_version" do
- expect(report.findings.first.metadata_version).to eq('3.0')
+ expect(report.findings.first.metadata_version).to eq('14.1.2')
end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/limit/rate_limit_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/limit/rate_limit_spec.rb
index aa8aec2af4a..69d809aee85 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/limit/rate_limit_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/limit/rate_limit_spec.rb
@@ -30,10 +30,8 @@ RSpec.describe ::Gitlab::Ci::Pipeline::Chain::Limit::RateLimit, :freeze_time, :c
context 'when the limit is exceeded' do
before do
- allow(Gitlab::ApplicationRateLimiter).to receive(:rate_limits)
- .and_return(pipelines_create: { threshold: 1, interval: 1.minute })
-
- stub_feature_flags(ci_throttle_pipelines_creation_dry_run: false)
+ stub_application_setting(pipeline_limit_per_project_user_sha: 1)
+ stub_feature_flags(ci_enforce_throttle_pipelines_creation_override: false)
end
it 'does not persist the pipeline' do
@@ -55,7 +53,9 @@ RSpec.describe ::Gitlab::Ci::Pipeline::Chain::Limit::RateLimit, :freeze_time, :c
class: described_class.name,
project_id: project.id,
subscription_plan: project.actual_plan_name,
- commit_sha: command.sha
+ commit_sha: command.sha,
+ throttled: true,
+ throttle_override: false
)
)
@@ -101,9 +101,9 @@ RSpec.describe ::Gitlab::Ci::Pipeline::Chain::Limit::RateLimit, :freeze_time, :c
end
end
- context 'when ci_throttle_pipelines_creation is disabled' do
+ context 'when ci_enforce_throttle_pipelines_creation is disabled' do
before do
- stub_feature_flags(ci_throttle_pipelines_creation: false)
+ stub_feature_flags(ci_enforce_throttle_pipelines_creation: false)
end
it 'does not break the chain' do
@@ -118,16 +118,25 @@ RSpec.describe ::Gitlab::Ci::Pipeline::Chain::Limit::RateLimit, :freeze_time, :c
expect(pipeline.errors).to be_empty
end
- it 'does not log anything' do
- expect(Gitlab::AppJsonLogger).not_to receive(:info)
+ it 'creates a log entry' do
+ expect(Gitlab::AppJsonLogger).to receive(:info).with(
+ a_hash_including(
+ class: described_class.name,
+ project_id: project.id,
+ subscription_plan: project.actual_plan_name,
+ commit_sha: command.sha,
+ throttled: false,
+ throttle_override: false
+ )
+ )
perform
end
end
- context 'when ci_throttle_pipelines_creation_dry_run is enabled' do
+ context 'when ci_enforce_throttle_pipelines_creation_override is enabled' do
before do
- stub_feature_flags(ci_throttle_pipelines_creation_dry_run: true)
+ stub_feature_flags(ci_enforce_throttle_pipelines_creation_override: true)
end
it 'does not break the chain' do
@@ -148,7 +157,9 @@ RSpec.describe ::Gitlab::Ci::Pipeline::Chain::Limit::RateLimit, :freeze_time, :c
class: described_class.name,
project_id: project.id,
subscription_plan: project.actual_plan_name,
- commit_sha: command.sha
+ commit_sha: command.sha,
+ throttled: false,
+ throttle_override: true
)
)
diff --git a/spec/lib/gitlab/ci/pipeline/quota/deployments_spec.rb b/spec/lib/gitlab/ci/pipeline/quota/deployments_spec.rb
index 5b0917c5c6f..8f727749ee2 100644
--- a/spec/lib/gitlab/ci/pipeline/quota/deployments_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/quota/deployments_spec.rb
@@ -4,9 +4,8 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Quota::Deployments do
let_it_be_with_refind(:namespace) { create(:namespace) }
- let_it_be_with_reload(:default_plan) { create(:default_plan) }
let_it_be_with_reload(:project) { create(:project, :repository, namespace: namespace) }
- let_it_be(:plan_limits) { create(:plan_limits, plan: default_plan) }
+ let_it_be(:plan_limits) { create(:plan_limits, :default_plan) }
let(:pipeline) { build_stubbed(:ci_pipeline, project: project) }
diff --git a/spec/lib/gitlab/ci/reports/coverage_report_generator_spec.rb b/spec/lib/gitlab/ci/reports/coverage_report_generator_spec.rb
new file mode 100644
index 00000000000..eec218346c2
--- /dev/null
+++ b/spec/lib/gitlab/ci/reports/coverage_report_generator_spec.rb
@@ -0,0 +1,104 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Reports::CoverageReportGenerator, factory_default: :keep do
+ let_it_be(:project) { create_default(:project, :repository).freeze }
+ let_it_be(:pipeline) { build(:ci_pipeline, :with_coverage_reports) }
+
+ describe '#report' do
+ subject { described_class.new(pipeline).report }
+
+ let_it_be(:pipeline) { create(:ci_pipeline, :success) }
+
+ shared_examples 'having a coverage report' do
+ it 'returns coverage reports with collected data' do
+ expected_files = [
+ "auth/token.go",
+ "auth/rpccredentials.go",
+ "app/controllers/abuse_reports_controller.rb"
+ ]
+
+ expect(subject.files.keys).to match_array(expected_files)
+ end
+ end
+
+ context 'when pipeline has multiple builds with coverage reports' do
+ let!(:build_rspec) { create(:ci_build, :success, name: 'rspec', pipeline: pipeline) }
+ let!(:build_golang) { create(:ci_build, :success, name: 'golang', pipeline: pipeline) }
+
+ before do
+ create(:ci_job_artifact, :cobertura, job: build_rspec)
+ create(:ci_job_artifact, :coverage_gocov_xml, job: build_golang)
+ end
+
+ it_behaves_like 'having a coverage report'
+
+ context 'and it is a child pipeline' do
+ let!(:pipeline) { create(:ci_pipeline, :success, child_of: build(:ci_pipeline)) }
+
+ it 'returns empty coverage report' do
+ expect(subject).to be_empty
+ end
+ end
+ end
+
+ context 'when builds are retried' do
+ let!(:build_rspec) { create(:ci_build, :success, name: 'rspec', retried: true, pipeline: pipeline) }
+ let!(:build_golang) { create(:ci_build, :success, name: 'golang', retried: true, pipeline: pipeline) }
+
+ before do
+ create(:ci_job_artifact, :cobertura, job: build_rspec)
+ create(:ci_job_artifact, :coverage_gocov_xml, job: build_golang)
+ end
+
+ it 'does not take retried builds into account' do
+ expect(subject).to be_empty
+ end
+ end
+
+ context 'when pipeline does not have any builds with coverage reports' do
+ it 'returns empty coverage reports' do
+ expect(subject).to be_empty
+ end
+ end
+
+ context 'when pipeline has child pipeline with builds that have coverage reports' do
+ let!(:child_pipeline) { create(:ci_pipeline, :success, child_of: pipeline) }
+
+ let!(:build_rspec) { create(:ci_build, :success, name: 'rspec', pipeline: child_pipeline) }
+ let!(:build_golang) { create(:ci_build, :success, name: 'golang', pipeline: child_pipeline) }
+
+ before do
+ create(:ci_job_artifact, :cobertura, job: build_rspec)
+ create(:ci_job_artifact, :coverage_gocov_xml, job: build_golang)
+ end
+
+ it_behaves_like 'having a coverage report'
+
+ context 'when feature flag ci_child_pipeline_coverage_reports is disabled' do
+ before do
+ stub_feature_flags(ci_child_pipeline_coverage_reports: false)
+ end
+
+ it 'returns empty coverage reports' do
+ expect(subject).to be_empty
+ end
+ end
+ end
+
+ context 'when both parent and child pipeline have builds with coverage reports' do
+ let!(:child_pipeline) { create(:ci_pipeline, :success, child_of: pipeline) }
+
+ let!(:build_rspec) { create(:ci_build, :success, name: 'rspec', pipeline: pipeline) }
+ let!(:build_golang) { create(:ci_build, :success, name: 'golang', pipeline: child_pipeline) }
+
+ before do
+ create(:ci_job_artifact, :cobertura, job: build_rspec)
+ create(:ci_job_artifact, :coverage_gocov_xml, job: build_golang)
+ end
+
+ it_behaves_like 'having a coverage report'
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/reports/coverage_reports_spec.rb b/spec/lib/gitlab/ci/reports/coverage_report_spec.rb
index 41ebae863ee..53646f7dfc0 100644
--- a/spec/lib/gitlab/ci/reports/coverage_reports_spec.rb
+++ b/spec/lib/gitlab/ci/reports/coverage_report_spec.rb
@@ -2,11 +2,25 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Reports::CoverageReports do
+RSpec.describe Gitlab::Ci::Reports::CoverageReport do
let(:coverage_report) { described_class.new }
it { expect(coverage_report.files).to eq({}) }
+ describe '#empty?' do
+ context 'when no file has been added' do
+ it { expect(coverage_report.empty?).to be(true) }
+ end
+
+ context 'when file has been added' do
+ before do
+ coverage_report.add_file('app.rb', { 1 => 0, 2 => 1 })
+ end
+
+ it { expect(coverage_report.empty?).to be(false) }
+ end
+ end
+
describe '#pick' do
before do
coverage_report.add_file('app.rb', { 1 => 0, 2 => 1 })
diff --git a/spec/lib/gitlab/ci/runner_upgrade_check_spec.rb b/spec/lib/gitlab/ci/runner_upgrade_check_spec.rb
index f2b4e7573c0..0353432741b 100644
--- a/spec/lib/gitlab/ci/runner_upgrade_check_spec.rb
+++ b/spec/lib/gitlab/ci/runner_upgrade_check_spec.rb
@@ -51,21 +51,22 @@ RSpec.describe Gitlab::Ci::RunnerUpgradeCheck do
end
end
- context 'with Gitlab::VERSION set to 14.0.123' do
+ context 'with Gitlab::VERSION set to 14.0.1' do
before do
- stub_version('14.0.123', 'deadbeef')
+ stub_version('14.0.1', 'deadbeef')
described_class.instance.reset!
end
context 'with valid params' do
where(:runner_version, :expected_result) do
- 'v14.1.0-rc3' | :not_available # not available since the GitLab instance is still on 14.0.x
- 'v14.1.0~beta.1574.gf6ea9389' | :not_available # suffixes are correctly handled
- 'v14.1.0/1.1.0' | :not_available # suffixes are correctly handled
- 'v14.1.0' | :not_available # not available since the GitLab instance is still on 14.0.x
+ 'v15.0.0' | :not_available # not available since the GitLab instance is still on 14.x and a major version might be incompatible
+ 'v14.1.0-rc3' | :recommended # recommended since even though the GitLab instance is still on 14.0.x, there is a patch release (14.1.1) available which might contain security fixes
+ 'v14.1.0~beta.1574.gf6ea9389' | :recommended # suffixes are correctly handled
+ 'v14.1.0/1.1.0' | :recommended # suffixes are correctly handled
+ 'v14.1.0' | :recommended # recommended since even though the GitLab instance is still on 14.0.x, there is a patch release (14.1.1) available which might contain security fixes
'v14.0.1' | :recommended # recommended upgrade since 14.0.2 is available
- 'v14.0.2' | :not_available # not available since 14.0.2 is the latest 14.0.x release available
+ 'v14.0.2' | :not_available # not available since 14.0.2 is the latest 14.0.x release available within the instance's major.minor version
'v13.10.1' | :available # available upgrade: 14.1.1
'v13.10.1~beta.1574.gf6ea9389' | :available # suffixes are correctly handled
'v13.10.1/1.1.0' | :available # suffixes are correctly handled
diff --git a/spec/lib/gitlab/ci/status/build/play_spec.rb b/spec/lib/gitlab/ci/status/build/play_spec.rb
index bb406623d2f..ade07a54877 100644
--- a/spec/lib/gitlab/ci/status/build/play_spec.rb
+++ b/spec/lib/gitlab/ci/status/build/play_spec.rb
@@ -3,9 +3,10 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Status::Build::Play do
- let(:user) { create(:user) }
- let(:project) { create(:project, :stubbed_repository) }
- let(:build) { create(:ci_build, :manual, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :stubbed_repository) }
+ let_it_be_with_refind(:build) { create(:ci_build, :manual, project: project) }
+
let(:status) { Gitlab::Ci::Status::Core.new(build, user) }
subject { described_class.new(status) }
diff --git a/spec/lib/gitlab/ci/status/build/scheduled_spec.rb b/spec/lib/gitlab/ci/status/build/scheduled_spec.rb
index b0cd1ac4dc5..a9f9b82767e 100644
--- a/spec/lib/gitlab/ci/status/build/scheduled_spec.rb
+++ b/spec/lib/gitlab/ci/status/build/scheduled_spec.rb
@@ -3,8 +3,9 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Status::Build::Scheduled do
- let(:user) { create(:user) }
- let(:project) { create(:project, :stubbed_repository) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :stubbed_repository) }
+
let(:build) { create(:ci_build, :scheduled, project: project) }
let(:status) { Gitlab::Ci::Status::Core.new(build, user) }
diff --git a/spec/lib/gitlab/ci/trace/archive_spec.rb b/spec/lib/gitlab/ci/trace/archive_spec.rb
index 5e965f94347..3ae0e5d1f0e 100644
--- a/spec/lib/gitlab/ci/trace/archive_spec.rb
+++ b/spec/lib/gitlab/ci/trace/archive_spec.rb
@@ -29,35 +29,59 @@ RSpec.describe Gitlab::Ci::Trace::Archive do
let(:stream) { StringIO.new(trace, 'rb') }
let(:src_checksum) { Digest::MD5.hexdigest(trace) }
- context 'when the object store is disabled' do
- before do
- stub_artifacts_object_storage(enabled: false)
+ shared_examples 'valid' do
+ it 'does not count as invalid' do
+ subject.execute!(stream)
+
+ expect(metrics)
+ .not_to have_received(:increment_error_counter)
+ .with(error_reason: :archive_invalid_checksum)
end
+ end
- it 'skips validation' do
+ shared_examples 'local checksum only' do
+ it 'generates only local checksum' do
subject.execute!(stream)
+
expect(trace_metadata.checksum).to eq(src_checksum)
expect(trace_metadata.remote_checksum).to be_nil
- expect(metrics)
- .not_to have_received(:increment_error_counter)
- .with(error_reason: :archive_invalid_checksum)
end
end
- context 'with background_upload enabled' do
+ shared_examples 'skips validations' do
+ it_behaves_like 'valid'
+ it_behaves_like 'local checksum only'
+ end
+
+ shared_context 'with FIPS' do
+ context 'with FIPS enabled', :fips_mode do
+ it_behaves_like 'valid'
+
+ it 'does not generate md5 checksums' do
+ subject.execute!(stream)
+
+ expect(trace_metadata.checksum).to be_nil
+ expect(trace_metadata.remote_checksum).to be_nil
+ end
+ end
+ end
+
+ context 'when the object store is disabled' do
before do
- stub_artifacts_object_storage(background_upload: true)
+ stub_artifacts_object_storage(enabled: false)
end
- it 'skips validation' do
- subject.execute!(stream)
+ it_behaves_like 'skips validations'
+ include_context 'with FIPS'
+ end
- expect(trace_metadata.checksum).to eq(src_checksum)
- expect(trace_metadata.remote_checksum).to be_nil
- expect(metrics)
- .not_to have_received(:increment_error_counter)
- .with(error_reason: :archive_invalid_checksum)
+ context 'with background_upload enabled' do
+ before do
+ stub_artifacts_object_storage(background_upload: true)
end
+
+ it_behaves_like 'skips validations'
+ include_context 'with FIPS'
end
context 'with direct_upload enabled' do
@@ -65,27 +89,26 @@ RSpec.describe Gitlab::Ci::Trace::Archive do
stub_artifacts_object_storage(direct_upload: true)
end
- it 'validates the archived trace' do
+ it_behaves_like 'valid'
+
+ it 'checksums match' do
subject.execute!(stream)
expect(trace_metadata.checksum).to eq(src_checksum)
expect(trace_metadata.remote_checksum).to eq(src_checksum)
- expect(metrics)
- .not_to have_received(:increment_error_counter)
- .with(error_reason: :archive_invalid_checksum)
end
context 'when the checksum does not match' do
let(:invalid_remote_checksum) { SecureRandom.hex }
before do
- expect(::Gitlab::Ci::Trace::RemoteChecksum)
+ allow(::Gitlab::Ci::Trace::RemoteChecksum)
.to receive(:new)
.with(an_instance_of(Ci::JobArtifact))
.and_return(double(md5_checksum: invalid_remote_checksum))
end
- it 'validates the archived trace' do
+ it 'counts as invalid' do
subject.execute!(stream)
expect(trace_metadata.checksum).to eq(src_checksum)
@@ -94,7 +117,11 @@ RSpec.describe Gitlab::Ci::Trace::Archive do
.to have_received(:increment_error_counter)
.with(error_reason: :archive_invalid_checksum)
end
+
+ include_context 'with FIPS'
end
+
+ include_context 'with FIPS'
end
end
end
diff --git a/spec/lib/gitlab/ci/variables/builder_spec.rb b/spec/lib/gitlab/ci/variables/builder_spec.rb
index e13a0993fa8..b0704ad7f50 100644
--- a/spec/lib/gitlab/ci/variables/builder_spec.rb
+++ b/spec/lib/gitlab/ci/variables/builder_spec.rb
@@ -64,6 +64,8 @@ RSpec.describe Gitlab::Ci::Variables::Builder do
value: project.path },
{ key: 'CI_PROJECT_TITLE',
value: project.title },
+ { key: 'CI_PROJECT_DESCRIPTION',
+ value: project.description },
{ key: 'CI_PROJECT_PATH',
value: project.full_path },
{ key: 'CI_PROJECT_PATH_SLUG',
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index 1910057622b..3dd9ca35881 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -7,7 +7,7 @@ module Gitlab
RSpec.describe YamlProcessor do
include StubRequests
- subject { described_class.new(config, user: nil).execute }
+ subject(:processor) { described_class.new(config, user: nil).execute }
shared_examples 'returns errors' do |error_message|
it 'adds a message when an error is encountered' do
@@ -965,6 +965,51 @@ module Gitlab
})
end
end
+
+ context 'when image has pull_policy' do
+ let(:config) do
+ <<~YAML
+ image:
+ name: ruby:2.7
+ pull_policy: if-not-present
+
+ test:
+ script: exit 0
+ YAML
+ end
+
+ it { is_expected.to be_valid }
+
+ it "returns image and service when defined" do
+ expect(processor.stage_builds_attributes("test")).to contain_exactly({
+ stage: "test",
+ stage_idx: 2,
+ name: "test",
+ only: { refs: %w[branches tags] },
+ options: {
+ script: ["exit 0"],
+ image: { name: "ruby:2.7", pull_policy: ["if-not-present"] }
+ },
+ allow_failure: false,
+ when: "on_success",
+ job_variables: [],
+ root_variables_inheritance: true,
+ scheduling_type: :stage
+ })
+ end
+
+ context 'when the feature flag ci_docker_image_pull_policy is disabled' do
+ before do
+ stub_feature_flags(ci_docker_image_pull_policy: false)
+ end
+
+ it { is_expected.not_to be_valid }
+
+ it "returns no job" do
+ expect(processor.jobs).to eq({})
+ end
+ end
+ end
end
describe 'Variables' do
diff --git a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
index 2df85434f0e..109e83be294 100644
--- a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
+++ b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
@@ -178,6 +178,16 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
expect(directives['connect_src']).not_to include(snowplow_micro_url)
end
end
+
+ context 'when REVIEW_APPS_ENABLED is set' do
+ before do
+ stub_env('REVIEW_APPS_ENABLED', 'true')
+ end
+
+ it 'adds gitlab-org/gitlab merge requests API endpoint to CSP' do
+ expect(directives['connect_src']).to include('https://gitlab.com/api/v4/projects/278964/merge_requests/')
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/daemon_spec.rb b/spec/lib/gitlab/daemon_spec.rb
index 4d11b0bdc6c..14846e77a0c 100644
--- a/spec/lib/gitlab/daemon_spec.rb
+++ b/spec/lib/gitlab/daemon_spec.rb
@@ -34,6 +34,43 @@ RSpec.describe Gitlab::Daemon do
end
end
+ describe '.initialize_instance' do
+ before do
+ allow(Kernel).to receive(:at_exit)
+ end
+
+ after do
+ described_class.instance_variable_set(:@instance, nil)
+ end
+
+ it 'provides instance of Daemon' do
+ expect(described_class.instance).to be_instance_of(described_class)
+ end
+
+ context 'when instance has already been created' do
+ before do
+ described_class.instance
+ end
+
+ context 'and recreate flag is false' do
+ it 'raises an error' do
+ expect { described_class.initialize_instance }.to raise_error(/singleton instance already initialized/)
+ end
+ end
+
+ context 'and recreate flag is true' do
+ it 'calls stop on existing instance and returns new instance' do
+ old_instance = described_class.instance
+ expect(old_instance).to receive(:stop)
+
+ new_instance = described_class.initialize_instance(recreate: true)
+
+ expect(new_instance.object_id).not_to eq(old_instance.object_id)
+ end
+ end
+ end
+ end
+
context 'when Daemon is enabled' do
before do
allow(subject).to receive(:enabled?).and_return(true)
diff --git a/spec/lib/gitlab/data_builder/pipeline_spec.rb b/spec/lib/gitlab/data_builder/pipeline_spec.rb
index 8b57da8e60b..c2bd20798f1 100644
--- a/spec/lib/gitlab/data_builder/pipeline_spec.rb
+++ b/spec/lib/gitlab/data_builder/pipeline_spec.rb
@@ -8,11 +8,11 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do
let_it_be_with_reload(:pipeline) do
create(:ci_pipeline,
- project: project,
- status: 'success',
- sha: project.commit.sha,
- ref: project.default_branch,
- user: user)
+ project: project,
+ status: 'success',
+ sha: project.commit.sha,
+ ref: project.default_branch,
+ user: user)
end
let!(:build) { create(:ci_build, pipeline: pipeline) }
@@ -48,6 +48,7 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do
avatar_url: user.avatar_url(only_path: false),
email: user.public_email
})
+ expect(data[:source_pipeline]).to be_nil
end
context 'build with runner' do
@@ -132,6 +133,34 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do
end
end
+ context 'when the pipeline has an upstream' do
+ let(:source_pipeline_attrs) { data[:source_pipeline] }
+
+ shared_examples 'source pipeline attributes' do
+ it 'has source pipeline attributes', :aggregate_failures do
+ expect(source_pipeline_attrs[:pipeline_id]).to eq upstream_pipeline.id
+ expect(source_pipeline_attrs[:job_id]).to eq pipeline.source_bridge.id
+ expect(source_pipeline_attrs[:project][:id]).to eq upstream_pipeline.project.id
+ expect(source_pipeline_attrs[:project][:web_url]).to eq upstream_pipeline.project.web_url
+ expect(source_pipeline_attrs[:project][:path_with_namespace]).to eq upstream_pipeline.project.full_path
+ end
+ end
+
+ context 'in same project' do
+ let!(:upstream_pipeline) { create(:ci_pipeline, upstream_of: pipeline, project: project) }
+
+ it_behaves_like 'source pipeline attributes'
+ end
+
+ context 'in different project' do
+ let!(:upstream_pipeline) { create(:ci_pipeline, upstream_of: pipeline) }
+
+ it_behaves_like 'source pipeline attributes'
+
+ it { expect(source_pipeline_attrs[:project][:id]).not_to eq pipeline.project.id }
+ end
+ end
+
context 'avoids N+1 database queries' do
it "with multiple builds" do
# Preparing the pipeline with the minimal builds
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
index f147e8204e6..97459d4a7be 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
@@ -311,6 +311,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
let(:table_name) { :_test_batched_migrations_test_table }
let(:column_name) { :some_id }
let(:job_arguments) { [:some_id, :some_id_convert_to_bigint] }
+ let(:gitlab_schemas) { Gitlab::Database.gitlab_schemas_for_connection(connection) }
let(:migration_status) { :active }
@@ -358,7 +359,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
it 'completes the migration' do
expect(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:find_for_configuration)
- .with('CopyColumnUsingBackgroundMigrationJob', table_name, column_name, job_arguments)
+ .with(gitlab_schemas, 'CopyColumnUsingBackgroundMigrationJob', table_name, column_name, job_arguments)
.and_return(batched_migration)
expect(batched_migration).to receive(:finalize!).and_call_original
@@ -399,7 +400,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
it 'is a no-op' do
expect(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:find_for_configuration)
- .with('CopyColumnUsingBackgroundMigrationJob', table_name, column_name, job_arguments)
+ .with(gitlab_schemas, 'CopyColumnUsingBackgroundMigrationJob', table_name, column_name, job_arguments)
.and_return(batched_migration)
configuration = {
@@ -426,7 +427,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
context 'when the migration does not exist' do
it 'is a no-op' do
expect(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:find_for_configuration)
- .with('CopyColumnUsingBackgroundMigrationJob', table_name, column_name, [:some, :other, :arguments])
+ .with(gitlab_schemas, 'CopyColumnUsingBackgroundMigrationJob', table_name, column_name, [:some, :other, :arguments])
.and_return(nil)
configuration = {
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
index a1c979bba50..8819171cfd0 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
@@ -78,23 +78,41 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
end
describe '.active_migration' do
+ let(:connection) { Gitlab::Database.database_base_models[:main].connection }
let!(:migration1) { create(:batched_background_migration, :finished) }
- context 'without migrations on hold' do
+ subject(:active_migration) { described_class.active_migration(connection: connection) }
+
+ around do |example|
+ Gitlab::Database::SharedModel.using_connection(connection) do
+ example.run
+ end
+ end
+
+ context 'when there are no migrations on hold' do
let!(:migration2) { create(:batched_background_migration, :active) }
let!(:migration3) { create(:batched_background_migration, :active) }
it 'returns the first active migration according to queue order' do
- expect(described_class.active_migration).to eq(migration2)
+ expect(active_migration).to eq(migration2)
end
end
- context 'with migrations are on hold' do
+ context 'when there are migrations on hold' do
let!(:migration2) { create(:batched_background_migration, :active, on_hold_until: 10.minutes.from_now) }
let!(:migration3) { create(:batched_background_migration, :active, on_hold_until: 2.minutes.ago) }
it 'returns the first active migration that is not on hold according to queue order' do
- expect(described_class.active_migration).to eq(migration3)
+ expect(active_migration).to eq(migration3)
+ end
+ end
+
+ context 'when there are migrations not available for the current connection' do
+ let!(:migration2) { create(:batched_background_migration, :active, gitlab_schema: :gitlab_not_existing) }
+ let!(:migration3) { create(:batched_background_migration, :active, gitlab_schema: :gitlab_main) }
+
+ it 'returns the first active migration that is available for the current connection' do
+ expect(active_migration).to eq(migration3)
end
end
end
@@ -553,25 +571,43 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
end
describe '.for_configuration' do
- let!(:migration) do
- create(
- :batched_background_migration,
+ let!(:attributes) do
+ {
job_class_name: 'MyJobClass',
table_name: :projects,
column_name: :id,
- job_arguments: [[:id], [:id_convert_to_bigint]]
- )
+ job_arguments: [[:id], [:id_convert_to_bigint]],
+ gitlab_schema: :gitlab_main
+ }
end
+ let!(:migration) { create(:batched_background_migration, attributes) }
+
before do
- create(:batched_background_migration, job_class_name: 'OtherClass')
- create(:batched_background_migration, table_name: 'other_table')
- create(:batched_background_migration, column_name: 'other_column')
- create(:batched_background_migration, job_arguments: %w[other arguments])
+ create(:batched_background_migration, attributes.merge(job_class_name: 'OtherClass'))
+ create(:batched_background_migration, attributes.merge(table_name: 'other_table'))
+ create(:batched_background_migration, attributes.merge(column_name: 'other_column'))
+ create(:batched_background_migration, attributes.merge(job_arguments: %w[other arguments]))
end
it 'finds the migration matching the given configuration parameters' do
- actual = described_class.for_configuration('MyJobClass', :projects, :id, [[:id], [:id_convert_to_bigint]])
+ actual = described_class.for_configuration(:gitlab_main, 'MyJobClass', :projects, :id, [[:id], [:id_convert_to_bigint]])
+
+ expect(actual).to contain_exactly(migration)
+ end
+
+ it 'filters by gitlab schemas available for the connection' do
+ actual = described_class.for_configuration(:gitlab_ci, 'MyJobClass', :projects, :id, [[:id], [:id_convert_to_bigint]])
+
+ expect(actual).to be_empty
+ end
+
+ it 'does not filter by gitlab schemas available for the connection if the column is not present' do
+ skip_if_multiple_databases_not_setup
+
+ expect(described_class).to receive(:gitlab_schema_column_exists?).and_return(false)
+
+ actual = described_class.for_configuration(:gitlab_main, 'MyJobClass', :projects, :id, [[:id], [:id_convert_to_bigint]])
expect(actual).to contain_exactly(migration)
end
@@ -579,7 +615,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
describe '.find_for_configuration' do
it 'returns nill if such migration does not exists' do
- expect(described_class.find_for_configuration('MyJobClass', :projects, :id, [[:id], [:id_convert_to_bigint]])).to be_nil
+ expect(described_class.find_for_configuration(:gitlab_main, 'MyJobClass', :projects, :id, [[:id], [:id_convert_to_bigint]])).to be_nil
end
it 'returns the migration when it exists' do
@@ -588,10 +624,25 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
job_class_name: 'MyJobClass',
table_name: :projects,
column_name: :id,
- job_arguments: [[:id], [:id_convert_to_bigint]]
+ job_arguments: [[:id], [:id_convert_to_bigint]],
+ gitlab_schema: :gitlab_main
)
- expect(described_class.find_for_configuration('MyJobClass', :projects, :id, [[:id], [:id_convert_to_bigint]])).to eq(migration)
+ expect(described_class.find_for_configuration(:gitlab_main, 'MyJobClass', :projects, :id, [[:id], [:id_convert_to_bigint]])).to eq(migration)
+ end
+ end
+
+ describe '.for_gitlab_schema' do
+ let!(:migration) { create(:batched_background_migration, gitlab_schema: :gitlab_main) }
+
+ before do
+ create(:batched_background_migration, gitlab_schema: :gitlab_not_existing)
+ end
+
+ it 'finds the migrations matching the given gitlab schema' do
+ actual = described_class.for_gitlab_schema(:gitlab_main)
+
+ expect(actual).to contain_exactly(migration)
end
end
end
diff --git a/spec/lib/gitlab/database/batch_count_spec.rb b/spec/lib/gitlab/database/batch_count_spec.rb
index 028bdce852e..811d4fad95c 100644
--- a/spec/lib/gitlab/database/batch_count_spec.rb
+++ b/spec/lib/gitlab/database/batch_count_spec.rb
@@ -384,4 +384,58 @@ RSpec.describe Gitlab::Database::BatchCount do
subject { described_class.method(:batch_sum) }
end
end
+
+ describe '#batch_average' do
+ let(:model) { Issue }
+ let(:column) { :weight }
+
+ before do
+ Issue.update_all(weight: 2)
+ end
+
+ it 'returns the average of values in the given column' do
+ expect(described_class.batch_average(model, column)).to eq(2)
+ end
+
+ it 'works when given an Arel column' do
+ expect(described_class.batch_average(model, model.arel_table[column])).to eq(2)
+ end
+
+ it 'works with a batch size of 50K' do
+ expect(described_class.batch_average(model, column, batch_size: 50_000)).to eq(2)
+ end
+
+ it 'works with start and finish provided' do
+ expect(described_class.batch_average(model, column, start: model.minimum(:id), finish: model.maximum(:id))).to eq(2)
+ end
+
+ it "defaults the batch size to #{Gitlab::Database::BatchCounter::DEFAULT_AVERAGE_BATCH_SIZE}" do
+ min_id = model.minimum(:id)
+ relation = instance_double(ActiveRecord::Relation)
+ allow(model).to receive_message_chain(:select, public_send: relation)
+ batch_end_id = min_id + calculate_batch_size(Gitlab::Database::BatchCounter::DEFAULT_AVERAGE_BATCH_SIZE)
+
+ expect(relation).to receive(:where).with("id" => min_id..batch_end_id).and_return(double(send: 1))
+
+ described_class.batch_average(model, column)
+ end
+
+ it_behaves_like 'when a transaction is open' do
+ subject { described_class.batch_average(model, column) }
+ end
+
+ it_behaves_like 'disallowed configurations', :batch_average do
+ let(:args) { [model, column] }
+ let(:default_batch_size) { Gitlab::Database::BatchCounter::DEFAULT_AVERAGE_BATCH_SIZE }
+ let(:small_batch_size) { Gitlab::Database::BatchCounter::DEFAULT_AVERAGE_BATCH_SIZE - 1 }
+ end
+
+ it_behaves_like 'when batch fetch query is canceled' do
+ let(:mode) { :itself }
+ let(:operation) { :average }
+ let(:operation_args) { [column] }
+
+ subject { described_class.method(:batch_average) }
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/each_database_spec.rb b/spec/lib/gitlab/database/each_database_spec.rb
index 191f7017b4c..8345cdfb8fb 100644
--- a/spec/lib/gitlab/database/each_database_spec.rb
+++ b/spec/lib/gitlab/database/each_database_spec.rb
@@ -61,7 +61,11 @@ RSpec.describe Gitlab::Database::EachDatabase do
context 'when shared connections are not included' do
it 'only yields the unshared connections' do
- expect(Gitlab::Database).to receive(:db_config_share_with).twice.and_return(nil, 'main')
+ if Gitlab::Database.has_config?(:ci)
+ expect(Gitlab::Database).to receive(:db_config_share_with).exactly(3).times.and_return(nil, 'main', 'main')
+ else
+ expect(Gitlab::Database).to receive(:db_config_share_with).twice.and_return(nil, 'main')
+ end
expect { |b| described_class.each_database_connection(include_shared: false, &b) }
.to yield_successive_args([ActiveRecord::Base.connection, 'main'])
diff --git a/spec/lib/gitlab/database/gitlab_schema_spec.rb b/spec/lib/gitlab/database/gitlab_schema_spec.rb
index a5a67c2c918..611b2fbad72 100644
--- a/spec/lib/gitlab/database/gitlab_schema_spec.rb
+++ b/spec/lib/gitlab/database/gitlab_schema_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Gitlab::Database::GitlabSchema do
it 'all tables have assigned a known gitlab_schema' do
is_expected.to all(
- match([be_a(String), be_in([:gitlab_shared, :gitlab_main, :gitlab_ci])])
+ match([be_a(String), be_in([:gitlab_internal, :gitlab_shared, :gitlab_main, :gitlab_ci])])
)
end
@@ -42,12 +42,12 @@ RSpec.describe Gitlab::Database::GitlabSchema do
where(:name, :classification) do
'ci_builds' | :gitlab_ci
'my_schema.ci_builds' | :gitlab_ci
- 'information_schema.columns' | :gitlab_shared
+ 'information_schema.columns' | :gitlab_internal
'audit_events_part_5fc467ac26' | :gitlab_main
'_test_gitlab_main_table' | :gitlab_main
'_test_gitlab_ci_table' | :gitlab_ci
'_test_my_table' | :gitlab_shared
- 'pg_attribute' | :gitlab_shared
+ 'pg_attribute' | :gitlab_internal
'my_other_table' | :undefined_my_other_table
end
diff --git a/spec/lib/gitlab/database/load_balancing/configuration_spec.rb b/spec/lib/gitlab/database/load_balancing/configuration_spec.rb
index 77284b4d128..34370c9a21f 100644
--- a/spec/lib/gitlab/database/load_balancing/configuration_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/configuration_spec.rb
@@ -100,14 +100,6 @@ RSpec.describe Gitlab::Database::LoadBalancing::Configuration, :request_store do
expect(config.pool_size).to eq(4)
end
end
-
- it 'calls reuse_primary_connection!' do
- expect_next_instance_of(described_class) do |subject|
- expect(subject).to receive(:reuse_primary_connection!).and_call_original
- end
-
- described_class.for_model(model)
- end
end
describe '#load_balancing_enabled?' do
@@ -203,61 +195,12 @@ RSpec.describe Gitlab::Database::LoadBalancing::Configuration, :request_store do
end
end
- describe '#replica_db_config' do
+ describe '#db_config' do
let(:model) { double(:model, connection_db_config: db_config, connection_specification_name: 'Ci::ApplicationRecord') }
let(:config) { described_class.for_model(model) }
it 'returns exactly db_config' do
- expect(config.replica_db_config).to eq(db_config)
- end
-
- context 'when GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci=main' do
- it 'does not change replica_db_config' do
- stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', 'main')
-
- expect(config.replica_db_config).to eq(db_config)
- end
- end
- end
-
- describe 'reuse_primary_connection!' do
- let(:model) { double(:model, connection_db_config: db_config, connection_specification_name: 'Ci::ApplicationRecord') }
- let(:config) { described_class.for_model(model) }
-
- context 'when GITLAB_LOAD_BALANCING_REUSE_PRIMARY_* not configured' do
- it 'the primary connection uses default specification' do
- stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', nil)
-
- expect(config.primary_connection_specification_name).to eq('Ci::ApplicationRecord')
- end
- end
-
- context 'when GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci=main' do
- before do
- stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', 'main')
- end
-
- it 'the primary connection uses main connection' do
- expect(config.primary_connection_specification_name).to eq('ActiveRecord::Base')
- end
-
- context 'when force_no_sharing_primary_model feature flag is enabled' do
- before do
- stub_feature_flags(force_no_sharing_primary_model: true)
- end
-
- it 'the primary connection uses ci connection' do
- expect(config.primary_connection_specification_name).to eq('Ci::ApplicationRecord')
- end
- end
- end
-
- context 'when GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci=unknown' do
- it 'raises exception' do
- stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', 'unknown')
-
- expect { config.reuse_primary_connection! }.to raise_error /Invalid value for/
- end
+ expect(config.db_config).to eq(db_config)
end
end
end
diff --git a/spec/lib/gitlab/database/load_balancing/connection_proxy_spec.rb b/spec/lib/gitlab/database/load_balancing/connection_proxy_spec.rb
index ee2718171c0..41312dbedd6 100644
--- a/spec/lib/gitlab/database/load_balancing/connection_proxy_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/connection_proxy_spec.rb
@@ -79,42 +79,55 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do
end
end
- describe '.insert_all!' do
+ describe 'methods using exec_insert_all on the connection', :request_store do
+ let(:model_class) do
+ Class.new(ApplicationRecord) do
+ self.table_name = "_test_connection_proxy_insert_all"
+ end
+ end
+
+ let(:session) { Gitlab::Database::LoadBalancing::Session.new }
+
before do
ActiveRecord::Schema.define do
- create_table :_test_connection_proxy_bulk_insert, force: true do |t|
- t.string :name, null: true
+ create_table :_test_connection_proxy_insert_all, force: true do |t|
+ t.string :name, null: false
+ t.index :name, unique: true
end
end
+
+ allow(Gitlab::Database::LoadBalancing::Session).to receive(:current)
+ .and_return(session)
end
after do
ActiveRecord::Schema.define do
- drop_table :_test_connection_proxy_bulk_insert, force: true
+ drop_table :_test_connection_proxy_insert_all, force: true
end
end
- let(:model_class) do
- Class.new(ApplicationRecord) do
- self.table_name = "_test_connection_proxy_bulk_insert"
+ describe '#upsert' do
+ it 'upserts a record and marks the session to stick to the primary' do
+ expect { 2.times { model_class.upsert({ name: 'test' }, unique_by: :name) } }
+ .to change { model_class.count }.from(0).to(1)
+ .and change { session.use_primary? }.from(false).to(true)
end
end
- it 'inserts data in bulk' do
- expect(model_class).to receive(:connection)
- .at_least(:once)
- .and_return(proxy)
+ describe '#insert_all!' do
+ it 'inserts multiple records and marks the session to stick to the primary' do
+ expect { model_class.insert_all([{ name: 'one' }, { name: 'two' }]) }
+ .to change { model_class.count }.from(0).to(2)
+ .and change { session.use_primary? }.from(false).to(true)
+ end
+ end
- expect(proxy).to receive(:write_using_load_balancer)
- .at_least(:once)
- .and_call_original
-
- expect do
- model_class.insert_all! [
- { name: "item1" },
- { name: "item2" }
- ]
- end.to change { model_class.count }.by(2)
+ describe '#insert' do
+ it 'inserts a single record and marks the session to stick to the primary' do
+ expect { model_class.insert({ name: 'single' }) }
+ .to change { model_class.count }.from(0).to(1)
+ .and change { session.use_primary? }.from(false).to(true)
+ end
end
end
diff --git a/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
index 3c7819c04b6..34eb64997c1 100644
--- a/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
@@ -487,46 +487,6 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
end
end
- describe 'primary connection re-use', :reestablished_active_record_base, :add_ci_connection do
- let(:model) { Ci::ApplicationRecord }
-
- describe '#read' do
- it 'returns ci replica connection' do
- expect { |b| lb.read(&b) }.to yield_with_args do |args|
- expect(args.pool.db_config.name).to eq('ci_replica')
- end
- end
-
- context 'when GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci=main' do
- it 'returns ci replica connection' do
- stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', 'main')
-
- expect { |b| lb.read(&b) }.to yield_with_args do |args|
- expect(args.pool.db_config.name).to eq('ci_replica')
- end
- end
- end
- end
-
- describe '#read_write' do
- it 'returns Ci::ApplicationRecord connection' do
- expect { |b| lb.read_write(&b) }.to yield_with_args do |args|
- expect(args.pool.db_config.name).to eq('ci')
- end
- end
-
- context 'when GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci=main' do
- it 'returns ActiveRecord::Base connection' do
- stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', 'main')
-
- expect { |b| lb.read_write(&b) }.to yield_with_args do |args|
- expect(args.pool.db_config.name).to eq('main')
- end
- end
- end
- end
- end
-
describe '#wal_diff' do
it 'returns the diff between two write locations' do
loc1 = lb.send(:get_write_location, lb.pool.connection)
diff --git a/spec/lib/gitlab/database/load_balancing/setup_spec.rb b/spec/lib/gitlab/database/load_balancing/setup_spec.rb
index c44637b8d06..fa6d71bca7f 100644
--- a/spec/lib/gitlab/database/load_balancing/setup_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/setup_spec.rb
@@ -122,123 +122,68 @@ RSpec.describe Gitlab::Database::LoadBalancing::Setup do
context 'uses correct base models', :reestablished_active_record_base do
using RSpec::Parameterized::TableSyntax
- where do
- {
- "it picks a dedicated CI connection" => {
- env_GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci: nil,
- request_store_active: false,
- ff_force_no_sharing_primary_model: false,
- expectations: {
- main: { read: 'main_replica', write: 'main' },
- ci: { read: 'ci_replica', write: 'ci' }
- }
- },
- "with re-use of primary connection it uses CI connection for reads" => {
- env_GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci: 'main',
- request_store_active: false,
- ff_force_no_sharing_primary_model: false,
- expectations: {
- main: { read: 'main_replica', write: 'main' },
- ci: { read: 'ci_replica', write: 'main' }
- }
- },
- "with re-use and FF force_no_sharing_primary_model enabled with RequestStore it sticks FF and uses CI connection for reads and writes" => {
- env_GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci: 'main',
- request_store_active: true,
- ff_force_no_sharing_primary_model: true,
- expectations: {
- main: { read: 'main_replica', write: 'main' },
- ci: { read: 'ci_replica', write: 'ci' }
- }
- },
- "with re-use and FF force_no_sharing_primary_model enabled without RequestStore it doesn't use FF and uses CI connection for reads only" => {
- env_GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci: 'main',
- request_store_active: true,
- ff_force_no_sharing_primary_model: false,
- expectations: {
- main: { read: 'main_replica', write: 'main' },
- ci: { read: 'ci_replica', write: 'main' }
- }
- }
- }
- end
-
- with_them do
- let(:ci_class) do
- Class.new(ActiveRecord::Base) do
- def self.name
- 'Ci::ApplicationRecordTemporary'
- end
-
- establish_connection ActiveRecord::DatabaseConfigurations::HashConfig.new(
- Rails.env,
- 'ci',
- ActiveRecord::Base.connection_db_config.configuration_hash
- )
+ let(:ci_class) do
+ Class.new(ActiveRecord::Base) do
+ def self.name
+ 'Ci::ApplicationRecordTemporary'
end
- end
- let(:models) do
- {
- main: ActiveRecord::Base,
- ci: ci_class
- }
+ establish_connection ActiveRecord::DatabaseConfigurations::HashConfig.new(
+ Rails.env,
+ 'ci',
+ ActiveRecord::Base.connection_db_config.configuration_hash
+ )
end
+ end
- around do |example|
- if request_store_active
- Gitlab::WithRequestStore.with_request_store do
- stub_feature_flags(force_no_sharing_primary_model: ff_force_no_sharing_primary_model)
- RequestStore.clear!
-
- example.run
- end
- else
- example.run
- end
- end
+ let(:models) do
+ {
+ main: ActiveRecord::Base,
+ ci: ci_class
+ }
+ end
- before do
- allow(Gitlab).to receive(:dev_or_test_env?).and_return(false)
+ before do
+ allow(Gitlab).to receive(:dev_or_test_env?).and_return(false)
- # Rewrite `class_attribute` to use rspec mocking and prevent modifying the objects
- allow_next_instance_of(described_class) do |setup|
- allow(setup).to receive(:configure_connection)
+ # Rewrite `class_attribute` to use rspec mocking and prevent modifying the objects
+ allow_next_instance_of(described_class) do |setup|
+ allow(setup).to receive(:configure_connection)
- allow(setup).to receive(:setup_class_attribute) do |attribute, value|
- allow(setup.model).to receive(attribute) { value }
- end
+ allow(setup).to receive(:setup_class_attribute) do |attribute, value|
+ allow(setup.model).to receive(attribute) { value }
end
+ end
- stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', env_GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci)
-
- # Make load balancer to force init with a dedicated replicas connections
- models.each do |_, model|
- described_class.new(model).tap do |subject|
- subject.configuration.hosts = [subject.configuration.replica_db_config.host]
- subject.setup
- end
+ # Make load balancer to force init with a dedicated replicas connections
+ models.each do |_, model|
+ described_class.new(model).tap do |subject|
+ subject.configuration.hosts = [subject.configuration.db_config.host]
+ subject.setup
end
end
+ end
- it 'results match expectations' do
- result = models.transform_values do |model|
- load_balancer = model.connection.instance_variable_get(:@load_balancer)
-
- {
- read: load_balancer.read { |connection| connection.pool.db_config.name },
- write: load_balancer.read_write { |connection| connection.pool.db_config.name }
- }
- end
+ it 'results match expectations' do
+ result = models.transform_values do |model|
+ load_balancer = model.connection.instance_variable_get(:@load_balancer)
- expect(result).to eq(expectations)
+ {
+ read: load_balancer.read { |connection| connection.pool.db_config.name },
+ write: load_balancer.read_write { |connection| connection.pool.db_config.name }
+ }
end
- it 'does return load_balancer assigned to a given connection' do
- models.each do |name, model|
- expect(model.load_balancer.name).to eq(name)
- expect(model.sticking.instance_variable_get(:@load_balancer)).to eq(model.load_balancer)
- end
+ expect(result).to eq({
+ main: { read: 'main_replica', write: 'main' },
+ ci: { read: 'ci_replica', write: 'ci' }
+ })
+ end
+
+ it 'does return load_balancer assigned to a given connection' do
+ models.each do |name, model|
+ expect(model.load_balancer.name).to eq(name)
+ expect(model.sticking.instance_variable_get(:@load_balancer)).to eq(model.load_balancer)
end
end
end
diff --git a/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb
index 9acf80e684f..b7915e6cf69 100644
--- a/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb
@@ -57,6 +57,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
run_middleware
expect(job['wal_locations']).to be_nil
+ expect(job['wal_location_source']).to be_nil
end
include_examples 'job data consistency'
@@ -96,6 +97,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
run_middleware
expect(job['wal_locations']).to eq(expected_location)
+ expect(job['wal_location_source']).to eq(:replica)
end
include_examples 'job data consistency'
@@ -120,6 +122,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
run_middleware
expect(job['wal_locations']).to eq(expected_location)
+ expect(job['wal_location_source']).to eq(:primary)
end
include_examples 'job data consistency'
@@ -162,6 +165,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do
run_middleware
expect(job['wal_locations']).to eq(wal_locations)
+ expect(job['wal_location_source']).to be_nil
end
end
diff --git a/spec/lib/gitlab/database/load_balancing/transaction_leaking_spec.rb b/spec/lib/gitlab/database/load_balancing/transaction_leaking_spec.rb
new file mode 100644
index 00000000000..30e5fbbd803
--- /dev/null
+++ b/spec/lib/gitlab/database/load_balancing/transaction_leaking_spec.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Load balancer behavior with errors inside a transaction', :redis, :delete do
+ let(:model) { ApplicationRecord }
+ let(:db_host) { model.connection_pool.db_config.host }
+
+ let(:test_table_name) { '_test_foo' }
+
+ before do
+ # Patch in our load balancer config, simply pointing at the test database twice
+ allow(Gitlab::Database::LoadBalancing::Configuration).to receive(:for_model) do |base_model|
+ Gitlab::Database::LoadBalancing::Configuration.new(base_model, [db_host, db_host])
+ end
+
+ Gitlab::Database::LoadBalancing::Setup.new(ApplicationRecord).setup
+
+ model.connection.execute(<<~SQL)
+ CREATE TABLE IF NOT EXISTS #{test_table_name} (id SERIAL PRIMARY KEY, value INTEGER)
+ SQL
+ end
+
+ after do
+ model.connection.execute(<<~SQL)
+ DROP TABLE IF EXISTS #{test_table_name}
+ SQL
+ end
+
+ def execute(conn)
+ conn.execute("INSERT INTO #{test_table_name} (value) VALUES (1)")
+ backend_pid = conn.execute("SELECT pg_backend_pid() AS pid").to_a.first['pid']
+
+ # This will result in a PG error, which is not raised.
+ # Instead, we retry the statement on a fresh connection (where the pid is different and it does nothing)
+ # and the load balancer continues with a fresh connection and no transaction if a transaction was open previously
+ conn.execute(<<~SQL)
+ SELECT CASE
+ WHEN pg_backend_pid() = #{backend_pid} THEN
+ pg_terminate_backend(#{backend_pid})
+ END
+ SQL
+
+ # This statement will execute on a new connection, and violate transaction semantics
+ # if we were in a transaction before
+ conn.execute("INSERT INTO #{test_table_name} (value) VALUES (2)")
+ end
+
+ it 'logs a warning when violating transaction semantics with writes' do
+ conn = model.connection
+
+ expect(::Gitlab::Database::LoadBalancing::Logger).to receive(:warn).with(hash_including(event: :transaction_leak))
+
+ conn.transaction do
+ expect(conn).to be_transaction_open
+
+ execute(conn)
+
+ expect(conn).not_to be_transaction_open
+ end
+
+ values = conn.execute("SELECT value FROM #{test_table_name}").to_a.map { |row| row['value'] }
+ expect(values).to contain_exactly(2) # Does not include 1 because the transaction was aborted and leaked
+ end
+
+ it 'does not log a warning when no transaction is open to be leaked' do
+ conn = model.connection
+
+ expect(::Gitlab::Database::LoadBalancing::Logger)
+ .not_to receive(:warn).with(hash_including(event: :transaction_leak))
+
+ expect(conn).not_to be_transaction_open
+
+ execute(conn)
+
+ expect(conn).not_to be_transaction_open
+
+ values = conn.execute("SELECT value FROM #{test_table_name}").to_a.map { |row| row['value'] }
+ expect(values).to contain_exactly(1, 2) # Includes both rows because there was no transaction to roll back
+ end
+end
diff --git a/spec/lib/gitlab/database/migration_helpers/announce_database_spec.rb b/spec/lib/gitlab/database/migration_helpers/announce_database_spec.rb
new file mode 100644
index 00000000000..57c51c9d9c2
--- /dev/null
+++ b/spec/lib/gitlab/database/migration_helpers/announce_database_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::MigrationHelpers::AnnounceDatabase do
+ let(:migration) do
+ ActiveRecord::Migration.new('MyMigration', 1111).extend(described_class)
+ end
+
+ describe '#announce' do
+ it 'prefixes message with database name' do
+ expect { migration.announce('migrating') }.to output(/^main: == 1111 MyMigration: migrating/).to_stdout
+ end
+ end
+
+ describe '#say' do
+ it 'prefixes message with database name' do
+ expect { migration.say('transaction_open?()') }.to output(/^main: -- transaction_open?()/).to_stdout
+ end
+
+ it 'prefixes subitem message with database name' do
+ expect { migration.say('0.0000s', true) }.to output(/^main: -> 0.0000s/).to_stdout
+ end
+ end
+
+ describe '#write' do
+ it 'does not prefix empty write' do
+ expect { migration.write }.to output(/^$/).to_stdout
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 04fe1fad10e..e09016b2b2b 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -2079,6 +2079,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
t.integer :other_id
t.timestamps
end
+
+ allow(model).to receive(:transaction_open?).and_return(false)
end
context 'when the target table does not exist' do
@@ -2191,6 +2193,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
t.timestamps
end
+ allow(model).to receive(:transaction_open?).and_return(false)
+
model.initialize_conversion_of_integer_to_bigint(table, columns, primary_key: primary_key)
model.backfill_conversion_of_integer_to_bigint(table, columns, primary_key: primary_key)
end
@@ -2242,10 +2246,20 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
}
end
+ let(:migration_attributes) do
+ configuration.merge(gitlab_schema: Gitlab::Database.gitlab_schemas_for_connection(model.connection).first)
+ end
+
+ before do
+ allow(model).to receive(:transaction_open?).and_return(false)
+ end
+
subject(:ensure_batched_background_migration_is_finished) { model.ensure_batched_background_migration_is_finished(**configuration) }
it 'raises an error when migration exists and is not marked as finished' do
- create(:batched_background_migration, :active, configuration)
+ expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!).twice
+
+ create(:batched_background_migration, :active, migration_attributes)
allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
allow(runner).to receive(:finalize).with(job_class_name, table, column_name, job_arguments).and_return(false)
@@ -2255,7 +2269,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
.to raise_error "Expected batched background migration for the given configuration to be marked as 'finished', but it is 'active':" \
"\t#{configuration}" \
"\n\n" \
- "Finalize it manually by running" \
+ "Finalize it manually by running the following command in a `bash` or `sh` shell:" \
"\n\n" \
"\tsudo gitlab-rake gitlab:background_migrations:finalize[CopyColumnUsingBackgroundMigrationJob,events,id,'[[\"id\"]\\,[\"id_convert_to_bigint\"]\\,null]']" \
"\n\n" \
@@ -2265,13 +2279,19 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
it 'does not raise error when migration exists and is marked as finished' do
- create(:batched_background_migration, :finished, configuration)
+ expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!)
+
+ create(:batched_background_migration, :finished, migration_attributes)
expect { ensure_batched_background_migration_is_finished }
.not_to raise_error
end
it 'logs a warning when migration does not exist' do
+ expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!)
+
+ create(:batched_background_migration, :active, migration_attributes.merge(gitlab_schema: :gitlab_something_else))
+
expect(Gitlab::AppLogger).to receive(:warn)
.with("Could not find batched background migration for the given configuration: #{configuration}")
@@ -2280,6 +2300,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
it 'finalizes the migration' do
+ expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!).twice
+
migration = create(:batched_background_migration, :active, configuration)
allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
@@ -2291,6 +2313,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
context 'when the flag finalize is false' do
it 'does not finalize the migration' do
+ expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!)
+
create(:batched_background_migration, :active, configuration)
allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
@@ -3257,4 +3281,20 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
model.rename_constraint(:test_table, :fk_old_name, :fk_new_name)
end
end
+
+ describe '#drop_sequence' do
+ it "executes the statement to drop the sequence" do
+ expect(model).to receive(:execute).with /ALTER TABLE "test_table" ALTER COLUMN "test_column" DROP DEFAULT;\nDROP SEQUENCE IF EXISTS "test_table_id_seq"/
+
+ model.drop_sequence(:test_table, :test_column, :test_table_id_seq)
+ end
+ end
+
+ describe '#add_sequence' do
+ it "executes the statement to add the sequence" do
+ expect(model).to receive(:execute).with "CREATE SEQUENCE \"test_table_id_seq\" START 1;\nALTER TABLE \"test_table\" ALTER COLUMN \"test_column\" SET DEFAULT nextval(\'test_table_id_seq\')\n"
+
+ model.add_sequence(:test_table, :test_column, :test_table_id_seq, 1)
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb b/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
index b0caa21e01a..c423340a572 100644
--- a/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
@@ -444,7 +444,7 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do
it 'does restore connection hierarchy' do
expect_next_instances_of(job_class, 1..) do |job|
expect(job).to receive(:perform) do
- validate_connections!
+ validate_connections_stack!
end
end
diff --git a/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb b/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
index d1a66036149..f3414727245 100644
--- a/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
@@ -3,8 +3,14 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers do
+ let(:migration_class) do
+ Class.new(ActiveRecord::Migration[6.1])
+ .include(described_class)
+ .include(Gitlab::Database::Migrations::ReestablishedConnectionStack)
+ end
+
let(:migration) do
- ActiveRecord::Migration.new.extend(described_class)
+ migration_class.new
end
describe '#queue_batched_background_migration' do
@@ -12,6 +18,9 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
before do
allow(Gitlab::Database::PgClass).to receive(:for_table).and_call_original
+ expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!)
+
+ allow(migration).to receive(:transaction_open?).and_return(false)
end
context 'when such migration already exists' do
@@ -27,7 +36,8 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
batch_class_name: 'MyBatchClass',
batch_size: 200,
sub_batch_size: 20,
- job_arguments: [[:id], [:id_convert_to_bigint]]
+ job_arguments: [[:id], [:id_convert_to_bigint]],
+ gitlab_schema: :gitlab_ci
)
expect do
@@ -41,7 +51,8 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
batch_max_value: 1000,
batch_class_name: 'MyBatchClass',
batch_size: 100,
- sub_batch_size: 10)
+ sub_batch_size: 10,
+ gitlab_schema: :gitlab_ci)
end.not_to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }
end
end
@@ -60,7 +71,8 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
batch_class_name: 'MyBatchClass',
batch_size: 100,
max_batch_size: 10000,
- sub_batch_size: 10)
+ sub_batch_size: 10,
+ gitlab_schema: :gitlab_ci)
end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
expect(Gitlab::Database::BackgroundMigration::BatchedMigration.last).to have_attributes(
@@ -76,7 +88,8 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
sub_batch_size: 10,
job_arguments: %w[],
status_name: :active,
- total_tuple_count: pgclass_info.cardinality_estimate)
+ total_tuple_count: pgclass_info.cardinality_estimate,
+ gitlab_schema: 'gitlab_ci')
end
context 'when the job interval is lower than the minimum' do
@@ -160,6 +173,31 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
expect(Gitlab::Database::BackgroundMigration::BatchedMigration.last).to be_finished
end
+
+ context 'when within transaction' do
+ before do
+ allow(migration).to receive(:transaction_open?).and_return(true)
+ end
+
+ it 'does raise an exception' do
+ expect { migration.queue_batched_background_migration('MyJobClass', :events, :id, job_interval: 5.minutes)}
+ .to raise_error /`queue_batched_background_migration` cannot be run inside a transaction./
+ end
+ end
+ end
+ end
+
+ context 'when gitlab_schema is not given' do
+ it 'fetches gitlab_schema from the migration context' do
+ expect(migration).to receive(:gitlab_schema_from_context).and_return(:gitlab_ci)
+
+ expect do
+ migration.queue_batched_background_migration('MyJobClass', :events, :id, job_interval: 5.minutes)
+ end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
+
+ created_migration = Gitlab::Database::BackgroundMigration::BatchedMigration.last
+
+ expect(created_migration.gitlab_schema).to eq('gitlab_ci')
end
end
end
@@ -167,6 +205,12 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
describe '#finalize_batched_background_migration' do
let!(:batched_migration) { create(:batched_background_migration, job_class_name: 'MyClass', table_name: :projects, column_name: :id, job_arguments: []) }
+ before do
+ expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!)
+
+ allow(migration).to receive(:transaction_open?).and_return(false)
+ end
+
it 'finalizes the migration' do
allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
expect(runner).to receive(:finalize).with('MyClass', :projects, :id, [])
@@ -183,24 +227,162 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
end
end
- context 'when uses a CI connection', :reestablished_active_record_base do
+ context 'when within transaction' do
before do
- skip_if_multiple_databases_not_setup
+ allow(migration).to receive(:transaction_open?).and_return(true)
+ end
- ActiveRecord::Base.establish_connection(:ci) # rubocop:disable Database/EstablishConnection
+ it 'does raise an exception' do
+ expect { migration.finalize_batched_background_migration(job_class_name: 'MyJobClass', table_name: :projects, column_name: :id, job_arguments: []) }
+ .to raise_error /`finalize_batched_background_migration` cannot be run inside a transaction./
end
+ end
- it 'raises an exception' do
- ci_migration = create(:batched_background_migration, :active)
+ context 'when running migration in reconfigured ActiveRecord::Base context' do
+ it_behaves_like 'reconfigures connection stack', 'ci' do
+ before do
+ create(:batched_background_migration,
+ job_class_name: 'Ci::MyClass',
+ table_name: :ci_builds,
+ column_name: :id,
+ job_arguments: [],
+ gitlab_schema: :gitlab_ci)
+ end
+
+ context 'when restrict_gitlab_migration is set to gitlab_ci' do
+ it 'finalizes the migration' do
+ migration_class.include(Gitlab::Database::MigrationHelpers::RestrictGitlabSchema)
+ migration_class.restrict_gitlab_migration gitlab_schema: :gitlab_ci
+
+ allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
+ expect(runner).to receive(:finalize).with('Ci::MyClass', :ci_builds, :id, []) do
+ validate_connections_stack!
+ end
+ end
+
+ migration.finalize_batched_background_migration(
+ job_class_name: 'Ci::MyClass', table_name: :ci_builds, column_name: :id, job_arguments: [])
+ end
+ end
+
+ context 'when restrict_gitlab_migration is set to gitlab_main' do
+ it 'does not find any migrations' do
+ migration_class.include(Gitlab::Database::MigrationHelpers::RestrictGitlabSchema)
+ migration_class.restrict_gitlab_migration gitlab_schema: :gitlab_main
+
+ expect do
+ migration.finalize_batched_background_migration(
+ job_class_name: 'Ci::MyClass', table_name: :ci_builds, column_name: :id, job_arguments: [])
+ end.to raise_error /Could not find batched background migration/
+ end
+ end
+
+ context 'when no restrict is set' do
+ it 'does not find any migrations' do
+ expect do
+ migration.finalize_batched_background_migration(
+ job_class_name: 'Ci::MyClass', table_name: :ci_builds, column_name: :id, job_arguments: [])
+ end.to raise_error /Could not find batched background migration/
+ end
+ end
+ end
+ end
+
+ context 'when within transaction' do
+ before do
+ allow(migration).to receive(:transaction_open?).and_return(true)
+ end
+
+ it 'does raise an exception' do
+ expect { migration.finalize_batched_background_migration(job_class_name: 'MyJobClass', table_name: :projects, column_name: :id, job_arguments: []) }
+ .to raise_error /`finalize_batched_background_migration` cannot be run inside a transaction./
+ end
+ end
+ end
+
+ describe '#delete_batched_background_migration' do
+ let(:transaction_open) { false }
+
+ before do
+ expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!)
+
+ allow(migration).to receive(:transaction_open?).and_return(transaction_open)
+ end
+
+ context 'when migration exists' do
+ it 'deletes it' do
+ create(
+ :batched_background_migration,
+ job_class_name: 'MyJobClass',
+ table_name: :projects,
+ column_name: :id,
+ interval: 10.minutes,
+ min_value: 5,
+ max_value: 1005,
+ batch_class_name: 'MyBatchClass',
+ batch_size: 200,
+ sub_batch_size: 20,
+ job_arguments: [[:id], [:id_convert_to_bigint]]
+ )
expect do
- migration.finalize_batched_background_migration(
- job_class_name: ci_migration.job_class_name,
- table_name: ci_migration.table_name,
- column_name: ci_migration.column_name,
- job_arguments: ci_migration.job_arguments
- )
- end.to raise_error /is currently not supported when running in decomposed/
+ migration.delete_batched_background_migration(
+ 'MyJobClass',
+ :projects,
+ :id,
+ [[:id], [:id_convert_to_bigint]])
+ end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.from(1).to(0)
+ end
+ end
+
+ context 'when migration does not exist' do
+ it 'does nothing' do
+ create(
+ :batched_background_migration,
+ job_class_name: 'SomeOtherJobClass',
+ table_name: :projects,
+ column_name: :id,
+ interval: 10.minutes,
+ min_value: 5,
+ max_value: 1005,
+ batch_class_name: 'MyBatchClass',
+ batch_size: 200,
+ sub_batch_size: 20,
+ job_arguments: [[:id], [:id_convert_to_bigint]]
+ )
+
+ expect do
+ migration.delete_batched_background_migration(
+ 'MyJobClass',
+ :projects,
+ :id,
+ [[:id], [:id_convert_to_bigint]])
+ end.not_to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }
+ end
+ end
+
+ context 'when within transaction' do
+ let(:transaction_open) { true }
+
+ it 'raises an exception' do
+ expect { migration.delete_batched_background_migration('MyJobClass', :projects, :id, [[:id], [:id_convert_to_bigint]]) }
+ .to raise_error /`#delete_batched_background_migration` cannot be run inside a transaction./
+ end
+ end
+ end
+
+ describe '#gitlab_schema_from_context' do
+ context 'when allowed_gitlab_schemas is not available' do
+ it 'defaults to :gitlab_main' do
+ expect(migration.gitlab_schema_from_context).to eq(:gitlab_main)
+ end
+ end
+
+ context 'when allowed_gitlab_schemas is available' do
+ it 'uses schema from allowed_gitlab_schema' do
+ expect(migration).to receive(:allowed_gitlab_schemas).and_return([:gitlab_ci])
+
+ expect(migration.gitlab_schema_from_context).to eq(:gitlab_ci)
end
end
end
diff --git a/spec/lib/gitlab/database/migrations/reestablished_connection_stack_spec.rb b/spec/lib/gitlab/database/migrations/reestablished_connection_stack_spec.rb
index cfb308c63e4..d197f39be40 100644
--- a/spec/lib/gitlab/database/migrations/reestablished_connection_stack_spec.rb
+++ b/spec/lib/gitlab/database/migrations/reestablished_connection_stack_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe Gitlab::Database::Migrations::ReestablishedConnectionStack do
it_behaves_like "reconfigures connection stack", db_config_name do
it 'does restore connection hierarchy' do
model.with_restored_connection_stack do
- validate_connections!
+ validate_connections_stack!
end
end
diff --git a/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb b/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb
index fbfff1268cc..2f3d44f6f8f 100644
--- a/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb
+++ b/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb
@@ -4,7 +4,6 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::Migrations::TestBatchedBackgroundRunner, :freeze_time do
include Gitlab::Database::MigrationHelpers
- include Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers
include Database::MigrationTestingHelpers
let(:result_dir) { Dir.mktmpdir }
@@ -13,6 +12,10 @@ RSpec.describe Gitlab::Database::Migrations::TestBatchedBackgroundRunner, :freez
FileUtils.rm_rf(result_dir)
end
+ let(:migration) do
+ ActiveRecord::Migration.new.extend(Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers)
+ end
+
let(:connection) { ApplicationRecord.connection }
let(:table_name) { "_test_column_copying"}
@@ -26,11 +29,13 @@ RSpec.describe Gitlab::Database::Migrations::TestBatchedBackgroundRunner, :freez
insert into #{table_name} (id) select i from generate_series(1, 1000) g(i);
SQL
+
+ allow(migration).to receive(:transaction_open?).and_return(false)
end
context 'running a real background migration' do
it 'runs sampled jobs from the batched background migration' do
- queue_batched_background_migration('CopyColumnUsingBackgroundMigrationJob',
+ migration.queue_batched_background_migration('CopyColumnUsingBackgroundMigrationJob',
table_name, :id,
:id, :data,
batch_size: 100,
@@ -46,7 +51,9 @@ RSpec.describe Gitlab::Database::Migrations::TestBatchedBackgroundRunner, :freez
let(:migration_name) { 'TestBackgroundMigration' }
before do
- queue_batched_background_migration(migration_name, table_name, :id, job_interval: 5.minutes, batch_size: 100)
+ migration.queue_batched_background_migration(
+ migration_name, table_name, :id, job_interval: 5.minutes, batch_size: 100
+ )
end
it 'samples jobs' do
@@ -67,13 +74,13 @@ RSpec.describe Gitlab::Database::Migrations::TestBatchedBackgroundRunner, :freez
travel 3.days
new_migration = define_background_migration('NewMigration') { travel 1.second }
- queue_batched_background_migration('NewMigration', table_name, :id,
+ migration.queue_batched_background_migration('NewMigration', table_name, :id,
job_interval: 5.minutes,
batch_size: 10,
sub_batch_size: 5)
other_new_migration = define_background_migration('NewMigration2') { travel 2.seconds }
- queue_batched_background_migration('NewMigration2', table_name, :id,
+ migration.queue_batched_background_migration('NewMigration2', table_name, :id,
job_interval: 5.minutes,
batch_size: 10,
sub_batch_size: 5)
diff --git a/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb b/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb
index 64dcdb9628a..dca4548a0a3 100644
--- a/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb
@@ -8,7 +8,11 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do
def has_partition(model, month)
Gitlab::Database::PostgresPartition.for_parent_table(model.table_name).any? do |partition|
- Gitlab::Database::Partitioning::TimePartition.from_sql(model.table_name, partition.name, partition.condition).from == month
+ Gitlab::Database::Partitioning::TimePartition.from_sql(
+ model.table_name,
+ partition.name,
+ partition.condition
+ ).from == month
end
end
@@ -16,14 +20,17 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do
subject(:sync_partitions) { described_class.new(model).sync_partitions }
let(:model) { double(partitioning_strategy: partitioning_strategy, table_name: table, connection: connection) }
- let(:partitioning_strategy) { double(missing_partitions: partitions, extra_partitions: [], after_adding_partitions: nil) }
let(:connection) { ActiveRecord::Base.connection }
let(:table) { "issues" }
+ let(:partitioning_strategy) do
+ double(missing_partitions: partitions, extra_partitions: [], after_adding_partitions: nil)
+ end
before do
allow(connection).to receive(:table_exists?).and_call_original
allow(connection).to receive(:table_exists?).with(table).and_return(true)
allow(connection).to receive(:execute).and_call_original
+ expect(partitioning_strategy).to receive(:validate_and_fix)
stub_exclusive_lease(described_class::MANAGEMENT_LEASE_KEY % table, timeout: described_class::LEASE_TIMEOUT)
end
@@ -84,13 +91,16 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do
let(:manager) { described_class.new(model) }
let(:model) { double(partitioning_strategy: partitioning_strategy, table_name: table, connection: connection) }
- let(:partitioning_strategy) { double(extra_partitions: extra_partitions, missing_partitions: [], after_adding_partitions: nil) }
let(:connection) { ActiveRecord::Base.connection }
let(:table) { "foo" }
+ let(:partitioning_strategy) do
+ double(extra_partitions: extra_partitions, missing_partitions: [], after_adding_partitions: nil)
+ end
before do
allow(connection).to receive(:table_exists?).and_call_original
allow(connection).to receive(:table_exists?).with(table).and_return(true)
+ expect(partitioning_strategy).to receive(:validate_and_fix)
stub_exclusive_lease(described_class::MANAGEMENT_LEASE_KEY % table, timeout: described_class::LEASE_TIMEOUT)
end
@@ -107,6 +117,24 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do
sync_partitions
end
+
+ it 'logs an error if the partitions are not detachable' do
+ allow(Gitlab::Database::PostgresForeignKey).to receive(:by_referenced_table_identifier).with("public.foo")
+ .and_return([double(name: "fk_1", constrained_table_identifier: "public.constrainted_table_1")])
+
+ expect(Gitlab::AppLogger).to receive(:error).with(
+ {
+ message: "Failed to create / detach partition(s)",
+ connection_name: "main",
+ exception_class: Gitlab::Database::Partitioning::PartitionManager::UnsafeToDetachPartitionError,
+ exception_message:
+ "Cannot detach foo1, it would block while checking foreign key fk_1 on public.constrainted_table_1",
+ table_name: "foo"
+ }
+ )
+
+ sync_partitions
+ end
end
describe '#detach_partitions' do
diff --git a/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb b/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
index 1cec0463055..d8b06ee1a5d 100644
--- a/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
@@ -37,12 +37,75 @@ RSpec.describe Gitlab::Database::Partitioning::SlidingListStrategy do
describe '#current_partitions' do
it 'detects both partitions' do
expect(strategy.current_partitions).to eq([
- Gitlab::Database::Partitioning::SingleNumericListPartition.new(table_name, 1, partition_name: '_test_partitioned_test_1'),
- Gitlab::Database::Partitioning::SingleNumericListPartition.new(table_name, 2, partition_name: '_test_partitioned_test_2')
+ Gitlab::Database::Partitioning::SingleNumericListPartition.new(
+ table_name, 1, partition_name: '_test_partitioned_test_1'
+ ),
+ Gitlab::Database::Partitioning::SingleNumericListPartition.new(
+ table_name, 2, partition_name: '_test_partitioned_test_2'
+ )
])
end
end
+ describe '#validate_and_fix' do
+ context 'feature flag is disabled' do
+ before do
+ stub_feature_flags(fix_sliding_list_partitioning: false)
+ end
+
+ it 'does not try to fix the default partition value' do
+ connection.change_column_default(model.table_name, strategy.partitioning_key, 3)
+ expect(strategy.model.connection).not_to receive(:change_column_default)
+ strategy.validate_and_fix
+ end
+ end
+
+ context 'feature flag is enabled' do
+ before do
+ stub_feature_flags(fix_sliding_list_partitioning: true)
+ end
+
+ it 'does not call change_column_default if the partitioning in a valid state' do
+ expect(strategy.model.connection).not_to receive(:change_column_default)
+
+ strategy.validate_and_fix
+ end
+
+ it 'calls change_column_default on partition_key with the most default partition number' do
+ connection.change_column_default(model.table_name, strategy.partitioning_key, 1)
+
+ expect(Gitlab::AppLogger).to receive(:warn).with(
+ message: 'Fixed default value of sliding_list_strategy partitioning_key',
+ connection_name: 'main',
+ old_value: 1,
+ new_value: 2,
+ table_name: table_name,
+ column: strategy.partitioning_key
+ )
+
+ expect(strategy.model.connection).to receive(:change_column_default).with(
+ model.table_name, strategy.partitioning_key, 2
+ ).and_call_original
+
+ strategy.validate_and_fix
+ end
+
+ it 'does not change the default column if it has been changed in the meanwhile by another process' do
+ expect(strategy).to receive(:current_default_value).and_return(1, 2)
+
+ expect(strategy.model.connection).not_to receive(:change_column_default)
+
+ expect(Gitlab::AppLogger).to receive(:warn).with(
+ message: 'Table partitions or partition key default value have been changed by another process',
+ table_name: table_name,
+ default_value: 2
+ )
+
+ strategy.validate_and_fix
+ end
+ end
+ end
+
describe '#active_partition' do
it 'is the partition with the largest value' do
expect(strategy.active_partition.value).to eq(2)
@@ -157,6 +220,7 @@ RSpec.describe Gitlab::Database::Partitioning::SlidingListStrategy do
end.not_to raise_error
end
end
+
context 'redirecting inserts as the active partition changes' do
let(:model) do
Class.new(ApplicationRecord) do
diff --git a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb
index 0d687db0f96..62c5ead855a 100644
--- a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_ana
process_sql(ActiveRecord::Base, "SELECT 1 FROM projects")
end
- context 'properly observes all queries', :add_ci_connection, :request_store do
+ context 'properly observes all queries', :add_ci_connection do
using RSpec::Parameterized::TableSyntax
where do
@@ -28,8 +28,7 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_ana
expectations: {
gitlab_schemas: "gitlab_main",
db_config_name: "main"
- },
- setup: nil
+ }
},
"for query accessing gitlab_ci and gitlab_main" => {
model: ApplicationRecord,
@@ -37,8 +36,7 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_ana
expectations: {
gitlab_schemas: "gitlab_ci,gitlab_main",
db_config_name: "main"
- },
- setup: nil
+ }
},
"for query accessing gitlab_ci and gitlab_main the gitlab_schemas is always ordered" => {
model: ApplicationRecord,
@@ -46,8 +44,7 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_ana
expectations: {
gitlab_schemas: "gitlab_ci,gitlab_main",
db_config_name: "main"
- },
- setup: nil
+ }
},
"for query accessing CI database" => {
model: Ci::ApplicationRecord,
@@ -56,62 +53,6 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_ana
gitlab_schemas: "gitlab_ci",
db_config_name: "ci"
}
- },
- "for query accessing CI database with re-use and disabled sharing" => {
- model: Ci::ApplicationRecord,
- sql: "SELECT 1 FROM ci_builds",
- expectations: {
- gitlab_schemas: "gitlab_ci",
- db_config_name: "ci",
- ci_dedicated_primary_connection: true
- },
- setup: ->(_) do
- skip_if_multiple_databases_not_setup
- stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', 'main')
- stub_feature_flags(force_no_sharing_primary_model: true)
- end
- },
- "for query accessing CI database with re-use and enabled sharing" => {
- model: Ci::ApplicationRecord,
- sql: "SELECT 1 FROM ci_builds",
- expectations: {
- gitlab_schemas: "gitlab_ci",
- db_config_name: "ci",
- ci_dedicated_primary_connection: false
- },
- setup: ->(_) do
- skip_if_multiple_databases_not_setup
- stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', 'main')
- stub_feature_flags(force_no_sharing_primary_model: false)
- end
- },
- "for query accessing CI database without re-use and disabled sharing" => {
- model: Ci::ApplicationRecord,
- sql: "SELECT 1 FROM ci_builds",
- expectations: {
- gitlab_schemas: "gitlab_ci",
- db_config_name: "ci",
- ci_dedicated_primary_connection: true
- },
- setup: ->(_) do
- skip_if_multiple_databases_not_setup
- stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', nil)
- stub_feature_flags(force_no_sharing_primary_model: true)
- end
- },
- "for query accessing CI database without re-use and enabled sharing" => {
- model: Ci::ApplicationRecord,
- sql: "SELECT 1 FROM ci_builds",
- expectations: {
- gitlab_schemas: "gitlab_ci",
- db_config_name: "ci",
- ci_dedicated_primary_connection: true
- },
- setup: ->(_) do
- skip_if_multiple_databases_not_setup
- stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', nil)
- stub_feature_flags(force_no_sharing_primary_model: false)
- end
}
}
end
@@ -122,15 +63,11 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_ana
end
it do
- stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', nil)
-
- instance_eval(&setup) if setup
-
allow(::Ci::ApplicationRecord.load_balancer).to receive(:configuration)
.and_return(Gitlab::Database::LoadBalancing::Configuration.for_model(::Ci::ApplicationRecord))
expect(described_class.schemas_metrics).to receive(:increment)
- .with({ ci_dedicated_primary_connection: anything }.merge(expectations)).and_call_original
+ .with(expectations).and_call_original
process_sql(model, sql)
end
diff --git a/spec/lib/gitlab/database/shared_model_spec.rb b/spec/lib/gitlab/database/shared_model_spec.rb
index 574111f4c01..c88edc17817 100644
--- a/spec/lib/gitlab/database/shared_model_spec.rb
+++ b/spec/lib/gitlab/database/shared_model_spec.rb
@@ -27,6 +27,19 @@ RSpec.describe Gitlab::Database::SharedModel do
end
end
+ it 'raises an error if the connection does not include `:gitlab_shared` schema' do
+ allow(Gitlab::Database)
+ .to receive(:gitlab_schemas_for_connection)
+ .with(new_connection)
+ .and_return([:gitlab_main])
+
+ expect_original_connection_around do
+ expect do
+ described_class.using_connection(new_connection) {}
+ end.to raise_error(/Cannot set `SharedModel` to connection/)
+ end
+ end
+
context 'when multiple connection overrides are nested', :aggregate_failures do
let(:second_connection) { double('connection') }
diff --git a/spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb b/spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb
index 2740664d200..e676e5fe034 100644
--- a/spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb
+++ b/spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb
@@ -77,6 +77,7 @@ RSpec.describe Gitlab::DatabaseImporters::InstanceAdministrators::CreateGroup do
create(:user)
expect(result[:status]).to eq(:success)
+ group.reset
expect(group.members.collect(&:user)).to contain_exactly(user, admin1, admin2)
expect(group.members.collect(&:access_level)).to contain_exactly(
Gitlab::Access::OWNER,
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index 23f4f0e7089..064613074cd 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -160,13 +160,15 @@ RSpec.describe Gitlab::Database do
end
end
- context 'when the connection is LoadBalancing::ConnectionProxy' do
- it 'returns primary_db_config' do
- lb_config = ::Gitlab::Database::LoadBalancing::Configuration.new(ActiveRecord::Base)
- lb = ::Gitlab::Database::LoadBalancing::LoadBalancer.new(lb_config)
- proxy = ::Gitlab::Database::LoadBalancing::ConnectionProxy.new(lb)
-
- expect(described_class.db_config_for_connection(proxy)).to eq(lb_config.primary_db_config)
+ context 'when the connection is LoadBalancing::ConnectionProxy', :database_replica do
+ it 'returns primary db config even if ambiguous queries default to replica' do
+ Gitlab::Database::LoadBalancing::Session.current.use_primary!
+ primary_config = described_class.db_config_for_connection(ActiveRecord::Base.connection)
+
+ Gitlab::Database::LoadBalancing::Session.clear_session
+ Gitlab::Database::LoadBalancing::Session.current.fallback_to_replicas_for_ambiguous_queries do
+ expect(described_class.db_config_for_connection(ActiveRecord::Base.connection)).to eq(primary_config)
+ end
end
end
@@ -222,14 +224,7 @@ RSpec.describe Gitlab::Database do
end
describe '.gitlab_schemas_for_connection' do
- it 'does raise exception for invalid connection' do
- expect { described_class.gitlab_schemas_for_connection(:invalid) }.to raise_error /key not found: "unknown"/
- end
-
it 'does return a valid schema depending on a base model used', :request_store do
- # FF due to lib/gitlab/database/load_balancing/configuration.rb:92
- stub_feature_flags(force_no_sharing_primary_model: true)
-
expect(described_class.gitlab_schemas_for_connection(Project.connection)).to include(:gitlab_main, :gitlab_shared)
expect(described_class.gitlab_schemas_for_connection(Ci::Build.connection)).to include(:gitlab_ci, :gitlab_shared)
end
@@ -282,6 +277,15 @@ RSpec.describe Gitlab::Database do
end
end
end
+
+ it 'does return empty for non-adopted connections' do
+ new_connection = ActiveRecord::Base.postgresql_connection(
+ ActiveRecord::Base.connection_db_config.configuration_hash)
+
+ expect(described_class.gitlab_schemas_for_connection(new_connection)).to be_nil
+ ensure
+ new_connection&.disconnect!
+ end
end
describe '#true_value' do
diff --git a/spec/lib/gitlab/diff/custom_diff_spec.rb b/spec/lib/gitlab/diff/custom_diff_spec.rb
deleted file mode 100644
index 77d2a6cbcd6..00000000000
--- a/spec/lib/gitlab/diff/custom_diff_spec.rb
+++ /dev/null
@@ -1,115 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Diff::CustomDiff do
- include RepoHelpers
-
- let(:project) { create(:project, :repository) }
- let(:repository) { project.repository }
- let(:ipynb_blob) { repository.blob_at('f6b7a707', 'files/ipython/markdown-table.ipynb') }
- let(:blob) { repository.blob_at('HEAD', 'files/ruby/regex.rb') }
-
- describe '#preprocess_before_diff' do
- context 'for ipynb files' do
- it 'transforms the diff' do
- expect(described_class.preprocess_before_diff(ipynb_blob.path, nil, ipynb_blob)).not_to include('cells')
- end
-
- it 'adds the blob to the list of transformed blobs' do
- described_class.preprocess_before_diff(ipynb_blob.path, nil, ipynb_blob)
-
- expect(described_class.transformed_for_diff?(ipynb_blob)).to be_truthy
- end
- end
-
- context 'for other files' do
- it 'returns nil' do
- expect(described_class.preprocess_before_diff(blob.path, nil, blob)).to be_nil
- end
-
- it 'does not add the blob to the list of transformed blobs' do
- described_class.preprocess_before_diff(blob.path, nil, blob)
-
- expect(described_class.transformed_for_diff?(blob)).to be_falsey
- end
- end
-
- context 'timeout' do
- subject { described_class.preprocess_before_diff(ipynb_blob.path, nil, ipynb_blob) }
-
- it 'falls back to nil on timeout' do
- allow(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
- expect(Timeout).to receive(:timeout).and_raise(Timeout::Error)
-
- expect(subject).to be_nil
- end
-
- context 'when in foreground' do
- it 'utilizes timeout for web' do
- expect(Timeout).to receive(:timeout).with(described_class::RENDERED_TIMEOUT_FOREGROUND).and_call_original
-
- expect(subject).not_to include('cells')
- end
-
- it 'increments metrics' do
- counter = Gitlab::Metrics.counter(:ipynb_semantic_diff_timeouts_total, 'desc')
-
- expect(Timeout).to receive(:timeout).and_raise(Timeout::Error)
- expect { subject }.to change { counter.get(source: described_class::FOREGROUND_EXECUTION) }.by(1)
- end
- end
-
- context 'when in background' do
- before do
- allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true)
- end
-
- it 'utilizes longer timeout for sidekiq' do
- expect(Timeout).to receive(:timeout).with(described_class::RENDERED_TIMEOUT_BACKGROUND).and_call_original
-
- expect(subject).not_to include('cells')
- end
-
- it 'increments metrics' do
- counter = Gitlab::Metrics.counter(:ipynb_semantic_diff_timeouts_total, 'desc')
-
- expect(Timeout).to receive(:timeout).and_raise(Timeout::Error)
- expect { subject }.to change { counter.get(source: described_class::BACKGROUND_EXECUTION) }.by(1)
- end
- end
- end
-
- context 'when invalid ipynb' do
- it 'returns nil' do
- expect(ipynb_blob).to receive(:data).and_return('invalid ipynb')
-
- expect(described_class.preprocess_before_diff(ipynb_blob.path, nil, ipynb_blob)).to be_nil
- end
- end
- end
-
- describe '#transformed_blob_data' do
- it 'transforms blob data if file was processed' do
- described_class.preprocess_before_diff(ipynb_blob.path, nil, ipynb_blob)
-
- expect(described_class.transformed_blob_data(ipynb_blob)).not_to include('cells')
- end
-
- it 'does not transform blob data if file was not processed' do
- expect(described_class.transformed_blob_data(ipynb_blob)).to be_nil
- end
- end
-
- describe '#transformed_blob_language' do
- it 'is md when file was preprocessed' do
- described_class.preprocess_before_diff(ipynb_blob.path, nil, ipynb_blob)
-
- expect(described_class.transformed_blob_language(ipynb_blob)).to eq('md')
- end
-
- it 'is nil for a .ipynb blob that was not preprocessed' do
- expect(described_class.transformed_blob_language(ipynb_blob)).to be_nil
- end
- end
-end
diff --git a/spec/lib/gitlab/diff/file_spec.rb b/spec/lib/gitlab/diff/file_spec.rb
index b7262629e0a..34f4bdde3b5 100644
--- a/spec/lib/gitlab/diff/file_spec.rb
+++ b/spec/lib/gitlab/diff/file_spec.rb
@@ -56,45 +56,21 @@ RSpec.describe Gitlab::Diff::File do
context 'when file is ipynb' do
let(:ipynb_semantic_diff) { false }
- let(:rendered_diffs_viewer) { false }
before do
- stub_feature_flags(ipynb_semantic_diff: ipynb_semantic_diff, rendered_diffs_viewer: rendered_diffs_viewer)
+ stub_feature_flags(ipynb_semantic_diff: ipynb_semantic_diff)
end
- context 'when ipynb_semantic_diff is off, and rendered_viewer is off' do
- it 'does not generate notebook diffs' do
- expect(Gitlab::Diff::CustomDiff).not_to receive(:preprocess_before_diff)
- expect(diff_file.rendered).to be_nil
- end
- end
-
- context 'when ipynb_semantic_diff is off, and rendered_viewer is on' do
- let(:rendered_diffs_viewer) { true }
-
- it 'does not generate rendered diff' do
- expect(Gitlab::Diff::CustomDiff).not_to receive(:preprocess_before_diff)
- expect(diff_file.rendered).to be_nil
- end
- end
-
- context 'when ipynb_semantic_diff is on, and rendered_viewer is off' do
- let(:ipynb_semantic_diff) { true }
+ subject { diff_file.rendered }
- it 'transforms using custom diff CustomDiff' do
- expect(Gitlab::Diff::CustomDiff).to receive(:preprocess_before_diff).and_call_original
- expect(diff_file.rendered).to be_nil
- end
+ context 'when ipynb_semantic_diff is off' do
+ it { is_expected.to be_nil }
end
- context 'when ipynb_semantic_diff is on, and rendered_viewer is on' do
+ context 'and rendered_viewer is on' do
let(:ipynb_semantic_diff) { true }
- let(:rendered_diffs_viewer) { true }
- it 'transforms diff using NotebookDiffFile' do
- expect(Gitlab::Diff::CustomDiff).not_to receive(:preprocess_before_diff)
- expect(diff_file.rendered).not_to be_nil
- end
+ it { is_expected.not_to be_nil }
end
end
end
diff --git a/spec/lib/gitlab/diff/rendered/notebook/diff_file_helper_spec.rb b/spec/lib/gitlab/diff/rendered/notebook/diff_file_helper_spec.rb
new file mode 100644
index 00000000000..cb046548880
--- /dev/null
+++ b/spec/lib/gitlab/diff/rendered/notebook/diff_file_helper_spec.rb
@@ -0,0 +1,134 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rspec-parameterized'
+require 'set'
+
+MOCK_LINE = Struct.new(:text, :type, :index, :old_pos, :new_pos)
+
+def make_lines(old_lines, new_lines, texts = nil, types = nil)
+ old_lines.each_with_index.map do |old, i|
+ MOCK_LINE.new(texts ? texts[i] : '', types ? types[i] : nil, i, old, new_lines[i])
+ end
+end
+
+RSpec.describe Gitlab::Diff::Rendered::Notebook::DiffFileHelper do
+ let(:dummy) { Class.new { include Gitlab::Diff::Rendered::Notebook::DiffFileHelper }.new }
+
+ describe '#strip_diff_frontmatter' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { dummy.strip_diff_frontmatter(diff) }
+
+ where(:diff, :result) do
+ "FileLine1\nFileLine2\n@@ -1,76 +1,74 @@\nhello\n" | "@@ -1,76 +1,74 @@\nhello\n"
+ "" | nil
+ nil | nil
+ end
+
+ with_them do
+ it { is_expected.to eq(result) }
+ end
+ end
+
+ describe '#map_transformed_line_to_source' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { dummy.source_line_from_block(1, transformed_blocks) }
+
+ where(:case, :transformed_blocks, :result) do
+ 'if transformed diff is empty' | [] | 0
+ 'if the transformed line does not map to any in the original file' | [{ source_line: nil }] | 0
+ 'if the transformed line maps to a line in the source file' | [{ source_line: 2 }] | 3
+ end
+
+ with_them do
+ it { is_expected.to eq(result) }
+ end
+ end
+
+ describe '#image_as_rich_text' do
+ let(:img) { 'data:image/png;base64,some_image_here' }
+ let(:line_text) { " ![](#{img})"}
+
+ subject { dummy.image_as_rich_text(line_text) }
+
+ context 'text does not contain image' do
+ let(:img) { "not an image" }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'text contains image' do
+ it { is_expected.to eq("<img src=\"#{img}\">") }
+ end
+
+ context 'text contains image that has malicious html' do
+ let(:img) { 'data:image/png;base64,some_image_here"<div>Hello</div>' }
+
+ it 'sanitizes the html' do
+ expect(subject).not_to include('<div>Hello')
+ end
+
+ it 'adds image to src' do
+ expect(subject).to end_with('/div&gt;">')
+ end
+ end
+ end
+
+ describe '#line_positions_at_source_diff' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:blocks) do
+ {
+ from: [0, 2, 1, nil, nil, 3].map { |i| { source_line: i } },
+ to: [0, 1, nil, 2, nil, 3].map { |i| { source_line: i } }
+ }
+ end
+
+ let(:lines) do
+ make_lines(
+ [1, 2, 3, 4, 5, 5, 5, 5, 6],
+ [1, 2, 2, 2, 2, 3, 4, 5, 6],
+ 'ACBLDJEKF'.split(""),
+ [nil, 'old', 'old', 'old', 'new', 'new', 'new', nil, nil]
+ )
+ end
+
+ subject { dummy.line_positions_at_source_diff(lines, blocks)[index] }
+
+ where(:case, :index, :transformed_positions, :mapped_positions) do
+ " A A" | 0 | [1, 1] | [1, 1] # No change, old_pos and new_pos have mappings
+ "- C " | 1 | [2, 2] | [3, 2] # A removal, both old_pos and new_pos have valid mappings
+ "- B " | 2 | [3, 2] | [2, 2] # A removal, both old_pos and new_pos have valid mappings
+ "- L " | 3 | [4, 2] | [0, 0] # A removal, but old_pos has no mapping
+ "+ D" | 4 | [5, 2] | [4, 2] # An addition, new_pos has mapping but old_pos does not, so old_pos is remapped
+ "+ J" | 5 | [5, 3] | [0, 0] # An addition, but new_pos has no mapping, so neither are remapped
+ "+ E" | 6 | [5, 4] | [4, 3] # An addition, new_pos has mapping but old_pos does not, so old_pos is remapped
+ " K K" | 7 | [5, 5] | [0, 0] # This has no mapping
+ " F F" | 8 | [6, 6] | [4, 4] # No change, old_pos and new_pos have mappings
+ end
+
+ with_them do
+ it { is_expected.to eq(mapped_positions) }
+ end
+ end
+
+ describe '#lines_in_source_diff' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:lines) { make_lines(old_lines, new_lines) }
+
+ subject { dummy.lines_in_source_diff(lines, is_deleted, is_new) }
+
+ where(:old_lines, :new_lines, :is_deleted, :is_new, :existing_lines) do
+ [1, 2, 2] | [1, 1, 4] | false | false | { from: Set[1, 2], to: Set[1, 4] }
+ [1, 2, 2] | [1, 1, 4] | true | false | { from: Set[1, 2], to: Set[] }
+ [1, 2, 2] | [1, 1, 4] | false | true | { from: Set[], to: Set[1, 4] }
+ end
+
+ with_them do
+ it { is_expected.to eq(existing_lines) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb b/spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb
index 1b74e24bf81..c38684a6dc3 100644
--- a/spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb
+++ b/spec/lib/gitlab/diff/rendered/notebook/diff_file_spec.rb
@@ -66,7 +66,7 @@ RSpec.describe Gitlab::Diff::Rendered::Notebook::DiffFile do
context 'timeout' do
it 'utilizes timeout for web' do
- expect(Timeout).to receive(:timeout).with(described_class::RENDERED_TIMEOUT_FOREGROUND).and_call_original
+ expect(Timeout).to receive(:timeout).with(Gitlab::RenderTimeout::FOREGROUND).and_call_original
nb_file.diff
end
@@ -133,7 +133,7 @@ RSpec.describe Gitlab::Diff::Rendered::Notebook::DiffFile do
end
context 'assigns the correct position' do
- it 'computes de first line where the remove would appear' do
+ it 'computes the first line where the remove would appear' do
expect(nb_file.highlighted_diff_lines[0].old_pos).to eq(3)
expect(nb_file.highlighted_diff_lines[0].new_pos).to eq(3)
@@ -142,8 +142,29 @@ RSpec.describe Gitlab::Diff::Rendered::Notebook::DiffFile do
end
end
- it 'computes de first line where the remove would appear' do
- expect(nb_file.highlighted_diff_lines.map(&:text).join('')).to include('[Hidden Image Output]')
+ context 'has image' do
+ it 'replaces rich text with img to the embedded image' do
+ expect(nb_file.highlighted_diff_lines[58].rich_text).to include('<img')
+ end
+
+ it 'adds image to src' do
+ img = 'data:image/png;base64,some_image_here'
+ allow(diff).to receive(:diff).and_return("@@ -1,76 +1,74 @@\n ![](#{img})")
+
+ expect(nb_file.highlighted_diff_lines[0].rich_text).to include("<img src=\"#{img}\"")
+ end
+ end
+
+ context 'when embedded image has injected html' do
+ let(:commit) { project.commit("4963fefc990451a8ad34289ce266b757456fc88c") }
+
+ it 'prevents injected html to be rendered as html' do
+ expect(nb_file.highlighted_diff_lines[45].rich_text).not_to include('<div>Hello')
+ end
+
+ it 'keeps the injected html as part of the string' do
+ expect(nb_file.highlighted_diff_lines[45].rich_text).to end_with('/div&gt;">')
+ end
end
end
end
diff --git a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
index 8d008986464..6e7806c5d53 100644
--- a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
let(:author_email) { 'jake@adventuretime.ooo' }
let(:message_id) { 'CADkmRc+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com' }
- let_it_be(:group) { create(:group, :private, name: "email") }
+ let_it_be(:group) { create(:group, :private, :crm_enabled, name: "email") }
let(:expected_description) do
"Service desk stuff!\n\n```\na = b\n```\n\n`/label ~label1`\n`/assign @user1`\n`/close`\n![image](uploads/image.png)"
@@ -52,6 +52,14 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
expect(new_issue.issue_email_participants.first.email).to eq(author_email)
end
+ it 'attaches existing CRM contact' do
+ contact = create(:contact, group: group, email: author_email)
+ receiver.execute
+ new_issue = Issue.last
+
+ expect(new_issue.issue_customer_relations_contacts.last.contact).to eq(contact)
+ end
+
it 'sends thank you email' do
expect { receiver.execute }.to have_enqueued_job.on_queue('mailers')
end
diff --git a/spec/lib/gitlab/email/message/repository_push_spec.rb b/spec/lib/gitlab/email/message/repository_push_spec.rb
index 6b1f03e0385..f13d98ec9b9 100644
--- a/spec/lib/gitlab/email/message/repository_push_spec.rb
+++ b/spec/lib/gitlab/email/message/repository_push_spec.rb
@@ -64,7 +64,7 @@ RSpec.describe Gitlab::Email::Message::RepositoryPush do
describe '#commits' do
subject { message.commits }
- it { is_expected.to be_kind_of Array }
+ it { is_expected.to be_kind_of CommitCollection }
it { is_expected.to all(be_instance_of Commit) }
end
diff --git a/spec/lib/gitlab/email/receiver_spec.rb b/spec/lib/gitlab/email/receiver_spec.rb
index 9040731d8fd..79476c63e66 100644
--- a/spec/lib/gitlab/email/receiver_spec.rb
+++ b/spec/lib/gitlab/email/receiver_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe Gitlab::Email::Receiver do
let_it_be(:project) { create(:project) }
let(:handler) { double(:handler, project: project, execute: true, metrics_event: nil, metrics_params: nil) }
+ let(:client_id) { 'email/jake@example.com' }
it 'correctly finds the mail key' do
expect(Gitlab::Email::Handler).to receive(:for).with(an_instance_of(Mail::Message), 'gitlabhq/gitlabhq+auth_token').and_return(handler)
@@ -33,7 +34,7 @@ RSpec.describe Gitlab::Email::Receiver do
metadata = receiver.mail_metadata
expect(metadata.keys).to match_array(%i(mail_uid from_address to_address mail_key references delivered_to envelope_to x_envelope_to meta received_recipients))
- expect(metadata[:meta]).to include(client_id: 'email/jake@example.com', project: project.full_path)
+ expect(metadata[:meta]).to include(client_id: client_id, project: project.full_path)
expect(metadata[meta_key]).to eq(meta_value)
end
end
@@ -89,19 +90,9 @@ RSpec.describe Gitlab::Email::Receiver do
let(:meta_key) { :received_recipients }
let(:meta_value) { ['incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com', 'incoming+gitlabhq/gitlabhq@example.com'] }
- context 'when use_received_header_for_incoming_emails is enabled' do
+ describe 'it uses receive headers to find the key' do
it_behaves_like 'successful receive'
end
-
- context 'when use_received_header_for_incoming_emails is disabled' do
- let(:expected_error) { Gitlab::Email::UnknownIncomingEmail }
-
- before do
- stub_feature_flags(use_received_header_for_incoming_emails: false)
- end
-
- it_behaves_like 'failed receive'
- end
end
end
@@ -126,6 +117,49 @@ RSpec.describe Gitlab::Email::Receiver do
it_behaves_like 'failed receive'
end
+ context "when the email's To field is blank" do
+ before do
+ stub_incoming_email_setting(enabled: true, address: "incoming+%{key}@appmail.example.com")
+ end
+
+ let(:email_raw) do
+ <<~EMAIL
+ Delivered-To: incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com
+ From: "jake@example.com" <jake@example.com>
+ Bcc: "support@example.com" <support@example.com>
+
+ Email content
+ EMAIL
+ end
+
+ let(:meta_key) { :delivered_to }
+ let(:meta_value) { ["incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com"] }
+
+ it_behaves_like 'successful receive'
+ end
+
+ context "when the email's From field is blank" do
+ before do
+ stub_incoming_email_setting(enabled: true, address: "incoming+%{key}@appmail.example.com")
+ end
+
+ let(:email_raw) do
+ <<~EMAIL
+ Delivered-To: incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com
+ To: "support@example.com" <support@example.com>
+
+ Email content
+ EMAIL
+ end
+
+ let(:meta_key) { :delivered_to }
+ let(:meta_value) { ["incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com"] }
+
+ it_behaves_like 'successful receive' do
+ let(:client_id) { 'email/' }
+ end
+ end
+
context 'when the email was auto generated with X-Autoreply header' do
let(:email_raw) { fixture_file('emails/auto_reply.eml') }
let(:expected_error) { Gitlab::Email::AutoGeneratedEmailError }
diff --git a/spec/lib/gitlab/email/reply_parser_spec.rb b/spec/lib/gitlab/email/reply_parser_spec.rb
index c0d177aff4d..c61d941406b 100644
--- a/spec/lib/gitlab/email/reply_parser_spec.rb
+++ b/spec/lib/gitlab/email/reply_parser_spec.rb
@@ -268,5 +268,72 @@ RSpec.describe Gitlab::Email::ReplyParser do
expect(test_parse_body(fixture_file("emails/valid_new_issue_with_quote.eml"), { append_reply: true }))
.to contain_exactly(body, reply)
end
+
+ context 'non-UTF-8 content' do
+ let(:charset) { '; charset=Shift_JIS' }
+ let(:raw_content) do
+ <<-BODY.strip_heredoc.chomp
+ From: Jake the Dog <alan@adventuretime.ooo>
+ To: incoming+email-test-project_id-issue-@appmail.adventuretime.ooo
+ Message-ID: <CAH_Wr+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com>
+ Subject: The message subject! @all
+ Content-Type: text/plain#{charset}
+ Content-Transfer-Encoding: 8bit
+
+ こんにちは。 この世界は素晴らしいです。
+ BODY
+ end
+
+ # Strip encoding to simulate the case when Ruby fallback to ASCII-8bit
+ # when it meets an unknown encoding
+ let(:encoded_content) { raw_content.encode("Shift_JIS").bytes.pack("c*") }
+
+ it "parses body under UTF-8 encoding" do
+ expect(test_parse_body(encoded_content))
+ .to eq(<<-BODY.strip_heredoc.chomp)
+ こんにちは。 この世界は素晴らしいです。
+ BODY
+ end
+
+ # This test would raise an exception if encoding is not handled properly
+ # Issue: https://gitlab.com/gitlab-org/gitlab/-/issues/364329
+ context 'charset is absent and reply trimming is disabled' do
+ let(:charset) { '' }
+
+ it "parses body under UTF-8 encoding" do
+ expect(test_parse_body(encoded_content, { trim_reply: false }))
+ .to eq(<<-BODY.strip_heredoc.chomp)
+ こんにちは。 この世界は素晴らしいです。
+ BODY
+ end
+ end
+
+ context 'multipart email' do
+ let(:raw_content) do
+ <<-BODY.strip_heredoc.chomp
+ From: Jake the Dog <alan@adventuretime.ooo>
+ To: incoming+email-test-project_id-issue-@appmail.adventuretime.ooo
+ Message-ID: <CAH_Wr+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com>
+ Subject: The message subject! @all
+ Content-Type: multipart/alternative;
+ boundary=Apple-Mail-B41C7F8E-3639-49B0-A5D5-440E125A7105
+          Content-Transfer-Encoding: 7bit
+
+ --Apple-Mail-B41C7F8E-3639-49B0-A5D5-440E125A7105
+ Content-Type: text/plain
+          Content-Transfer-Encoding: 7bit
+
+ こんにちは。 この世界は素晴らしいです。
+ BODY
+ end
+
+ it "parses body under UTF-8 encoding" do
+ expect(test_parse_body(encoded_content, { trim_reply: false }))
+ .to eq(<<-BODY.strip_heredoc.chomp)
+ こんにちは。 この世界は素晴らしいです。
+ BODY
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/email/service_desk_receiver_spec.rb b/spec/lib/gitlab/email/service_desk_receiver_spec.rb
index 49cbec6fffc..c249a5422ff 100644
--- a/spec/lib/gitlab/email/service_desk_receiver_spec.rb
+++ b/spec/lib/gitlab/email/service_desk_receiver_spec.rb
@@ -53,6 +53,18 @@ RSpec.describe Gitlab::Email::ServiceDeskReceiver do
end
end
+ context 'when the email contains no key in the To header and contains reference header with no key' do
+ let(:email) { fixture_file('emails/service_desk_reference_headers.eml') }
+
+ before do
+ stub_service_desk_email_setting(enabled: true, address: 'support+%{key}@example.com')
+ end
+
+ it 'sends a rejection email' do
+ expect { receiver.execute }.to raise_error(Gitlab::Email::UnknownIncomingEmail)
+ end
+ end
+
context 'when the email does not contain a valid email address' do
before do
stub_service_desk_email_setting(enabled: true, address: 'other_support+%{key}@example.com')
diff --git a/spec/lib/gitlab/error_tracking/logger_spec.rb b/spec/lib/gitlab/error_tracking/logger_spec.rb
new file mode 100644
index 00000000000..751ec10a1f0
--- /dev/null
+++ b/spec/lib/gitlab/error_tracking/logger_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::ErrorTracking::Logger do
+ describe '.capture_exception' do
+ let(:exception) { RuntimeError.new('boom') }
+ let(:payload) { { foo: '123' } }
+ let(:log_entry) { { message: 'boom', context: payload }}
+
+ it 'calls Gitlab::ErrorTracking::Logger.error with formatted log entry' do
+ expect_next_instance_of(Gitlab::ErrorTracking::LogFormatter) do |log_formatter|
+ expect(log_formatter).to receive(:generate_log).with(exception, payload).and_return(log_entry)
+ end
+
+ expect(Gitlab::ErrorTracking::Logger).to receive(:error).with(log_entry)
+
+ described_class.capture_exception(exception, **payload)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/event_store/store_spec.rb b/spec/lib/gitlab/event_store/store_spec.rb
index 94e8f0ff2ff..bbdfecc897a 100644
--- a/spec/lib/gitlab/event_store/store_spec.rb
+++ b/spec/lib/gitlab/event_store/store_spec.rb
@@ -134,6 +134,7 @@ RSpec.describe Gitlab::EventStore::Store do
describe '#publish' do
let(:data) { { name: 'Bob', id: 123 } }
+ let(:serialized_data) { data.deep_stringify_keys }
context 'when event has a subscribed worker' do
let(:store) do
@@ -144,12 +145,21 @@ RSpec.describe Gitlab::EventStore::Store do
end
it 'dispatches the event to the subscribed worker' do
- expect(worker).to receive(:perform_async).with('TestEvent', data)
+ expect(worker).to receive(:perform_async).with('TestEvent', serialized_data)
expect(another_worker).not_to receive(:perform_async)
store.publish(event)
end
+ it 'does not raise any Sidekiq warning' do
+ logger = double(:logger, info: nil)
+ allow(Sidekiq).to receive(:logger).and_return(logger)
+ expect(logger).not_to receive(:warn).with(/do not serialize to JSON safely/)
+ expect(worker).to receive(:perform_async).with('TestEvent', serialized_data).and_call_original
+
+ store.publish(event)
+ end
+
context 'when other workers subscribe to the same event' do
let(:store) do
described_class.new do |store|
@@ -160,8 +170,8 @@ RSpec.describe Gitlab::EventStore::Store do
end
it 'dispatches the event to each subscribed worker' do
- expect(worker).to receive(:perform_async).with('TestEvent', data)
- expect(another_worker).to receive(:perform_async).with('TestEvent', data)
+ expect(worker).to receive(:perform_async).with('TestEvent', serialized_data)
+ expect(another_worker).to receive(:perform_async).with('TestEvent', serialized_data)
expect(unrelated_worker).not_to receive(:perform_async)
store.publish(event)
@@ -215,7 +225,7 @@ RSpec.describe Gitlab::EventStore::Store do
let(:event) { event_klass.new(data: data) }
it 'dispatches the event to the workers satisfying the condition' do
- expect(worker).to receive(:perform_async).with('TestEvent', data)
+ expect(worker).to receive(:perform_async).with('TestEvent', serialized_data)
expect(another_worker).not_to receive(:perform_async)
store.publish(event)
diff --git a/spec/lib/gitlab/fogbugz_import/project_creator_spec.rb b/spec/lib/gitlab/fogbugz_import/project_creator_spec.rb
index 6b8bb2229a9..8be9f55dbb6 100644
--- a/spec/lib/gitlab/fogbugz_import/project_creator_spec.rb
+++ b/spec/lib/gitlab/fogbugz_import/project_creator_spec.rb
@@ -4,7 +4,6 @@ require 'spec_helper'
RSpec.describe Gitlab::FogbugzImport::ProjectCreator do
let(:user) { create(:user) }
-
let(:repo) do
instance_double(Gitlab::FogbugzImport::Repository,
name: 'Vim',
@@ -13,10 +12,11 @@ RSpec.describe Gitlab::FogbugzImport::ProjectCreator do
raw_data: '')
end
+ let(:repo_name) { 'new_name' }
let(:uri) { 'https://testing.fogbugz.com' }
let(:token) { 'token' }
let(:fb_session) { { uri: uri, token: token } }
- let(:project_creator) { described_class.new(repo, fb_session, user.namespace, user) }
+ let(:project_creator) { described_class.new(repo, repo_name, user.namespace, user, fb_session) }
subject do
project_creator.execute
@@ -26,4 +26,9 @@ RSpec.describe Gitlab::FogbugzImport::ProjectCreator do
expect(subject.persisted?).to eq(true)
expect(subject.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
end
+
+ it 'creates project with provided name and path' do
+ expect(subject.name).to eq(repo_name)
+ expect(subject.path).to eq(repo_name)
+ end
end
diff --git a/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb b/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb
index a5f26a212ab..2b1fcac9257 100644
--- a/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb
+++ b/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb
@@ -3,163 +3,194 @@
require 'spec_helper'
RSpec.describe Gitlab::FormBuilders::GitlabUiFormBuilder do
- let_it_be(:user) { build(:user) }
- let_it_be(:fake_template) do
- Object.new.tap do |template|
- template.extend ActionView::Helpers::FormHelper
- template.extend ActionView::Helpers::FormOptionsHelper
- template.extend ActionView::Helpers::TagHelper
- template.extend ActionView::Context
- end
- end
+ include FormBuilderHelpers
- let_it_be(:form_builder) { described_class.new(:user, user, fake_template, {}) }
-
- describe '#gitlab_ui_checkbox_component' do
- let(:optional_args) { {} }
+ let_it_be(:user) { build(:user, :admin) }
- subject(:checkbox_html) { form_builder.gitlab_ui_checkbox_component(:view_diffs_file_by_file, "Show one file at a time on merge request's Changes tab", **optional_args) }
-
- context 'without optional arguments' do
- it 'renders correct html' do
- expected_html = <<~EOS
- <div class="gl-form-checkbox custom-control custom-checkbox">
- <input name="user[view_diffs_file_by_file]" type="hidden" value="0" />
- <input class="custom-control-input" type="checkbox" value="1" name="user[view_diffs_file_by_file]" id="user_view_diffs_file_by_file" />
- <label class="custom-control-label" for="user_view_diffs_file_by_file">
- Show one file at a time on merge request&#39;s Changes tab
- </label>
- </div>
- EOS
+ let_it_be(:form_builder) { described_class.new(:user, user, fake_action_view_base, {}) }
- expect(checkbox_html).to eq(html_strip_whitespace(expected_html))
+ describe '#gitlab_ui_checkbox_component' do
+ context 'when not using slots' do
+ let(:optional_args) { {} }
+
+ subject(:checkbox_html) do
+ form_builder.gitlab_ui_checkbox_component(
+ :view_diffs_file_by_file,
+ "Show one file at a time on merge request's Changes tab",
+ **optional_args
+ )
end
- end
- context 'with optional arguments' do
- let(:optional_args) do
- {
- help_text: 'Instead of all the files changed, show only one file at a time.',
- checkbox_options: { class: 'checkbox-foo-bar' },
- label_options: { class: 'label-foo-bar' },
- checked_value: '3',
- unchecked_value: '1'
- }
+ context 'without optional arguments' do
+ it 'renders correct html' do
+ expected_html = <<~EOS
+ <div class="gl-form-checkbox custom-control custom-checkbox">
+ <input name="user[view_diffs_file_by_file]" type="hidden" value="0" />
+ <input class="custom-control-input" type="checkbox" value="1" name="user[view_diffs_file_by_file]" id="user_view_diffs_file_by_file" />
+ <label class="custom-control-label" for="user_view_diffs_file_by_file">
+ <span>Show one file at a time on merge request&#39;s Changes tab</span>
+ </label>
+ </div>
+ EOS
+
+ expect(html_strip_whitespace(checkbox_html)).to eq(html_strip_whitespace(expected_html))
+ end
end
- it 'renders help text' do
- expected_html = <<~EOS
- <div class="gl-form-checkbox custom-control custom-checkbox">
- <input name="user[view_diffs_file_by_file]" type="hidden" value="1" />
- <input class="custom-control-input checkbox-foo-bar" type="checkbox" value="3" name="user[view_diffs_file_by_file]" id="user_view_diffs_file_by_file" />
- <label class="custom-control-label label-foo-bar" for="user_view_diffs_file_by_file">
- <span>Show one file at a time on merge request&#39;s Changes tab</span>
- <p class="help-text">Instead of all the files changed, show only one file at a time.</p>
- </label>
- </div>
- EOS
-
- expect(checkbox_html).to eq(html_strip_whitespace(expected_html))
- end
-
- it 'passes arguments to `check_box` method' do
- allow(fake_template).to receive(:check_box).and_return('')
-
- checkbox_html
-
- expect(fake_template).to have_received(:check_box).with(:user, :view_diffs_file_by_file, { class: %w(custom-control-input checkbox-foo-bar), object: user }, '3', '1')
+ context 'with optional arguments' do
+ let(:optional_args) do
+ {
+ help_text: 'Instead of all the files changed, show only one file at a time.',
+ checkbox_options: { class: 'checkbox-foo-bar' },
+ label_options: { class: 'label-foo-bar' },
+ checked_value: '3',
+ unchecked_value: '1'
+ }
+ end
+
+ it 'renders help text' do
+ expected_html = <<~EOS
+ <div class="gl-form-checkbox custom-control custom-checkbox">
+ <input name="user[view_diffs_file_by_file]" type="hidden" value="1" />
+ <input class="custom-control-input checkbox-foo-bar" type="checkbox" value="3" name="user[view_diffs_file_by_file]" id="user_view_diffs_file_by_file" />
+ <label class="custom-control-label label-foo-bar" for="user_view_diffs_file_by_file">
+ <span>Show one file at a time on merge request&#39;s Changes tab</span>
+ <p class="help-text" data-testid="pajamas-component-help-text">Instead of all the files changed, show only one file at a time.</p>
+ </label>
+ </div>
+ EOS
+
+ expect(html_strip_whitespace(checkbox_html)).to eq(html_strip_whitespace(expected_html))
+ end
end
- it 'passes arguments to `label` method' do
- allow(fake_template).to receive(:label).and_return('')
-
- checkbox_html
-
- expect(fake_template).to have_received(:label).with(:user, :view_diffs_file_by_file, { class: %w(custom-control-label label-foo-bar), object: user, value: nil })
+ context 'with checkbox_options: { multiple: true }' do
+ let(:optional_args) do
+ {
+ checkbox_options: { multiple: true },
+ checked_value: 'one',
+ unchecked_value: false
+ }
+ end
+
+ it 'renders labels with correct for attributes' do
+ expected_html = <<~EOS
+ <div class="gl-form-checkbox custom-control custom-checkbox">
+ <input class="custom-control-input" type="checkbox" value="one" name="user[view_diffs_file_by_file][]" id="user_view_diffs_file_by_file_one" />
+ <label class="custom-control-label" for="user_view_diffs_file_by_file_one">
+ <span>Show one file at a time on merge request&#39;s Changes tab</span>
+ </label>
+ </div>
+ EOS
+
+ expect(html_strip_whitespace(checkbox_html)).to eq(html_strip_whitespace(expected_html))
+ end
end
end
- context 'with checkbox_options: { multiple: true }' do
- let(:optional_args) do
- {
- checkbox_options: { multiple: true },
- checked_value: 'one',
- unchecked_value: false
- }
+ context 'when using slots' do
+ subject(:checkbox_html) do
+ form_builder.gitlab_ui_checkbox_component(
+ :view_diffs_file_by_file
+ ) do |c|
+ c.label { "Show one file at a time on merge request's Changes tab" }
+ c.help_text { 'Instead of all the files changed, show only one file at a time.' }
+ end
end
- it 'renders labels with correct for attributes' do
+ it 'renders correct html' do
expected_html = <<~EOS
<div class="gl-form-checkbox custom-control custom-checkbox">
- <input class="custom-control-input" type="checkbox" value="one" name="user[view_diffs_file_by_file][]" id="user_view_diffs_file_by_file_one" />
- <label class="custom-control-label" for="user_view_diffs_file_by_file_one">
- Show one file at a time on merge request&#39;s Changes tab
+ <input name="user[view_diffs_file_by_file]" type="hidden" value="0" />
+ <input class="custom-control-input" type="checkbox" value="1" name="user[view_diffs_file_by_file]" id="user_view_diffs_file_by_file" />
+ <label class="custom-control-label" for="user_view_diffs_file_by_file">
+ <span>Show one file at a time on merge request&#39;s Changes tab</span>
+ <p class="help-text" data-testid="pajamas-component-help-text">Instead of all the files changed, show only one file at a time.</p>
</label>
</div>
EOS
- expect(checkbox_html).to eq(html_strip_whitespace(expected_html))
+ expect(html_strip_whitespace(checkbox_html)).to eq(html_strip_whitespace(expected_html))
end
end
end
describe '#gitlab_ui_radio_component' do
- let(:optional_args) { {} }
-
- subject(:radio_html) { form_builder.gitlab_ui_radio_component(:access_level, :admin, "Access Level", **optional_args) }
+ context 'when not using slots' do
+ let(:optional_args) { {} }
+
+ subject(:radio_html) do
+ form_builder.gitlab_ui_radio_component(
+ :access_level,
+ :admin,
+ "Admin",
+ **optional_args
+ )
+ end
- context 'without optional arguments' do
- it 'renders correct html' do
- expected_html = <<~EOS
- <div class="gl-form-radio custom-control custom-radio">
- <input class="custom-control-input" type="radio" value="admin" name="user[access_level]" id="user_access_level_admin" />
- <label class="custom-control-label" for="user_access_level_admin">
- Access Level
- </label>
- </div>
- EOS
+ context 'without optional arguments' do
+ it 'renders correct html' do
+ expected_html = <<~EOS
+ <div class="gl-form-radio custom-control custom-radio">
+ <input class="custom-control-input" type="radio" value="admin" checked="checked" name="user[access_level]" id="user_access_level_admin" />
+ <label class="custom-control-label" for="user_access_level_admin">
+ <span>Admin</span>
+ </label>
+ </div>
+ EOS
+
+ expect(html_strip_whitespace(radio_html)).to eq(html_strip_whitespace(expected_html))
+ end
+ end
- expect(radio_html).to eq(html_strip_whitespace(expected_html))
+ context 'with optional arguments' do
+ let(:optional_args) do
+ {
+ help_text: 'Administrators have access to all groups, projects, and users and can manage all features in this installation',
+ radio_options: { class: 'radio-foo-bar' },
+ label_options: { class: 'label-foo-bar' }
+ }
+ end
+
+ it 'renders help text' do
+ expected_html = <<~EOS
+ <div class="gl-form-radio custom-control custom-radio">
+ <input class="custom-control-input radio-foo-bar" type="radio" value="admin" checked="checked" name="user[access_level]" id="user_access_level_admin" />
+ <label class="custom-control-label label-foo-bar" for="user_access_level_admin">
+ <span>Admin</span>
+ <p class="help-text" data-testid="pajamas-component-help-text">Administrators have access to all groups, projects, and users and can manage all features in this installation</p>
+ </label>
+ </div>
+ EOS
+
+ expect(html_strip_whitespace(radio_html)).to eq(html_strip_whitespace(expected_html))
+ end
end
end
- context 'with optional arguments' do
- let(:optional_args) do
- {
- help_text: 'Administrators have access to all groups, projects, and users and can manage all features in this installation',
- radio_options: { class: 'radio-foo-bar' },
- label_options: { class: 'label-foo-bar' }
- }
+ context 'when using slots' do
+ subject(:radio_html) do
+ form_builder.gitlab_ui_radio_component(
+ :access_level,
+ :admin
+ ) do |c|
+ c.label { "Admin" }
+ c.help_text { 'Administrators have access to all groups, projects, and users and can manage all features in this installation' }
+ end
end
- it 'renders help text' do
+ it 'renders correct html' do
expected_html = <<~EOS
<div class="gl-form-radio custom-control custom-radio">
- <input class="custom-control-input radio-foo-bar" type="radio" value="admin" name="user[access_level]" id="user_access_level_admin" />
- <label class="custom-control-label label-foo-bar" for="user_access_level_admin">
- <span>Access Level</span>
- <p class="help-text">Administrators have access to all groups, projects, and users and can manage all features in this installation</p>
+ <input class="custom-control-input" type="radio" value="admin" checked="checked" name="user[access_level]" id="user_access_level_admin" />
+ <label class="custom-control-label" for="user_access_level_admin">
+ <span>Admin</span>
+ <p class="help-text" data-testid="pajamas-component-help-text">Administrators have access to all groups, projects, and users and can manage all features in this installation</p>
</label>
</div>
EOS
- expect(radio_html).to eq(html_strip_whitespace(expected_html))
- end
-
- it 'passes arguments to `radio_button` method' do
- allow(fake_template).to receive(:radio_button).and_return('')
-
- radio_html
-
- expect(fake_template).to have_received(:radio_button).with(:user, :access_level, :admin, { class: %w(custom-control-input radio-foo-bar), object: user })
- end
-
- it 'passes arguments to `label` method' do
- allow(fake_template).to receive(:label).and_return('')
-
- radio_html
-
- expect(fake_template).to have_received(:label).with(:user, :access_level, { class: %w(custom-control-label label-foo-bar), object: user, value: :admin })
+ expect(html_strip_whitespace(radio_html)).to eq(html_strip_whitespace(expected_html))
end
end
end
diff --git a/spec/lib/gitlab/gfm/reference_rewriter_spec.rb b/spec/lib/gitlab/gfm/reference_rewriter_spec.rb
index 7d4a3655be6..8bb649e78e0 100644
--- a/spec/lib/gitlab/gfm/reference_rewriter_spec.rb
+++ b/spec/lib/gitlab/gfm/reference_rewriter_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe Gitlab::Gfm::ReferenceRewriter do
let(:old_project) { create(:project, name: 'old-project', group: group) }
let(:old_project_ref) { old_project.to_reference_base(new_project) }
let(:text) { 'some text' }
+ let(:note) { create(:note, note: text, project: old_project) }
before do
old_project.add_reporter(user)
@@ -17,7 +18,7 @@ RSpec.describe Gitlab::Gfm::ReferenceRewriter do
describe '#rewrite' do
subject do
- described_class.new(text, old_project, user).rewrite(new_project)
+ described_class.new(note.note, note.note_html, old_project, user).rewrite(new_project)
end
context 'multiple issues and merge requests referenced' do
diff --git a/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb b/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb
index f878f02f410..763e6f1b5f4 100644
--- a/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb
+++ b/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::Gfm::UploadsRewriter do
let(:user) { create(:user) }
let(:old_project) { create(:project) }
let(:new_project) { create(:project) }
- let(:rewriter) { described_class.new(text, old_project, user) }
+ let(:rewriter) { described_class.new(+text, nil, old_project, user) }
context 'text contains links to uploads' do
let(:image_uploader) do
@@ -22,13 +22,21 @@ RSpec.describe Gitlab::Gfm::UploadsRewriter do
"Text and #{image_uploader.markdown_link} and #{zip_uploader.markdown_link}"
end
+ def referenced_files(text, project)
+ referenced_files = text.scan(FileUploader::MARKDOWN_PATTERN).map do
+ UploaderFinder.new(project, $~[:secret], $~[:file]).execute
+ end
+
+ referenced_files.compact.select(&:exists?)
+ end
+
shared_examples "files are accessible" do
describe '#rewrite' do
let!(:new_text) { rewriter.rewrite(new_project) }
let(:old_files) { [image_uploader, zip_uploader] }
let(:new_files) do
- described_class.new(new_text, new_project, user).files
+ referenced_files(new_text, new_project)
end
let(:old_paths) { old_files.map(&:path) }
@@ -68,9 +76,9 @@ RSpec.describe Gitlab::Gfm::UploadsRewriter do
it 'does not rewrite plain links as embedded' do
embedded_link = image_uploader.markdown_link
plain_image_link = embedded_link.delete_prefix('!')
- text = "#{plain_image_link} and #{embedded_link}"
+ text = +"#{plain_image_link} and #{embedded_link}"
- moved_text = described_class.new(text, old_project, user).rewrite(new_project)
+ moved_text = described_class.new(text, nil, old_project, user).rewrite(new_project)
expect(moved_text.scan(/!\[.*?\]/).count).to eq(1)
expect(moved_text.scan(/\A\[.*?\]/).count).to eq(1)
@@ -97,11 +105,5 @@ RSpec.describe Gitlab::Gfm::UploadsRewriter do
it { is_expected.to eq true }
end
-
- describe '#files' do
- subject { rewriter.files }
-
- it { is_expected.to be_an(Array) }
- end
end
end
diff --git a/spec/lib/gitlab/git_access_snippet_spec.rb b/spec/lib/gitlab/git_access_snippet_spec.rb
index b6a61de87a6..a7036a4f20a 100644
--- a/spec/lib/gitlab/git_access_snippet_spec.rb
+++ b/spec/lib/gitlab/git_access_snippet_spec.rb
@@ -121,13 +121,19 @@ RSpec.describe Gitlab::GitAccessSnippet do
if Ability.allowed?(user, :update_snippet, snippet)
expect { push_access_check }.not_to raise_error
else
- expect { push_access_check }.to raise_error(described_class::ForbiddenError)
+ expect { push_access_check }.to raise_error(
+ described_class::ForbiddenError,
+ described_class::ERROR_MESSAGES[:update_snippet]
+ )
end
if Ability.allowed?(user, :read_snippet, snippet)
expect { pull_access_check }.not_to raise_error
else
- expect { pull_access_check }.to raise_error(described_class::ForbiddenError)
+ expect { pull_access_check }.to raise_error(
+ described_class::ForbiddenError,
+ described_class::ERROR_MESSAGES[:read_snippet]
+ )
end
end
end
diff --git a/spec/lib/gitlab/git_access_spec.rb b/spec/lib/gitlab/git_access_spec.rb
index e628a06a542..5ee9cf05b3e 100644
--- a/spec/lib/gitlab/git_access_spec.rb
+++ b/spec/lib/gitlab/git_access_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe Gitlab::GitAccess do
describe '#check with single protocols allowed' do
def disable_protocol(protocol)
- allow(Gitlab::ProtocolAccess).to receive(:allowed?).with(protocol).and_return(false)
+ allow(Gitlab::ProtocolAccess).to receive(:allowed?).with(protocol, project: project).and_return(false)
end
context 'ssh disabled' do
diff --git a/spec/lib/gitlab/git_access_wiki_spec.rb b/spec/lib/gitlab/git_access_wiki_spec.rb
index de3e674c3a7..11c19c7d3f0 100644
--- a/spec/lib/gitlab/git_access_wiki_spec.rb
+++ b/spec/lib/gitlab/git_access_wiki_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe Gitlab::GitAccessWiki do
redirected_path: redirected_path)
end
- RSpec.shared_examples 'wiki access by level' do
+ RSpec.shared_examples 'download wiki access by level' do
where(:project_visibility, :project_member?, :wiki_access_level, :wiki_repo?, :expected_behavior) do
[
# Private project - is a project member
@@ -103,7 +103,7 @@ RSpec.describe Gitlab::GitAccessWiki do
subject { access.check('git-upload-pack', Gitlab::GitAccess::ANY) }
context 'when actor is a user' do
- it_behaves_like 'wiki access by level'
+ it_behaves_like 'download wiki access by level'
end
context 'when the actor is a deploy token' do
@@ -116,6 +116,36 @@ RSpec.describe Gitlab::GitAccessWiki do
subject { access.check('git-upload-pack', changes) }
+ context 'when the wiki feature is enabled' do
+ let(:wiki_access_level) { ProjectFeature::ENABLED }
+
+ it { expect { subject }.not_to raise_error }
+ end
+
+ context 'when the wiki feature is disabled' do
+ let(:wiki_access_level) { ProjectFeature::DISABLED }
+
+ it { expect { subject }.to raise_wiki_forbidden }
+ end
+
+ context 'when the wiki feature is private' do
+ let(:wiki_access_level) { ProjectFeature::PRIVATE }
+
+ it { expect { subject }.to raise_wiki_forbidden }
+ end
+ end
+
+ context 'when the actor is a deploy key' do
+ let_it_be(:actor) { create(:deploy_key) }
+ let_it_be(:deploy_key_project) { create(:deploy_keys_project, project: project, deploy_key: actor) }
+ let_it_be(:user) { actor }
+
+ before do
+ project.project_feature.update_attribute(:wiki_access_level, wiki_access_level)
+ end
+
+ subject { access.check('git-upload-pack', changes) }
+
context 'when the wiki is enabled' do
let(:wiki_access_level) { ProjectFeature::ENABLED }
@@ -140,7 +170,7 @@ RSpec.describe Gitlab::GitAccessWiki do
subject { access.check('git-upload-pack', changes) }
- it_behaves_like 'wiki access by level'
+ it_behaves_like 'download wiki access by level'
end
end
diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
index 92860c9232f..3a34d39c722 100644
--- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
@@ -340,17 +340,12 @@ RSpec.describe Gitlab::GitalyClient::CommitService do
let(:revisions) { [revision] }
let(:gitaly_commits) { create_list(:gitaly_commit, 3) }
let(:expected_commits) { gitaly_commits.map { |c| Gitlab::Git::Commit.new(repository, c) }}
- let(:filter_quarantined_commits) { false }
subject do
- client.list_new_commits(revisions, allow_quarantine: allow_quarantine)
+ client.list_new_commits(revisions)
end
shared_examples 'a #list_all_commits message' do
- before do
- stub_feature_flags(filter_quarantined_commits: filter_quarantined_commits)
- end
-
it 'sends a list_all_commits message' do
expected_repository = repository.gitaly_repository.dup
expected_repository.git_alternate_object_directories = Google::Protobuf::RepeatedField.new(:string)
@@ -360,29 +355,25 @@ RSpec.describe Gitlab::GitalyClient::CommitService do
.with(gitaly_request_with_params(repository: expected_repository), kind_of(Hash))
.and_return([Gitaly::ListAllCommitsResponse.new(commits: gitaly_commits)])
- if filter_quarantined_commits
- # The object directory of the repository must not be set so that we
- # don't use the quarantine directory.
- objects_exist_repo = repository.gitaly_repository.dup
- objects_exist_repo.git_object_directory = ""
-
- # The first request contains the repository, the second request the
- # commit IDs we want to check for existence.
- objects_exist_request = [
- gitaly_request_with_params(repository: objects_exist_repo),
- gitaly_request_with_params(revisions: gitaly_commits.map(&:id))
- ]
-
- objects_exist_response = Gitaly::CheckObjectsExistResponse.new(revisions: revision_existence.map do
- |rev, exists| Gitaly::CheckObjectsExistResponse::RevisionExistence.new(name: rev, exists: exists)
- end)
-
- expect(service).to receive(:check_objects_exist)
- .with(objects_exist_request, kind_of(Hash))
- .and_return([objects_exist_response])
- else
- expect(service).not_to receive(:check_objects_exist)
- end
+ # The object directory of the repository must not be set so that we
+ # don't use the quarantine directory.
+ objects_exist_repo = repository.gitaly_repository.dup
+ objects_exist_repo.git_object_directory = ""
+
+ # The first request contains the repository, the second request the
+ # commit IDs we want to check for existence.
+ objects_exist_request = [
+ gitaly_request_with_params(repository: objects_exist_repo),
+ gitaly_request_with_params(revisions: gitaly_commits.map(&:id))
+ ]
+
+ objects_exist_response = Gitaly::CheckObjectsExistResponse.new(revisions: revision_existence.map do
+ |rev, exists| Gitaly::CheckObjectsExistResponse::RevisionExistence.new(name: rev, exists: exists)
+ end)
+
+ expect(service).to receive(:check_objects_exist)
+ .with(objects_exist_request, kind_of(Hash))
+ .and_return([objects_exist_response])
end
expect(subject).to eq(expected_commits)
@@ -418,49 +409,31 @@ RSpec.describe Gitlab::GitalyClient::CommitService do
}
end
- context 'with allowed quarantine' do
- let(:allow_quarantine) { true }
-
- context 'without commit filtering' do
- it_behaves_like 'a #list_all_commits message'
- end
-
- context 'with commit filtering' do
- let(:filter_quarantined_commits) { true }
-
- context 'reject commits which exist in target repository' do
- let(:revision_existence) { gitaly_commits.to_h { |c| [c.id, true] } }
- let(:expected_commits) { [] }
-
- it_behaves_like 'a #list_all_commits message'
- end
-
- context 'keep commits which do not exist in target repository' do
- let(:revision_existence) { gitaly_commits.to_h { |c| [c.id, false] } }
+ context 'reject commits which exist in target repository' do
+ let(:revision_existence) { gitaly_commits.to_h { |c| [c.id, true] } }
+ let(:expected_commits) { [] }
- it_behaves_like 'a #list_all_commits message'
- end
+ it_behaves_like 'a #list_all_commits message'
+ end
- context 'mixed existing and nonexisting commits' do
- let(:revision_existence) do
- {
- gitaly_commits[0].id => true,
- gitaly_commits[1].id => false,
- gitaly_commits[2].id => true
- }
- end
+ context 'keep commits which do not exist in target repository' do
+ let(:revision_existence) { gitaly_commits.to_h { |c| [c.id, false] } }
- let(:expected_commits) { [Gitlab::Git::Commit.new(repository, gitaly_commits[1])] }
+ it_behaves_like 'a #list_all_commits message'
+ end
- it_behaves_like 'a #list_all_commits message'
- end
+ context 'mixed existing and nonexisting commits' do
+ let(:revision_existence) do
+ {
+ gitaly_commits[0].id => true,
+ gitaly_commits[1].id => false,
+ gitaly_commits[2].id => true
+ }
end
- end
- context 'with disallowed quarantine' do
- let(:allow_quarantine) { false }
+ let(:expected_commits) { [Gitlab::Git::Commit.new(repository, gitaly_commits[1])] }
- it_behaves_like 'a #list_commits message'
+ it_behaves_like 'a #list_all_commits message'
end
end
@@ -472,17 +445,7 @@ RSpec.describe Gitlab::GitalyClient::CommitService do
}
end
- context 'with allowed quarantine' do
- let(:allow_quarantine) { true }
-
- it_behaves_like 'a #list_commits message'
- end
-
- context 'with disallowed quarantine' do
- let(:allow_quarantine) { false }
-
- it_behaves_like 'a #list_commits message'
- end
+ it_behaves_like 'a #list_commits message'
end
end
diff --git a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
index 0c04863f466..4320c5460da 100644
--- a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
@@ -170,6 +170,65 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
Gitlab::Git::PreReceiveError, "something failed")
end
end
+
+ context 'with a custom hook error' do
+ let(:stdout) { nil }
+ let(:stderr) { nil }
+ let(:error_message) { "error_message" }
+ let(:custom_hook_error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::PERMISSION_DENIED,
+ error_message,
+ Gitaly::UserDeleteBranchError.new(
+ custom_hook: Gitaly::CustomHookError.new(
+ stdout: stdout,
+ stderr: stderr,
+ hook_type: Gitaly::CustomHookError::HookType::HOOK_TYPE_PRERECEIVE
+ )))
+ end
+
+ shared_examples 'a failed branch deletion' do
+ it 'raises a PreReceiveError' do
+ expect_any_instance_of(Gitaly::OperationService::Stub)
+ .to receive(:user_delete_branch).with(request, kind_of(Hash))
+ .and_raise(custom_hook_error)
+
+ expect { subject }.to raise_error do |error|
+ expect(error).to be_a(Gitlab::Git::PreReceiveError)
+ expect(error.message).to eq(expected_message)
+ expect(error.raw_message).to eq(expected_raw_message)
+ end
+ end
+ end
+
+ context 'when details contain stderr' do
+ let(:stderr) { "something" }
+ let(:stdout) { "GL-HOOK-ERR: stdout is overridden by stderr" }
+ let(:expected_message) { error_message }
+ let(:expected_raw_message) { stderr }
+
+ it_behaves_like 'a failed branch deletion'
+ end
+
+ context 'when details contain stdout' do
+ let(:stderr) { " \n" }
+ let(:stdout) { "something" }
+ let(:expected_message) { error_message }
+ let(:expected_raw_message) { stdout }
+
+ it_behaves_like 'a failed branch deletion'
+ end
+ end
+
+ context 'with a non-detailed error' do
+ it 'raises a GRPC error' do
+ expect_any_instance_of(Gitaly::OperationService::Stub)
+ .to receive(:user_delete_branch).with(request, kind_of(Hash))
+ .and_raise(GRPC::Internal.new('non-detailed error'))
+
+ expect { subject }.to raise_error(GRPC::Internal)
+ end
+ end
end
describe '#user_merge_branch' do
@@ -212,6 +271,82 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
end
end
+ context 'with a custom hook error' do
+ let(:stdout) { nil }
+ let(:stderr) { nil }
+ let(:error_message) { "error_message" }
+ let(:custom_hook_error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::PERMISSION_DENIED,
+ error_message,
+ Gitaly::UserMergeBranchError.new(
+ custom_hook: Gitaly::CustomHookError.new(
+ stdout: stdout,
+ stderr: stderr,
+ hook_type: Gitaly::CustomHookError::HookType::HOOK_TYPE_PRERECEIVE
+ )))
+ end
+
+ shared_examples 'a failed merge' do
+ it 'raises a PreReceiveError' do
+ expect_any_instance_of(Gitaly::OperationService::Stub)
+ .to receive(:user_merge_branch).with(kind_of(Enumerator), kind_of(Hash))
+ .and_raise(custom_hook_error)
+
+ expect { subject }.to raise_error do |error|
+ expect(error).to be_a(Gitlab::Git::PreReceiveError)
+ expect(error.message).to eq(expected_message)
+ expect(error.raw_message).to eq(expected_raw_message)
+ end
+ end
+ end
+
+ context 'when details contain stderr without prefix' do
+ let(:stderr) { "something" }
+ let(:stdout) { "GL-HOOK-ERR: stdout is overridden by stderr" }
+ let(:expected_message) { error_message }
+ let(:expected_raw_message) { stderr }
+
+ it_behaves_like 'a failed merge'
+ end
+
+ context 'when details contain stderr with prefix' do
+ let(:stderr) { "GL-HOOK-ERR: something" }
+ let(:stdout) { "GL-HOOK-ERR: stdout is overridden by stderr" }
+ let(:expected_message) { "something" }
+ let(:expected_raw_message) { stderr }
+
+ it_behaves_like 'a failed merge'
+ end
+
+ context 'when details contain stdout without prefix' do
+ let(:stderr) { " \n" }
+ let(:stdout) { "something" }
+ let(:expected_message) { error_message }
+ let(:expected_raw_message) { stdout }
+
+ it_behaves_like 'a failed merge'
+ end
+
+ context 'when details contain stdout with prefix' do
+ let(:stderr) { " \n" }
+ let(:stdout) { "GL-HOOK-ERR: something" }
+ let(:expected_message) { "something" }
+ let(:expected_raw_message) { stdout }
+
+ it_behaves_like 'a failed merge'
+ end
+
+ context 'when details contain no stderr or stdout' do
+ let(:stderr) { " \n" }
+ let(:stdout) { "\n \n" }
+ let(:expected_message) { error_message }
+ let(:expected_raw_message) { "\n \n" }
+
+ it_behaves_like 'a failed merge'
+ end
+ end
+
context 'with an exception without the detailed error' do
let(:permission_error) do
GRPC::PermissionDenied.new
@@ -340,6 +475,15 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
end
end
+ shared_examples '#user_cherry_pick with a gRPC error' do
+ it 'raises an exception' do
+ expect_any_instance_of(Gitaly::OperationService::Stub).to receive(:user_cherry_pick)
+ .and_raise(raised_error)
+
+ expect { subject }.to raise_error(expected_error, expected_error_message)
+ end
+ end
+
describe '#user_cherry_pick' do
let(:response_class) { Gitaly::UserCherryPickResponse }
@@ -354,13 +498,74 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
)
end
- before do
- expect_any_instance_of(Gitaly::OperationService::Stub)
- .to receive(:user_cherry_pick).with(kind_of(Gitaly::UserCherryPickRequest), kind_of(Hash))
- .and_return(response)
+ context 'when errors are not raised but returned in the response' do
+ before do
+ expect_any_instance_of(Gitaly::OperationService::Stub)
+ .to receive(:user_cherry_pick).with(kind_of(Gitaly::UserCherryPickRequest), kind_of(Hash))
+ .and_return(response)
+ end
+
+ it_behaves_like 'cherry pick and revert errors'
end
- it_behaves_like 'cherry pick and revert errors'
+ context 'when AccessCheckError is raised' do
+ let(:raised_error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::INTERNAL,
+ 'something failed',
+ Gitaly::UserCherryPickError.new(
+ access_check: Gitaly::AccessCheckError.new(
+ error_message: 'something went wrong'
+ )))
+ end
+
+ let(:expected_error) { Gitlab::Git::PreReceiveError }
+ let(:expected_error_message) { "something went wrong" }
+
+ it_behaves_like '#user_cherry_pick with a gRPC error'
+ end
+
+ context 'when NotAncestorError is raised' do
+ let(:raised_error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::FAILED_PRECONDITION,
+ 'Branch diverged',
+ Gitaly::UserCherryPickError.new(
+ target_branch_diverged: Gitaly::NotAncestorError.new
+ )
+ )
+ end
+
+ let(:expected_error) { Gitlab::Git::CommitError }
+ let(:expected_error_message) { 'branch diverged' }
+
+ it_behaves_like '#user_cherry_pick with a gRPC error'
+ end
+
+ context 'when MergeConflictError is raised' do
+ let(:raised_error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::FAILED_PRECONDITION,
+ 'Conflict',
+ Gitaly::UserCherryPickError.new(
+ cherry_pick_conflict: Gitaly::MergeConflictError.new
+ )
+ )
+ end
+
+ let(:expected_error) { Gitlab::Git::Repository::CreateTreeError }
+ let(:expected_error_message) { }
+
+ it_behaves_like '#user_cherry_pick with a gRPC error'
+ end
+
+ context 'when a non-detailed gRPC error is raised' do
+ let(:raised_error) { GRPC::Internal.new('non-detailed error') }
+ let(:expected_error) { GRPC::Internal }
+ let(:expected_error_message) { }
+
+ it_behaves_like '#user_cherry_pick with a gRPC error'
+ end
end
describe '#user_revert' do
@@ -489,21 +694,6 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
expect(subject).to eq(squash_sha)
end
- context "when git_error is present" do
- let(:response) do
- Gitaly::UserSquashResponse.new(git_error: "something failed")
- end
-
- it "raises a GitError exception" do
- expect_any_instance_of(Gitaly::OperationService::Stub)
- .to receive(:user_squash).with(request, kind_of(Hash))
- .and_return(response)
-
- expect { subject }.to raise_error(
- Gitlab::Git::Repository::GitError, "something failed")
- end
- end
-
shared_examples '#user_squash with an error' do
it 'raises a GitError exception' do
expect_any_instance_of(Gitaly::OperationService::Stub)
diff --git a/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb
index 4287c32b947..2a06983417d 100644
--- a/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb
@@ -55,20 +55,54 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueImporter, :clean_gitlab_redi
describe '#execute' do
let(:importer) { described_class.new(issue, project, client) }
- it 'creates the issue and assignees' do
- expect(importer)
- .to receive(:create_issue)
- .and_return(10)
+ context 'when :issues_full_text_search is disabled' do
+ before do
+ stub_feature_flags(issues_full_text_search: false)
+ end
+
+ it 'creates the issue and assignees but does not update search data' do
+ expect(importer)
+ .to receive(:create_issue)
+ .and_return(10)
+
+ expect(importer)
+ .to receive(:create_assignees)
+ .with(10)
+
+ expect(importer.issuable_finder)
+ .to receive(:cache_database_id)
+ .with(10)
+
+ expect(importer).not_to receive(:update_search_data)
+
+ importer.execute
+ end
+ end
- expect(importer)
- .to receive(:create_assignees)
- .with(10)
+ context 'when :issues_full_text_search feature is enabled' do
+ before do
+ stub_feature_flags(issues_full_text_search: true)
+ end
- expect(importer.issuable_finder)
- .to receive(:cache_database_id)
- .with(10)
+ it 'creates the issue and assignees and updates search data' do
+ expect(importer)
+ .to receive(:create_issue)
+ .and_return(10)
+
+ expect(importer)
+ .to receive(:create_assignees)
+ .with(10)
- importer.execute
+ expect(importer.issuable_finder)
+ .to receive(:cache_database_id)
+ .with(10)
+
+ expect(importer)
+ .to receive(:update_search_data)
+ .with(10)
+
+ importer.execute
+ end
end
end
diff --git a/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb b/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb
index 6b3d18f20e9..b0f553dbef7 100644
--- a/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb
@@ -9,6 +9,12 @@ RSpec.describe Gitlab::GithubImport::Importer::ReleasesImporter do
let(:github_release_name) { 'Initial Release' }
let(:created_at) { Time.new(2017, 1, 1, 12, 00) }
let(:released_at) { Time.new(2017, 1, 1, 12, 00) }
+ let(:author) do
+ double(
+ login: 'User A',
+ id: 1
+ )
+ end
let(:github_release) do
double(
@@ -17,11 +23,23 @@ RSpec.describe Gitlab::GithubImport::Importer::ReleasesImporter do
name: github_release_name,
body: 'This is my release',
created_at: created_at,
- published_at: released_at
+ published_at: released_at,
+ author: author
)
end
+ def stub_email_for_github_username(user_name = 'User A', user_email = 'user@example.com')
+ allow_next_instance_of(Gitlab::GithubImport::UserFinder) do |instance|
+ allow(instance).to receive(:email_for_github_username)
+ .with(user_name).and_return(user_email)
+ end
+ end
+
describe '#execute' do
+ before do
+ stub_email_for_github_username
+ end
+
it 'imports the releases in bulk' do
release_hash = {
tag_name: '1.0',
@@ -45,7 +63,8 @@ RSpec.describe Gitlab::GithubImport::Importer::ReleasesImporter do
description: 'This is my release',
created_at: created_at,
updated_at: created_at,
- published_at: nil
+ published_at: nil,
+ author: author
)
expect(importer).to receive(:each_release).and_return([release_double])
@@ -61,6 +80,10 @@ RSpec.describe Gitlab::GithubImport::Importer::ReleasesImporter do
end
describe '#build_releases' do
+ before do
+ stub_email_for_github_username
+ end
+
it 'returns an Array containing release rows' do
expect(importer).to receive(:each_release).and_return([github_release])
@@ -108,11 +131,15 @@ RSpec.describe Gitlab::GithubImport::Importer::ReleasesImporter do
describe '#build' do
let(:release_hash) { importer.build(github_release) }
- it 'returns the attributes of the release as a Hash' do
- expect(release_hash).to be_an_instance_of(Hash)
- end
-
context 'the returned Hash' do
+ before do
+ stub_email_for_github_username
+ end
+
+ it 'returns the attributes of the release as a Hash' do
+ expect(release_hash).to be_an_instance_of(Hash)
+ end
+
it 'includes the tag name' do
expect(release_hash[:tag]).to eq('1.0')
end
@@ -137,6 +164,39 @@ RSpec.describe Gitlab::GithubImport::Importer::ReleasesImporter do
expect(release_hash[:name]).to eq(github_release_name)
end
end
+
+ context 'author_id attribute' do
+ it 'returns the Gitlab user_id when Github release author is found' do
+ # Stub user email which matches a Gitlab user.
+ stub_email_for_github_username('User A', project.users.first.email)
+
+ # Disable cache read as the redis cache key can be set by other specs.
+ # https://gitlab.com/gitlab-org/gitlab/-/blob/88bffda004e0aca9c4b9f2de86bdbcc0b49f2bc7/lib/gitlab/github_import/user_finder.rb#L75
+ # Above line can return different user when read from cache.
+ allow(Gitlab::Cache::Import::Caching).to receive(:read).and_return(nil)
+
+ expect(release_hash[:author_id]).to eq(project.users.first.id)
+ end
+
+ it 'returns ghost user when author is empty in Github release' do
+ allow(github_release).to receive(:author).and_return(nil)
+
+ expect(release_hash[:author_id]).to eq(Gitlab::GithubImport.ghost_user_id)
+ end
+
+ context 'when Github author is not found in Gitlab' do
+ let(:author) { double(login: 'octocat', id: 1 ) }
+
+ before do
+ # Stub user email which does not match a Gitlab user.
+ stub_email_for_github_username('octocat', 'octocat@example.com')
+ end
+
+ it 'returns project creator as author' do
+ expect(release_hash[:author_id]).to eq(project.creator_id)
+ end
+ end
+ end
end
describe '#each_release' do
diff --git a/spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb b/spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb
index 4cd9f9dfad0..1924cd687e4 100644
--- a/spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb
+++ b/spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::GrapeLogging::Loggers::QueueDurationLogger do
describe ".parameters" do
let(:start_time) { Time.new(2018, 01, 01) }
- describe 'when no proxy time is available' do
+ describe 'when no proxy duration is available' do
let(:mock_request) { double('env', env: {}) }
it 'returns an empty hash' do
@@ -16,20 +16,18 @@ RSpec.describe Gitlab::GrapeLogging::Loggers::QueueDurationLogger do
end
end
- describe 'when a proxy time is available' do
+ describe 'when a proxy duration is available' do
let(:mock_request) do
double('env',
env: {
- 'HTTP_GITLAB_WORKHORSE_PROXY_START' => (start_time - 1.hour).to_i * (10**9)
+ 'GITLAB_RAILS_QUEUE_DURATION' => 2.seconds
}
)
end
- it 'returns the correct duration in seconds' do
+ it 'adds the duration to log parameters' do
travel_to(start_time) do
- subject.before
-
- expect(subject.parameters(mock_request, nil)).to eq( { 'queue_duration_s': 1.hour.to_f })
+ expect(subject.parameters(mock_request, nil)).to eq( { 'queue_duration_s': 2.seconds.to_f })
end
end
end
diff --git a/spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb b/spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb
index 0c548e1ce32..ac512e28e7b 100644
--- a/spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb
+++ b/spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb
@@ -103,4 +103,36 @@ RSpec.describe Gitlab::Graphql::Authorize::AuthorizeResource do
.to contain_exactly(:base_authorization, :sub_authorization)
end
end
+
+ describe 'authorizes_object?' do
+ it 'is false by default' do
+ a_class = Class.new do
+ include Gitlab::Graphql::Authorize::AuthorizeResource
+ end
+
+ expect(a_class).not_to be_authorizes_object
+ end
+
+ it 'is true after calling authorizes_object!' do
+ a_class = Class.new do
+ include Gitlab::Graphql::Authorize::AuthorizeResource
+
+ authorizes_object!
+ end
+
+ expect(a_class).to be_authorizes_object
+ end
+
+ it 'is true if a parent authorizes_object' do
+ parent = Class.new do
+ include Gitlab::Graphql::Authorize::AuthorizeResource
+
+ authorizes_object!
+ end
+
+ child = Class.new(parent)
+
+ expect(child).to be_authorizes_object
+ end
+ end
end
diff --git a/spec/lib/gitlab/graphql/markdown_field_spec.rb b/spec/lib/gitlab/graphql/markdown_field_spec.rb
index ed3f19d8cf2..974951ab30c 100644
--- a/spec/lib/gitlab/graphql/markdown_field_spec.rb
+++ b/spec/lib/gitlab/graphql/markdown_field_spec.rb
@@ -3,6 +3,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Graphql::MarkdownField do
include Gitlab::Routing
+ include GraphqlHelpers
describe '.markdown_field' do
it 'creates the field with some default attributes' do
@@ -21,21 +22,12 @@ RSpec.describe Gitlab::Graphql::MarkdownField do
expect { class_with_markdown_field(:test_html, null: true, resolver: 'not really') }
.to raise_error(expected_error)
end
-
- # TODO: remove as part of https://gitlab.com/gitlab-org/gitlab/-/merge_requests/27536
- # so that until that time, the developer check is there
- it 'raises when passing a resolve block' do
- expect { class_with_markdown_field(:test_html, null: true, resolve: -> (_, _, _) { 'not really' } ) }
- .to raise_error(expected_error)
- end
end
context 'resolving markdown' do
let_it_be(:note) { build(:note, note: '# Markdown!') }
let_it_be(:expected_markdown) { '<h1 data-sourcepos="1:1-1:11" dir="auto">Markdown!</h1>' }
- let_it_be(:query_type) { GraphQL::ObjectType.new }
- let_it_be(:schema) { GraphQL::Schema.define(query: query_type, mutation: nil)}
- let_it_be(:query) { GraphQL::Query.new(schema, document: nil, context: {}, variables: {}) }
+ let_it_be(:query) { GraphQL::Query.new(empty_schema, document: nil, context: {}, variables: {}) }
let_it_be(:context) { GraphQL::Query::Context.new(query: query, values: {}, object: nil) }
let(:type_class) { class_with_markdown_field(:note_html, null: false) }
@@ -55,6 +47,20 @@ RSpec.describe Gitlab::Graphql::MarkdownField do
end
end
+ context 'when a block is passed for the resolved object' do
+ let(:type_class) do
+ class_with_markdown_field(:note_html, null: false) do |resolved_object|
+ resolved_object.object
+ end
+ end
+
+ let(:type_instance) { type_class.authorized_new(class_wrapped_object(note), context) }
+
+ it 'renders markdown from the same property as the field name without the `_html` suffix' do
+ expect(field.resolve(type_instance, {}, context)).to eq(expected_markdown)
+ end
+ end
+
describe 'basic verification that references work' do
let_it_be(:project) { create(:project, :public) }
@@ -83,12 +89,22 @@ RSpec.describe Gitlab::Graphql::MarkdownField do
end
end
- def class_with_markdown_field(name, **args)
+ def class_with_markdown_field(name, **args, &blk)
Class.new(Types::BaseObject) do
prepend Gitlab::Graphql::MarkdownField
graphql_name 'MarkdownFieldTest'
- markdown_field name, **args
+ markdown_field name, **args, &blk
end
end
+
+ def class_wrapped_object(object)
+ Class.new do
+ def initialize(object)
+ @object = object
+ end
+
+ attr_accessor :object
+ end.new(object)
+ end
end
diff --git a/spec/lib/gitlab/graphql/negatable_arguments_spec.rb b/spec/lib/gitlab/graphql/negatable_arguments_spec.rb
index 71ef75836c0..04ee1c1b820 100644
--- a/spec/lib/gitlab/graphql/negatable_arguments_spec.rb
+++ b/spec/lib/gitlab/graphql/negatable_arguments_spec.rb
@@ -25,7 +25,9 @@ RSpec.describe Gitlab::Graphql::NegatableArguments do
expect(test_resolver.arguments['not'].type.arguments.keys).to match_array(['foo'])
end
- it 'defines all arguments passed as block even if called multiple times' do
+ # TODO: suffers from the `DuplicateNamesError` error. skip until we upgrade
+ # to the graphql 2.0 gem https://gitlab.com/gitlab-org/gitlab/-/issues/363131
+ xit 'defines all arguments passed as block even if called multiple times' do
test_resolver.negated do
argument :foo, GraphQL::Types::String, required: false
end
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb
index b6c3cb4e04a..97613edee5e 100644
--- a/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb
@@ -9,9 +9,7 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
# The spec will be merged with connection_spec.rb in the future.
let(:nodes) { Project.all.order(id: :asc) }
let(:arguments) { {} }
- let(:query_type) { GraphQL::ObjectType.new }
- let(:schema) { GraphQL::Schema.define(query: query_type, mutation: nil)}
- let(:context) { GraphQL::Query::Context.new(query: query_double(schema: schema), values: nil, object: nil) }
+ let(:context) { GraphQL::Query::Context.new(query: query_double, values: nil, object: nil) }
let_it_be(:column_order_id) { Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(attribute_name: 'id', order_expression: Project.arel_table[:id].asc) }
let_it_be(:column_order_id_desc) { Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(attribute_name: 'id', order_expression: Project.arel_table[:id].desc) }
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
index a4ba288b7f1..61a79d90546 100644
--- a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
@@ -7,9 +7,7 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
let(:nodes) { Project.all.order(id: :asc) }
let(:arguments) { {} }
- let(:query_type) { GraphQL::ObjectType.new }
- let(:schema) { GraphQL::Schema.define(query: query_type, mutation: nil)}
- let(:context) { GraphQL::Query::Context.new(query: query_double(schema: schema), values: nil, object: nil) }
+ let(:context) { GraphQL::Query::Context.new(query: query_double, values: nil, object: nil) }
subject(:connection) do
described_class.new(nodes, **{ context: context, max_page_size: 3 }.merge(arguments))
diff --git a/spec/lib/gitlab/graphql/present/field_extension_spec.rb b/spec/lib/gitlab/graphql/present/field_extension_spec.rb
index 5f0f444e0bb..49992d7b71f 100644
--- a/spec/lib/gitlab/graphql/present/field_extension_spec.rb
+++ b/spec/lib/gitlab/graphql/present/field_extension_spec.rb
@@ -143,23 +143,6 @@ RSpec.describe Gitlab::Graphql::Present::FieldExtension do
it_behaves_like 'calling the presenter method'
end
- # This is exercised here using an explicit `resolve:` proc, but
- # @resolver_proc values are used in field instrumentation as well.
- context 'when the field uses a resolve proc' do
- let(:presenter) { base_presenter }
- let(:field) do
- ::Types::BaseField.new(
- name: field_name,
- type: GraphQL::Types::String,
- null: true,
- owner: owner,
- resolve: ->(obj, args, ctx) { 'Hello from a proc' }
- )
- end
-
- specify { expect(resolve_value).to eq 'Hello from a proc' }
- end
-
context 'when the presenter provides a new method' do
def presenter
Class.new(base_presenter) do
diff --git a/spec/lib/gitlab/graphql/query_analyzers/ast/logger_analyzer_spec.rb b/spec/lib/gitlab/graphql/query_analyzers/ast/logger_analyzer_spec.rb
new file mode 100644
index 00000000000..a5274d49fdb
--- /dev/null
+++ b/spec/lib/gitlab/graphql/query_analyzers/ast/logger_analyzer_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Graphql::QueryAnalyzers::AST::LoggerAnalyzer do
+ let(:query) { GraphQL::Query.new(GitlabSchema, document: document, context: {}, variables: { body: 'some note' }) }
+ let(:document) do
+ GraphQL.parse <<-GRAPHQL
+ mutation createNote($body: String!) {
+ createNote(input: {noteableId: "gid://gitlab/Noteable/1", body: $body}) {
+ note {
+ id
+ }
+ }
+ }
+ GRAPHQL
+ end
+
+ describe '#result' do
+ let(:monotonic_time_before) { 42 }
+ let(:monotonic_time_after) { 500 }
+ let(:monotonic_time_duration) { monotonic_time_after - monotonic_time_before }
+
+ before do
+ RequestStore.store[:graphql_logs] = nil
+
+ allow(Gitlab::Metrics::System).to receive(:monotonic_time)
+ .and_return(monotonic_time_before, monotonic_time_before,
+ monotonic_time_before, monotonic_time_before,
+ monotonic_time_after)
+ end
+
+ it 'returns the complexity, depth, duration, etc' do
+ results = GraphQL::Analysis::AST.analyze_query(query, [described_class], multiplex_analyzers: [])
+ result = results.first
+
+ expect(result[:duration_s]).to eq monotonic_time_duration
+ expect(result[:depth]).to eq 3
+ expect(result[:complexity]).to eq 3
+ expect(result[:used_fields]).to eq ['Note.id', 'CreateNotePayload.note', 'Mutation.createNote']
+ expect(result[:used_deprecated_fields]).to eq []
+
+ request = result.except(:duration_s).merge({
+ operation_name: 'createNote',
+ variables: { body: "[FILTERED]" }.to_s
+ })
+
+ expect(RequestStore.store[:graphql_logs]).to match([request])
+ end
+ end
+end
diff --git a/spec/lib/gitlab/graphql/query_analyzers/ast/recursion_analyzer_spec.rb b/spec/lib/gitlab/graphql/query_analyzers/ast/recursion_analyzer_spec.rb
new file mode 100644
index 00000000000..997fb129f42
--- /dev/null
+++ b/spec/lib/gitlab/graphql/query_analyzers/ast/recursion_analyzer_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Graphql::QueryAnalyzers::AST::RecursionAnalyzer do
+ let(:query) { GraphQL::Query.new(GitlabSchema, document: document, context: {}, variables: { body: 'some note' }) }
+
+ context 'when recursion threshold not exceeded' do
+ let(:document) do
+ GraphQL.parse <<-GRAPHQL
+ query recurse {
+ group(fullPath: "h5bp") {
+ projects {
+ nodes {
+ name
+ group {
+ projects {
+ nodes {
+ name
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ GRAPHQL
+ end
+
+ it 'returns the complexity, depth, duration, etc' do
+ result = GraphQL::Analysis::AST.analyze_query(query, [described_class], multiplex_analyzers: [])
+
+ expect(result.first).to be_nil
+ end
+ end
+
+ context 'when recursion threshold exceeded' do
+ let(:document) do
+ GraphQL.parse <<-GRAPHQL
+ query recurse {
+ group(fullPath: "h5bp") {
+ projects {
+ nodes {
+ name
+ group {
+ projects {
+ nodes {
+ name
+ group {
+ projects {
+ nodes {
+ name
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ GRAPHQL
+ end
+
+ it 'returns error' do
+ result = GraphQL::Analysis::AST.analyze_query(query, [described_class], multiplex_analyzers: [])
+
+ expect(result.first.is_a?(GraphQL::AnalysisError)).to be_truthy
+ end
+ end
+end
diff --git a/spec/lib/gitlab/graphql/query_analyzers/logger_analyzer_spec.rb b/spec/lib/gitlab/graphql/query_analyzers/logger_analyzer_spec.rb
deleted file mode 100644
index dee8f9e3c64..00000000000
--- a/spec/lib/gitlab/graphql/query_analyzers/logger_analyzer_spec.rb
+++ /dev/null
@@ -1,49 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Graphql::QueryAnalyzers::LoggerAnalyzer do
- let(:initial_value) { analyzer.initial_value(query) }
- let(:analyzer) { described_class.new }
- let(:query) { GraphQL::Query.new(GitlabSchema, document: document, context: {}, variables: { body: "some note" }) }
- let(:document) do
- GraphQL.parse <<-GRAPHQL
- mutation createNote($body: String!) {
- createNote(input: {noteableId: "1", body: $body}) {
- note {
- id
- }
- }
- }
- GRAPHQL
- end
-
- describe '#final_value' do
- let(:monotonic_time_before) { 42 }
- let(:monotonic_time_after) { 500 }
- let(:monotonic_time_duration) { monotonic_time_after - monotonic_time_before }
- let(:memo) { initial_value }
-
- subject(:final_value) { analyzer.final_value(memo) }
-
- before do
- RequestStore.store[:graphql_logs] = nil
-
- allow(GraphQL::Analysis).to receive(:analyze_query).and_return([4, 2, [[], []]])
- allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(monotonic_time_before, monotonic_time_after)
- allow(Gitlab::GraphqlLogger).to receive(:info)
- end
-
- it 'inserts duration in seconds to memo and sets request store' do
- expect { final_value }.to change { memo[:duration_s] }.to(monotonic_time_duration)
- .and change { RequestStore.store[:graphql_logs] }.to([{
- complexity: 4,
- depth: 2,
- operation_name: query.operation_name,
- used_deprecated_fields: [],
- used_fields: [],
- variables: { body: "[FILTERED]" }.to_s
- }])
- end
- end
-end
diff --git a/spec/lib/gitlab/graphql/tracers/logger_tracer_spec.rb b/spec/lib/gitlab/graphql/tracers/logger_tracer_spec.rb
index 5bc077a963e..20792fb4554 100644
--- a/spec/lib/gitlab/graphql/tracers/logger_tracer_spec.rb
+++ b/spec/lib/gitlab/graphql/tracers/logger_tracer_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::Graphql::Tracers::LoggerTracer do
use Gitlab::Graphql::Tracers::LoggerTracer
use Gitlab::Graphql::Tracers::TimerTracer
- query_analyzer Gitlab::Graphql::QueryAnalyzers::LoggerAnalyzer.new
+ query_analyzer Gitlab::Graphql::QueryAnalyzers::AST::LoggerAnalyzer
query Graphql::FakeQueryType
end
@@ -20,11 +20,11 @@ RSpec.describe Gitlab::Graphql::Tracers::LoggerTracer do
end
end
- it "logs every query", :aggregate_failures do
+ it "logs every query", :aggregate_failures, :unlimited_max_formatted_output_length do
variables = { name: "Ada Lovelace" }
query_string = 'query fooOperation($name: String) { helloWorld(message: $name) }'
- # Build an actual query so we don't have to hardocde the "fingerprint" calculations
+ # Build an actual query so we don't have to hardcode the "fingerprint" calculations
query = GraphQL::Query.new(dummy_schema, query_string, variables: variables)
expect(::Gitlab::GraphqlLogger).to receive(:info).with({
diff --git a/spec/lib/gitlab/hash_digest/facade_spec.rb b/spec/lib/gitlab/hash_digest/facade_spec.rb
new file mode 100644
index 00000000000..b352744513e
--- /dev/null
+++ b/spec/lib/gitlab/hash_digest/facade_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::HashDigest::Facade do
+ describe '.hexdigest' do
+ let(:plaintext) { 'something that is plaintext' }
+
+ let(:sha256_hash) { OpenSSL::Digest::SHA256.hexdigest(plaintext) }
+ let(:md5_hash) { Digest::MD5.hexdigest(plaintext) } # rubocop:disable Fips/MD5
+
+ it 'uses SHA256' do
+ expect(described_class.hexdigest(plaintext)).to eq(sha256_hash)
+ end
+
+ context 'when feature flags is not available' do
+ before do
+ allow(Feature).to receive(:feature_flags_available?).and_return(false)
+ end
+
+ it 'uses MD5' do
+ expect(described_class.hexdigest(plaintext)).to eq(md5_hash)
+ end
+ end
+
+ context 'when active_support_hash_digest_sha256 FF is disabled' do
+ before do
+ stub_feature_flags(active_support_hash_digest_sha256: false)
+ end
+
+ it 'uses MD5' do
+ expect(described_class.hexdigest(plaintext)).to eq(md5_hash)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/highlight_spec.rb b/spec/lib/gitlab/highlight_spec.rb
index 65d8c59fea7..537e59d91c3 100644
--- a/spec/lib/gitlab/highlight_spec.rb
+++ b/spec/lib/gitlab/highlight_spec.rb
@@ -124,27 +124,14 @@ RSpec.describe Gitlab::Highlight do
context 'timeout' do
subject(:highlight) { described_class.new('file.rb', 'begin', language: 'ruby').highlight('Content') }
- it 'utilizes timeout for web' do
- expect(Timeout).to receive(:timeout).with(described_class::TIMEOUT_FOREGROUND).and_call_original
-
- highlight
- end
-
it 'falls back to plaintext on timeout' do
allow(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
- expect(Timeout).to receive(:timeout).and_raise(Timeout::Error)
+ expect(Gitlab::RenderTimeout).to receive(:timeout).and_raise(Timeout::Error)
expect(Rouge::Lexers::PlainText).to receive(:lex).and_call_original
highlight
end
-
- it 'utilizes longer timeout for sidekiq' do
- allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true)
- expect(Timeout).to receive(:timeout).with(described_class::TIMEOUT_BACKGROUND).and_call_original
-
- highlight
- end
end
end
end
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 1546b6a26c8..9d516c8d7ac 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -65,6 +65,7 @@ issues:
- customer_relations_contacts
- issue_customer_relations_contacts
- email
+- issuable_resource_links
work_item_type:
- issues
events:
@@ -274,6 +275,7 @@ ci_pipelines:
- security_findings
- daily_build_group_report_results
- latest_builds
+- latest_successful_builds
- daily_report_results
- latest_builds_report_results
- messages
@@ -336,6 +338,8 @@ integrations:
- jira_tracker_data
- zentao_tracker_data
- issue_tracker_data
+# dingtalk_tracker_data JiHu-specific, see https://jihulab.com/gitlab-cn/gitlab/-/merge_requests/417
+- dingtalk_tracker_data
hooks:
- project
- web_hook_logs
@@ -414,6 +418,8 @@ project:
- pushover_integration
- jira_integration
- zentao_integration
+# dingtalk_integration JiHu-specific, see https://jihulab.com/gitlab-cn/gitlab/-/merge_requests/417
+- dingtalk_integration
- redmine_integration
- youtrack_integration
- custom_issue_tracker_integration
@@ -613,6 +619,7 @@ project:
- secure_files
- security_trainings
- vulnerability_reads
+- build_artifacts_size_refresh
award_emoji:
- awardable
- user
diff --git a/spec/lib/gitlab/import_export/lfs_saver_spec.rb b/spec/lib/gitlab/import_export/lfs_saver_spec.rb
index 84bd782c467..aa456736f78 100644
--- a/spec/lib/gitlab/import_export/lfs_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/lfs_saver_spec.rb
@@ -45,6 +45,18 @@ RSpec.describe Gitlab::ImportExport::LfsSaver do
expect(File).to exist("#{shared.export_path}/lfs-objects/#{lfs_object.oid}")
end
+ context 'when lfs object has file on disk missing' do
+ it 'does not attempt to copy non-existent file' do
+ FileUtils.rm(lfs_object.file.path)
+ expect(saver).not_to receive(:copy_files)
+
+ saver.save # rubocop:disable Rails/SaveBang
+
+ expect(shared.errors).to be_empty
+ expect(File).not_to exist("#{shared.export_path}/lfs-objects/#{lfs_object.oid}")
+ end
+ end
+
describe 'saving a json file' do
before do
# Create two more LfsObjectProject records with different `repository_type`s
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index e06fcb0cd3f..d7f07a1eadf 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -585,6 +585,7 @@ ProjectFeature:
- operations_access_level
- security_and_compliance_access_level
- container_registry_access_level
+- package_registry_access_level
- created_at
- updated_at
ProtectedBranch::MergeAccessLevel:
@@ -858,6 +859,10 @@ Epic:
- external_key
- confidential
- color
+ - total_opened_issue_weight
+ - total_closed_issue_weight
+ - total_opened_issue_count
+ - total_closed_issue_count
EpicIssue:
- id
- relative_position
diff --git a/spec/lib/gitlab/import_sources_spec.rb b/spec/lib/gitlab/import_sources_spec.rb
index 416d651b0de..f42a109aa3a 100644
--- a/spec/lib/gitlab/import_sources_spec.rb
+++ b/spec/lib/gitlab/import_sources_spec.rb
@@ -7,17 +7,17 @@ RSpec.describe Gitlab::ImportSources do
it 'returns a hash' do
expected =
{
- 'GitHub' => 'github',
- 'Bitbucket Cloud' => 'bitbucket',
- 'Bitbucket Server' => 'bitbucket_server',
- 'GitLab.com' => 'gitlab',
- 'Google Code' => 'google_code',
- 'FogBugz' => 'fogbugz',
- 'Repo by URL' => 'git',
- 'GitLab export' => 'gitlab_project',
- 'Gitea' => 'gitea',
- 'Manifest file' => 'manifest',
- 'Phabricator' => 'phabricator'
+ 'GitHub' => 'github',
+ 'Bitbucket Cloud' => 'bitbucket',
+ 'Bitbucket Server' => 'bitbucket_server',
+ 'GitLab.com' => 'gitlab',
+ 'Google Code' => 'google_code',
+ 'FogBugz' => 'fogbugz',
+ 'Repository by URL' => 'git',
+ 'GitLab export' => 'gitlab_project',
+ 'Gitea' => 'gitea',
+ 'Manifest file' => 'manifest',
+ 'Phabricator' => 'phabricator'
}
expect(described_class.options).to eq(expected)
@@ -93,7 +93,7 @@ RSpec.describe Gitlab::ImportSources do
'gitlab' => 'GitLab.com',
'google_code' => 'Google Code',
'fogbugz' => 'FogBugz',
- 'git' => 'Repo by URL',
+ 'git' => 'Repository by URL',
'gitlab_project' => 'GitLab export',
'gitea' => 'Gitea',
'manifest' => 'Manifest file',
diff --git a/spec/lib/gitlab/inactive_projects_deletion_warning_tracker_spec.rb b/spec/lib/gitlab/inactive_projects_deletion_warning_tracker_spec.rb
index 4eb2388f3f7..7afb80488d8 100644
--- a/spec/lib/gitlab/inactive_projects_deletion_warning_tracker_spec.rb
+++ b/spec/lib/gitlab/inactive_projects_deletion_warning_tracker_spec.rb
@@ -2,14 +2,12 @@
require "spec_helper"
-RSpec.describe Gitlab::InactiveProjectsDeletionWarningTracker do
+RSpec.describe Gitlab::InactiveProjectsDeletionWarningTracker, :freeze_time do
let_it_be(:project_id) { 1 }
describe '.notified_projects', :clean_gitlab_redis_shared_state do
before do
- freeze_time do
- Gitlab::InactiveProjectsDeletionWarningTracker.new(project_id).mark_notified
- end
+ Gitlab::InactiveProjectsDeletionWarningTracker.new(project_id).mark_notified
end
it 'returns the list of projects for which deletion warning email has been sent' do
@@ -53,6 +51,46 @@ RSpec.describe Gitlab::InactiveProjectsDeletionWarningTracker do
end
end
+ describe '#notification_date', :clean_gitlab_redis_shared_state do
+ before do
+ Gitlab::InactiveProjectsDeletionWarningTracker.new(project_id).mark_notified
+ end
+
+ it 'returns the date if a deletion warning email has been sent for a given project' do
+ expect(Gitlab::InactiveProjectsDeletionWarningTracker.new(project_id).notification_date).to eq("#{Date.current}")
+ end
+
+ it 'returns nil if a deletion warning email has not been sent for a given project' do
+ expect(Gitlab::InactiveProjectsDeletionWarningTracker.new(2).notification_date).to eq(nil)
+ end
+ end
+
+ describe '#scheduled_deletion_date', :clean_gitlab_redis_shared_state do
+ shared_examples 'returns the expected deletion date' do
+ it do
+ expect(Gitlab::InactiveProjectsDeletionWarningTracker.new(project_id).scheduled_deletion_date)
+ .to eq(1.month.from_now.to_date.to_s)
+ end
+ end
+
+ before do
+ stub_application_setting(inactive_projects_delete_after_months: 2)
+ stub_application_setting(inactive_projects_send_warning_email_after_months: 1)
+ end
+
+ context 'without a stored deletion email date' do
+ it_behaves_like 'returns the expected deletion date'
+ end
+
+ context 'with a stored deletion email date' do
+ before do
+ Gitlab::InactiveProjectsDeletionWarningTracker.new(project_id).mark_notified
+ end
+
+ it_behaves_like 'returns the expected deletion date'
+ end
+ end
+
describe '#reset' do
before do
Gitlab::InactiveProjectsDeletionWarningTracker.new(project_id).mark_notified
diff --git a/spec/lib/gitlab/instrumentation_helper_spec.rb b/spec/lib/gitlab/instrumentation_helper_spec.rb
index 5fea355ab4f..79d626386d4 100644
--- a/spec/lib/gitlab/instrumentation_helper_spec.rb
+++ b/spec/lib/gitlab/instrumentation_helper_spec.rb
@@ -110,6 +110,14 @@ RSpec.describe Gitlab::InstrumentationHelper do
expect(payload).to include(:pid)
end
+ it 'logs the worker ID' do
+ expect(Prometheus::PidProvider).to receive(:worker_id).and_return('puma_1')
+
+ subject
+
+ expect(payload).to include(worker_id: 'puma_1')
+ end
+
context 'when logging memory allocations' do
include MemoryInstrumentationHelper
diff --git a/spec/lib/gitlab/jira_import/base_importer_spec.rb b/spec/lib/gitlab/jira_import/base_importer_spec.rb
index 479551095de..70a594b09af 100644
--- a/spec/lib/gitlab/jira_import/base_importer_spec.rb
+++ b/spec/lib/gitlab/jira_import/base_importer_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::JiraImport::BaseImporter do
- include JiraServiceHelper
+ include JiraIntegrationHelpers
let(:project) { create(:project) }
diff --git a/spec/lib/gitlab/jira_import/issues_importer_spec.rb b/spec/lib/gitlab/jira_import/issues_importer_spec.rb
index aead5405bd1..565a9ad17e1 100644
--- a/spec/lib/gitlab/jira_import/issues_importer_spec.rb
+++ b/spec/lib/gitlab/jira_import/issues_importer_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::JiraImport::IssuesImporter do
- include JiraServiceHelper
+ include JiraIntegrationHelpers
let_it_be(:user) { create(:user) }
let_it_be(:current_user) { create(:user) }
diff --git a/spec/lib/gitlab/jira_import/labels_importer_spec.rb b/spec/lib/gitlab/jira_import/labels_importer_spec.rb
index 71440590815..4fb5e363475 100644
--- a/spec/lib/gitlab/jira_import/labels_importer_spec.rb
+++ b/spec/lib/gitlab/jira_import/labels_importer_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::JiraImport::LabelsImporter do
- include JiraServiceHelper
+ include JiraIntegrationHelpers
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
diff --git a/spec/lib/gitlab/jira_import_spec.rb b/spec/lib/gitlab/jira_import_spec.rb
index a7c73e79641..972b0ab6ed1 100644
--- a/spec/lib/gitlab/jira_import_spec.rb
+++ b/spec/lib/gitlab/jira_import_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::JiraImport do
let(:project_id) { 321 }
describe '.validate_project_settings!' do
- include JiraServiceHelper
+ include JiraIntegrationHelpers
let_it_be(:project, reload: true) { create(:project) }
diff --git a/spec/lib/gitlab/legacy_github_import/importer_spec.rb b/spec/lib/gitlab/legacy_github_import/importer_spec.rb
index e69edbe6dc0..1800b42160d 100644
--- a/spec/lib/gitlab/legacy_github_import/importer_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/importer_spec.rb
@@ -260,28 +260,6 @@ RSpec.describe Gitlab::LegacyGithubImport::Importer do
)
end
- context 'when importing a GitHub project' do
- let(:api_root) { 'https://api.github.com' }
- let(:repo_root) { 'https://github.com' }
-
- subject { described_class.new(project) }
-
- it_behaves_like 'Gitlab::LegacyGithubImport::Importer#execute'
- it_behaves_like 'Gitlab::LegacyGithubImport::Importer#execute an error occurs'
- it_behaves_like 'Gitlab::LegacyGithubImport unit-testing'
-
- describe '#client' do
- it 'instantiates a Client' do
- allow(project).to receive(:import_data).and_return(double(credentials: credentials))
- expect(Gitlab::LegacyGithubImport::Client).to receive(:new).with(
- credentials[:user]
- )
-
- subject.client
- end
- end
- end
-
context 'when importing a Gitea project' do
let(:api_root) { 'https://try.gitea.io/api/v1' }
let(:repo_root) { 'https://try.gitea.io' }
diff --git a/spec/lib/gitlab/mail_room/mail_room_spec.rb b/spec/lib/gitlab/mail_room/mail_room_spec.rb
index 12fb12ebd87..06a25be757e 100644
--- a/spec/lib/gitlab/mail_room/mail_room_spec.rb
+++ b/spec/lib/gitlab/mail_room/mail_room_spec.rb
@@ -303,6 +303,7 @@ RSpec.describe Gitlab::MailRoom do
delivery_method: 'postback',
delivery_options: {
delivery_url: "http://gitlab.example/api/v4/internal/mail_room/incoming_email",
+ content_type: "text/plain",
jwt_auth_header: Gitlab::MailRoom::INTERNAL_API_REQUEST_HEADER,
jwt_issuer: Gitlab::MailRoom::INTERNAL_API_REQUEST_JWT_ISSUER,
jwt_algorithm: 'HS256',
@@ -316,6 +317,7 @@ RSpec.describe Gitlab::MailRoom do
delivery_method: 'postback',
delivery_options: {
delivery_url: "http://gitlab.example/api/v4/internal/mail_room/service_desk_email",
+ content_type: "text/plain",
jwt_auth_header: Gitlab::MailRoom::INTERNAL_API_REQUEST_HEADER,
jwt_issuer: Gitlab::MailRoom::INTERNAL_API_REQUEST_JWT_ISSUER,
jwt_algorithm: 'HS256',
diff --git a/spec/lib/gitlab/mailgun/webhook_processors/failure_logger_spec.rb b/spec/lib/gitlab/mailgun/webhook_processors/failure_logger_spec.rb
new file mode 100644
index 00000000000..a2286415e96
--- /dev/null
+++ b/spec/lib/gitlab/mailgun/webhook_processors/failure_logger_spec.rb
@@ -0,0 +1,92 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Mailgun::WebhookProcessors::FailureLogger do
+ describe '#execute', :freeze_time, :clean_gitlab_redis_rate_limiting do
+ let(:base_payload) do
+ {
+ 'id' => 'U2kZkAiuScqcMTq-8Atz-Q',
+ 'event' => 'failed',
+ 'recipient' => 'recipient@gitlab.com',
+ 'reason' => 'bounce',
+ 'delivery-status' => {
+ 'code' => '421',
+ 'message' => '4.4.2 mxfront9g.mail.example.com Error: timeout exceeded'
+ }
+ }
+ end
+
+ context 'on permanent failure' do
+ let(:processor) { described_class.new(base_payload.merge({ 'severity' => 'permanent' })) }
+
+ it 'logs the failure immediately' do
+ expect(Gitlab::ErrorTracking::Logger).to receive(:error).with(
+ event: 'email_delivery_failure',
+ mailgun_event_id: base_payload['id'],
+ recipient: base_payload['recipient'],
+ failure_type: 'permanent',
+ failure_reason: base_payload['reason'],
+ failure_code: base_payload['delivery-status']['code'],
+ failure_message: base_payload['delivery-status']['message']
+ )
+
+ processor.execute
+ end
+ end
+
+ context 'on temporary failure' do
+ let(:processor) { described_class.new(base_payload.merge({ 'severity' => 'temporary' })) }
+
+ before do
+ allow(Gitlab::ApplicationRateLimiter).to receive(:rate_limits)
+ .and_return(temporary_email_failure: { threshold: 1, interval: 1.minute })
+ end
+
+ context 'when threshold is not exceeded' do
+ it 'increments counter but does not log the failure' do
+ expect(Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(
+ :temporary_email_failure, scope: 'recipient@gitlab.com'
+ ).and_call_original
+ expect(Gitlab::ErrorTracking::Logger).not_to receive(:error)
+
+ processor.execute
+ end
+ end
+
+ context 'when threshold is exceeded' do
+ before do
+ processor.execute
+ end
+
+ it 'increments counter and logs the failure' do
+ expect(Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(
+ :temporary_email_failure, scope: 'recipient@gitlab.com'
+ ).and_call_original
+ expect(Gitlab::ErrorTracking::Logger).to receive(:error).with(
+ event: 'email_delivery_failure',
+ mailgun_event_id: base_payload['id'],
+ recipient: base_payload['recipient'],
+ failure_type: 'temporary',
+ failure_reason: base_payload['reason'],
+ failure_code: base_payload['delivery-status']['code'],
+ failure_message: base_payload['delivery-status']['message']
+ )
+
+ processor.execute
+ end
+ end
+ end
+
+ context 'on other events' do
+ let(:processor) { described_class.new(base_payload.merge({ 'event' => 'delivered' })) }
+
+ it 'does nothing' do
+ expect(Gitlab::ErrorTracking::Logger).not_to receive(:error)
+ expect(Gitlab::ApplicationRateLimiter).not_to receive(:throttled?)
+
+ processor.execute
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/mailgun/webhook_processors/member_invites_spec.rb b/spec/lib/gitlab/mailgun/webhook_processors/member_invites_spec.rb
new file mode 100644
index 00000000000..3bd364b0d15
--- /dev/null
+++ b/spec/lib/gitlab/mailgun/webhook_processors/member_invites_spec.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Mailgun::WebhookProcessors::MemberInvites do
+ describe '#execute', :aggregate_failures do
+ let_it_be(:member) { create(:project_member, :invited) }
+
+ let(:raw_invite_token) { member.raw_invite_token }
+ let(:payload) do
+ {
+ 'event' => 'failed',
+ 'severity' => 'permanent',
+ 'tags' => [Members::Mailgun::INVITE_EMAIL_TAG],
+ 'user-variables' => { ::Members::Mailgun::INVITE_EMAIL_TOKEN_KEY => raw_invite_token }
+ }
+ end
+
+ subject(:service) { described_class.new(payload).execute }
+
+ it 'marks the member invite email success as false' do
+ expect(Gitlab::AppLogger).to receive(:info).with(
+ message: /^UPDATED MEMBER INVITE_EMAIL_SUCCESS/,
+ event: 'updated_member_invite_email_success'
+ ).and_call_original
+
+ expect { service }.to change { member.reload.invite_email_success }.from(true).to(false)
+ end
+
+ context 'when invite token is not found in payload' do
+ before do
+ payload.delete('user-variables')
+ end
+
+ it 'does not change member status and logs an error' do
+ expect(Gitlab::AppLogger).not_to receive(:info)
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
+ an_instance_of(described_class::ProcessWebhookServiceError))
+
+ expect { service }.not_to change { member.reload.invite_email_success }
+ end
+ end
+
+ shared_examples 'does nothing' do
+ it 'does not change member status' do
+ expect(Gitlab::AppLogger).not_to receive(:info)
+
+ expect { service }.not_to change { member.reload.invite_email_success }
+ end
+ end
+
+ context 'when member can not be found' do
+ let(:raw_invite_token) { '_foobar_' }
+
+ it_behaves_like 'does nothing'
+ end
+
+ context 'when failure is temporary' do
+ before do
+ payload['severity'] = 'temporary'
+ end
+
+ it_behaves_like 'does nothing'
+ end
+
+ context 'when email is not a member invite' do
+ before do
+ payload.delete('tags')
+ end
+
+ it_behaves_like 'does nothing'
+ end
+ end
+end
diff --git a/spec/lib/gitlab/memory/jemalloc_spec.rb b/spec/lib/gitlab/memory/jemalloc_spec.rb
new file mode 100644
index 00000000000..8847516b52c
--- /dev/null
+++ b/spec/lib/gitlab/memory/jemalloc_spec.rb
@@ -0,0 +1,121 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Memory::Jemalloc do
+ let(:outdir) { Dir.mktmpdir }
+
+ after do
+ FileUtils.rm_f(outdir)
+ end
+
+ context 'when jemalloc is loaded' do
+ let(:fiddle_func) { instance_double(::Fiddle::Function) }
+
+ context 'with JSON format' do
+ let(:format) { :json }
+ let(:output) { '{"a": 24}' }
+
+ before do
+ stub_stats_call(output, 'J')
+ end
+
+ describe '.stats' do
+ it 'returns stats JSON' do
+ expect(described_class.stats(format: format)).to eq(output)
+ end
+ end
+
+ describe '.dump_stats' do
+ it 'writes stats JSON file' do
+ described_class.dump_stats(path: outdir, format: format)
+
+ file = Dir.entries(outdir).find { |e| e.match(/jemalloc_stats\.#{$$}\.\d+\.json$/) }
+ expect(file).not_to be_nil
+ expect(File.read(File.join(outdir, file))).to eq(output)
+ end
+ end
+ end
+
+ context 'with text format' do
+ let(:format) { :text }
+ let(:output) { 'stats' }
+
+ before do
+ stub_stats_call(output)
+ end
+
+ describe '.stats' do
+ it 'returns a text report' do
+ expect(described_class.stats(format: format)).to eq(output)
+ end
+ end
+
+ describe '.dump_stats' do
+ it 'writes stats text file' do
+ described_class.dump_stats(path: outdir, format: format)
+
+ file = Dir.entries(outdir).find { |e| e.match(/jemalloc_stats\.#{$$}\.\d+\.txt$/) }
+ expect(file).not_to be_nil
+ expect(File.read(File.join(outdir, file))).to eq(output)
+ end
+ end
+ end
+
+ context 'with unsupported format' do
+ let(:format) { 'unsupported' }
+
+ describe '.stats' do
+ it 'raises an error' do
+ expect do
+ described_class.stats(format: format)
+ end.to raise_error(/format must be one of/)
+ end
+ end
+
+ describe '.dump_stats' do
+ it 'raises an error' do
+ expect do
+ described_class.dump_stats(path: outdir, format: format)
+ end.to raise_error(/format must be one of/)
+ end
+ end
+ end
+ end
+
+ context 'when jemalloc is not loaded' do
+ before do
+ expect(::Fiddle::Handle).to receive(:sym).and_raise(Fiddle::DLError)
+ end
+
+ describe '.stats' do
+ it 'returns nil' do
+ expect(described_class.stats).to be_nil
+ end
+ end
+
+ describe '.dump_stats' do
+ it 'does nothing' do
+ stub_env('LD_PRELOAD', nil)
+
+ described_class.dump_stats(path: outdir)
+
+ expect(Dir.empty?(outdir)).to be(true)
+ end
+ end
+ end
+
+ def stub_stats_call(output, expected_options = '')
+ # Stub function pointer to stats call.
+ func_pointer = Fiddle::Pointer.new(0xd34db33f)
+ expect(::Fiddle::Handle).to receive(:sym).with('malloc_stats_print').and_return(func_pointer)
+
+ # Stub actual function call.
+ expect(::Fiddle::Function).to receive(:new)
+ .with(func_pointer, anything, anything)
+ .and_return(fiddle_func)
+ expect(fiddle_func).to receive(:call).with(anything, nil, expected_options) do |callback, _, options|
+ callback.call(nil, output)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/dashboard/processor_spec.rb b/spec/lib/gitlab/metrics/dashboard/processor_spec.rb
index 14a4c01fce3..52908a0b339 100644
--- a/spec/lib/gitlab/metrics/dashboard/processor_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/processor_spec.rb
@@ -16,7 +16,6 @@ RSpec.describe Gitlab::Metrics::Dashboard::Processor do
Gitlab::Metrics::Dashboard::Stages::CustomMetricsInserter,
Gitlab::Metrics::Dashboard::Stages::CustomMetricsDetailsInserter,
Gitlab::Metrics::Dashboard::Stages::MetricEndpointInserter,
- Gitlab::Metrics::Dashboard::Stages::AlertsInserter,
Gitlab::Metrics::Dashboard::Stages::PanelIdsInserter,
Gitlab::Metrics::Dashboard::Stages::UrlValidator
]
@@ -118,43 +117,6 @@ RSpec.describe Gitlab::Metrics::Dashboard::Processor do
end
end
- context 'when the dashboard references persisted metrics with alerts' do
- let!(:alert) do
- create(
- :prometheus_alert,
- environment: environment,
- project: project,
- prometheus_metric: persisted_metric
- )
- end
-
- shared_examples_for 'has saved alerts' do
- it 'includes an alert path' do
- target_metric = all_metrics.find { |metric| metric[:metric_id] == persisted_metric.id }
-
- expect(target_metric).to be_a Hash
- expect(target_metric).to include(:alert_path)
- expect(target_metric[:alert_path]).to include(
- project.path,
- persisted_metric.id.to_s,
- environment.id.to_s
- )
- end
- end
-
- context 'that are shared across projects' do
- let!(:persisted_metric) { create(:prometheus_metric, :common, identifier: 'metric_a1') }
-
- it_behaves_like 'has saved alerts'
- end
-
- context 'when the project has associated metrics' do
- let!(:persisted_metric) { create(:prometheus_metric, project: project, group: :business) }
-
- it_behaves_like 'has saved alerts'
- end
- end
-
context 'when there are no alerts' do
let!(:persisted_metric) { create(:prometheus_metric, :common, identifier: 'metric_a1') }
diff --git a/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb
index c88d8c17eac..57790ad78a8 100644
--- a/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb
+++ b/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb
@@ -66,7 +66,7 @@ RSpec.describe Gitlab::Metrics::Samplers::DatabaseSampler do
let(:main_replica_host) { main_load_balancer.host }
let(:ci_load_balancer) { double(:load_balancer, host_list: ci_host_list, configuration: configuration) }
- let(:configuration) { double(:configuration, primary_connection_specification_name: 'Ci::ApplicationRecord') }
+ let(:configuration) { double(:configuration, connection_specification_name: 'Ci::ApplicationRecord') }
let(:ci_host_list) { double(:host_list, hosts: [ci_replica_host]) }
let(:ci_replica_host) { double(:host, connection: ci_connection) }
let(:ci_connection) { double(:connection, pool: Ci::ApplicationRecord.connection_pool) }
@@ -121,7 +121,7 @@ RSpec.describe Gitlab::Metrics::Samplers::DatabaseSampler do
let(:main_replica_host) { main_load_balancer.host }
let(:ci_load_balancer) { double(:load_balancer, host_list: ci_host_list, configuration: configuration) }
- let(:configuration) { double(:configuration, primary_connection_specification_name: 'Ci::ApplicationRecord') }
+ let(:configuration) { double(:configuration, connection_specification_name: 'Ci::ApplicationRecord') }
let(:ci_host_list) { double(:host_list, hosts: [ci_replica_host]) }
let(:ci_replica_host) { double(:host, connection: ci_connection) }
let(:ci_connection) { double(:connection, pool: Ci::ApplicationRecord.connection_pool) }
diff --git a/spec/lib/gitlab/metrics/sli_spec.rb b/spec/lib/gitlab/metrics/sli_spec.rb
index 9b776d6738d..102ea442b3a 100644
--- a/spec/lib/gitlab/metrics/sli_spec.rb
+++ b/spec/lib/gitlab/metrics/sli_spec.rb
@@ -17,13 +17,13 @@ RSpec.describe Gitlab::Metrics::Sli do
it 'allows different SLIs to be defined on each subclass' do
apdex_counters = [
- fake_total_counter('foo', 'apdex'),
- fake_numerator_counter('foo', 'apdex', 'success')
+ fake_total_counter('foo_apdex'),
+ fake_numerator_counter('foo_apdex', 'success')
]
error_rate_counters = [
- fake_total_counter('foo', 'error_rate'),
- fake_numerator_counter('foo', 'error_rate', 'error')
+ fake_total_counter('foo'),
+ fake_numerator_counter('foo', 'error')
]
apdex = described_class::Apdex.initialize_sli(:foo, [{ hello: :world }])
@@ -40,13 +40,17 @@ RSpec.describe Gitlab::Metrics::Sli do
end
subclasses = {
- Gitlab::Metrics::Sli::Apdex => :success,
- Gitlab::Metrics::Sli::ErrorRate => :error
+ Gitlab::Metrics::Sli::Apdex => {
+ suffix: '_apdex',
+ numerator: :success
+ },
+ Gitlab::Metrics::Sli::ErrorRate => {
+ suffix: '',
+ numerator: :error
+ }
}
- subclasses.each do |subclass, numerator_type|
- subclass_type = subclass.to_s.demodulize.underscore
-
+ subclasses.each do |subclass, subclass_info|
describe subclass do
describe 'Class methods' do
before do
@@ -73,8 +77,8 @@ RSpec.describe Gitlab::Metrics::Sli do
describe '.initialize_sli' do
it 'returns and stores a new initialized SLI' do
counters = [
- fake_total_counter(:bar, subclass_type),
- fake_numerator_counter(:bar, subclass_type, numerator_type)
+ fake_total_counter("bar#{subclass_info[:suffix]}"),
+ fake_numerator_counter("bar#{subclass_info[:suffix]}", subclass_info[:numerator])
]
sli = described_class.initialize_sli(:bar, [{ hello: :world }])
@@ -86,8 +90,8 @@ RSpec.describe Gitlab::Metrics::Sli do
it 'does not change labels for an already-initialized SLI' do
counters = [
- fake_total_counter(:bar, subclass_type),
- fake_numerator_counter(:bar, subclass_type, numerator_type)
+ fake_total_counter("bar#{subclass_info[:suffix]}"),
+ fake_numerator_counter("bar#{subclass_info[:suffix]}", subclass_info[:numerator])
]
sli = described_class.initialize_sli(:bar, [{ hello: :world }])
@@ -106,8 +110,8 @@ RSpec.describe Gitlab::Metrics::Sli do
describe '.initialized?' do
before do
- fake_total_counter(:boom, subclass_type)
- fake_numerator_counter(:boom, subclass_type, numerator_type)
+ fake_total_counter("boom#{subclass_info[:suffix]}")
+ fake_numerator_counter("boom#{subclass_info[:suffix]}", subclass_info[:numerator])
end
it 'is true when an SLI was initialized with labels' do
@@ -125,8 +129,8 @@ RSpec.describe Gitlab::Metrics::Sli do
describe '#initialize_counters' do
it 'initializes counters for the passed label combinations' do
counters = [
- fake_total_counter(:hey, subclass_type),
- fake_numerator_counter(:hey, subclass_type, numerator_type)
+ fake_total_counter("hey#{subclass_info[:suffix]}"),
+ fake_numerator_counter("hey#{subclass_info[:suffix]}", subclass_info[:numerator])
]
described_class.new(:hey).initialize_counters([{ foo: 'bar' }, { foo: 'baz' }])
@@ -138,18 +142,18 @@ RSpec.describe Gitlab::Metrics::Sli do
describe "#increment" do
let!(:sli) { described_class.new(:heyo) }
- let!(:total_counter) { fake_total_counter(:heyo, subclass_type) }
- let!(:numerator_counter) { fake_numerator_counter(:heyo, subclass_type, numerator_type) }
+ let!(:total_counter) { fake_total_counter("heyo#{subclass_info[:suffix]}") }
+ let!(:numerator_counter) { fake_numerator_counter("heyo#{subclass_info[:suffix]}", subclass_info[:numerator]) }
- it "increments both counters for labels when #{numerator_type} is true" do
- sli.increment(labels: { hello: "world" }, numerator_type => true)
+ it "increments both counters for labels when #{subclass_info[:numerator]} is true" do
+ sli.increment(labels: { hello: "world" }, subclass_info[:numerator] => true)
expect(total_counter).to have_received(:increment).with({ hello: 'world' })
expect(numerator_counter).to have_received(:increment).with({ hello: 'world' })
end
- it "only increments the total counters for labels when #{numerator_type} is false" do
- sli.increment(labels: { hello: "world" }, numerator_type => false)
+ it "only increments the total counters for labels when #{subclass_info[:numerator]} is false" do
+ sli.increment(labels: { hello: "world" }, subclass_info[:numerator] => false)
expect(total_counter).to have_received(:increment).with({ hello: 'world' })
expect(numerator_counter).not_to have_received(:increment).with({ hello: 'world' })
@@ -168,11 +172,11 @@ RSpec.describe Gitlab::Metrics::Sli do
fake_counter
end
- def fake_total_counter(name, type)
- fake_prometheus_counter("gitlab_sli:#{name}_#{type}:total")
+ def fake_total_counter(name)
+ fake_prometheus_counter("gitlab_sli:#{name}:total")
end
- def fake_numerator_counter(name, type, numerator_name)
- fake_prometheus_counter("gitlab_sli:#{name}_#{type}:#{numerator_name}_total")
+ def fake_numerator_counter(name, numerator_name)
+ fake_prometheus_counter("gitlab_sli:#{name}:#{numerator_name}_total")
end
end
diff --git a/spec/lib/gitlab/middleware/compressed_json_spec.rb b/spec/lib/gitlab/middleware/compressed_json_spec.rb
index c5efc568971..a07cd49c572 100644
--- a/spec/lib/gitlab/middleware/compressed_json_spec.rb
+++ b/spec/lib/gitlab/middleware/compressed_json_spec.rb
@@ -8,11 +8,12 @@ RSpec.describe Gitlab::Middleware::CompressedJson do
let(:app) { double(:app) }
let(:middleware) { described_class.new(app) }
+ let(:content_type) { 'application/json' }
let(:env) do
{
'HTTP_CONTENT_ENCODING' => 'gzip',
'REQUEST_METHOD' => 'POST',
- 'CONTENT_TYPE' => 'application/json',
+ 'CONTENT_TYPE' => content_type,
'PATH_INFO' => path,
'rack.input' => StringIO.new(input)
}
@@ -35,6 +36,12 @@ RSpec.describe Gitlab::Middleware::CompressedJson do
let(:path) { '/api/v4/error_tracking/collector/1/store'}
it_behaves_like 'decompress middleware'
+
+ context 'with no Content-Type' do
+ let(:content_type) { nil }
+
+ it_behaves_like 'decompress middleware'
+ end
end
context 'with collector route under relative url' do
diff --git a/spec/lib/gitlab/pages_transfer_spec.rb b/spec/lib/gitlab/pages_transfer_spec.rb
deleted file mode 100644
index 021d9cb7318..00000000000
--- a/spec/lib/gitlab/pages_transfer_spec.rb
+++ /dev/null
@@ -1,157 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::PagesTransfer do
- describe '#async' do
- let(:async) { subject.async }
-
- context 'when receiving an allowed method' do
- it 'schedules a PagesTransferWorker', :aggregate_failures do
- described_class::METHODS.each do |meth|
- expect(PagesTransferWorker)
- .to receive(:perform_async).with(meth, %w[foo bar])
-
- async.public_send(meth, 'foo', 'bar')
- end
- end
-
- it 'does nothing if legacy storage is disabled' do
- allow(Settings.pages.local_store).to receive(:enabled).and_return(false)
-
- described_class::METHODS.each do |meth|
- expect(PagesTransferWorker)
- .not_to receive(:perform_async)
-
- async.public_send(meth, 'foo', 'bar')
- end
- end
- end
-
- context 'when receiving a private method' do
- it 'raises NoMethodError' do
- expect { async.move('foo', 'bar') }.to raise_error(NoMethodError)
- end
- end
-
- context 'when receiving a non-existent method' do
- it 'raises NoMethodError' do
- expect { async.foo('bar') }.to raise_error(NoMethodError)
- end
- end
- end
-
- RSpec.shared_examples 'moving a pages directory' do |parameter|
- let!(:pages_path_before) { project.pages_path }
- let(:config_path_before) { File.join(pages_path_before, 'config.json') }
- let(:pages_path_after) { project.reload.pages_path }
- let(:config_path_after) { File.join(pages_path_after, 'config.json') }
-
- before do
- FileUtils.mkdir_p(pages_path_before)
- FileUtils.touch(config_path_before)
- end
-
- after do
- FileUtils.remove_entry(pages_path_before, true)
- FileUtils.remove_entry(pages_path_after, true)
- end
-
- it 'moves the directory' do
- subject.public_send(meth, *args)
-
- expect(File.exist?(config_path_before)).to be(false)
- expect(File.exist?(config_path_after)).to be(true)
- end
-
- it 'returns false if it fails to move the directory' do
- # Move the directory once, so it can't be moved again
- subject.public_send(meth, *args)
-
- expect(subject.public_send(meth, *args)).to be(false)
- end
-
- it 'does nothing if legacy storage is disabled' do
- allow(Settings.pages.local_store).to receive(:enabled).and_return(false)
-
- subject.public_send(meth, *args)
-
- expect(File.exist?(config_path_before)).to be(true)
- expect(File.exist?(config_path_after)).to be(false)
- end
- end
-
- describe '#move_namespace' do
- # Can't use let_it_be because we change the path
- let(:group_1) { create(:group) }
- let(:group_2) { create(:group) }
- let(:subgroup) { create(:group, parent: group_1) }
- let(:project) { create(:project, group: subgroup) }
- let(:new_path) { "#{group_2.path}/#{subgroup.path}" }
- let(:meth) { 'move_namespace' }
-
- # Store the path before we change it
- let!(:args) { [project.path, subgroup.full_path, new_path] }
-
- before do
- # We need to skip hooks, otherwise the directory will be moved
- # via an ActiveRecord callback
- subgroup.update_columns(parent_id: group_2.id)
- subgroup.route.update!(path: new_path)
- end
-
- include_examples 'moving a pages directory'
- end
-
- describe '#move_project' do
- # Can't use let_it_be because we change the path
- let(:group_1) { create(:group) }
- let(:group_2) { create(:group) }
- let(:project) { create(:project, group: group_1) }
- let(:new_path) { group_2.path }
- let(:meth) { 'move_project' }
- let(:args) { [project.path, group_1.full_path, group_2.full_path] }
-
- include_examples 'moving a pages directory' do
- before do
- project.update!(group: group_2)
- end
- end
- end
-
- describe '#rename_project' do
- # Can't use let_it_be because we change the path
- let(:project) { create(:project) }
- let(:new_path) { project.path.succ }
- let(:meth) { 'rename_project' }
-
- # Store the path before we change it
- let!(:args) { [project.path, new_path, project.namespace.full_path] }
-
- include_examples 'moving a pages directory' do
- before do
- project.update!(path: new_path)
- end
- end
- end
-
- describe '#rename_namespace' do
- # Can't use let_it_be because we change the path
- let(:group) { create(:group) }
- let(:project) { create(:project, group: group) }
- let(:new_path) { project.namespace.full_path.succ }
- let(:meth) { 'rename_namespace' }
-
- # Store the path before we change it
- let!(:args) { [project.namespace.full_path, new_path] }
-
- before do
- # We need to skip hooks, otherwise the directory will be moved
- # via an ActiveRecord callback
- group.update_columns(path: new_path)
- group.route.update!(path: new_path)
- end
-
- include_examples 'moving a pages directory'
- end
-end
diff --git a/spec/lib/gitlab/patch/database_config_spec.rb b/spec/lib/gitlab/patch/database_config_spec.rb
index 73dc84bb2ef..b06d28dbcd5 100644
--- a/spec/lib/gitlab/patch/database_config_spec.rb
+++ b/spec/lib/gitlab/patch/database_config_spec.rb
@@ -11,9 +11,6 @@ RSpec.describe Gitlab::Patch::DatabaseConfig do
let(:configuration) { Rails::Application::Configuration.new(Rails.root) }
before do
- allow(File).to receive(:exist?).and_call_original
- allow(File).to receive(:exist?).with(Rails.root.join("config/database_geo.yml")).and_return(false)
-
# The `AS::ConfigurationFile` calls `read` in `def initialize`
# thus we cannot use `expect_next_instance_of`
# rubocop:disable RSpec/AnyInstanceOf
diff --git a/spec/lib/gitlab/project_stats_refresh_conflicts_logger_spec.rb b/spec/lib/gitlab/project_stats_refresh_conflicts_logger_spec.rb
new file mode 100644
index 00000000000..ce05d5b11c7
--- /dev/null
+++ b/spec/lib/gitlab/project_stats_refresh_conflicts_logger_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::ProjectStatsRefreshConflictsLogger do
+ before do
+ Gitlab::ApplicationContext.push(feature_category: 'test', caller_id: 'caller')
+ end
+
+ describe '.warn_artifact_deletion_during_stats_refresh' do
+ it 'logs a warning about artifacts being deleted while the project is undergoing stats refresh' do
+ project_id = 123
+ method = 'Foo#action'
+
+ expect(Gitlab::AppLogger).to receive(:warn).with(
+ hash_including(
+ message: 'Deleted artifacts undergoing refresh',
+ method: method,
+ project_id: project_id,
+ 'correlation_id' => an_instance_of(String),
+ 'meta.feature_category' => 'test',
+ 'meta.caller_id' => 'caller'
+ )
+ )
+
+ described_class.warn_artifact_deletion_during_stats_refresh(project_id: project_id, method: method)
+ end
+ end
+
+ describe '.warn_request_rejected_during_stats_refresh' do
+ it 'logs a warning about artifacts being deleted while the project is undergoing stats refresh' do
+ project_id = 123
+
+ expect(Gitlab::AppLogger).to receive(:warn).with(
+ hash_including(
+ message: 'Rejected request due to project undergoing stats refresh',
+ project_id: project_id,
+ 'correlation_id' => an_instance_of(String),
+ 'meta.feature_category' => 'test',
+ 'meta.caller_id' => 'caller'
+ )
+ )
+
+ described_class.warn_request_rejected_during_stats_refresh(project_id)
+ end
+ end
+
+ describe '.warn_skipped_artifact_deletion_during_stats_refresh' do
+ it 'logs a warning about artifacts being excluded from deletion while the project is undergoing stats refresh' do
+ project_ids = [12, 34]
+ method = 'Foo#action'
+
+ expect(Gitlab::AppLogger).to receive(:warn).with(
+ hash_including(
+ message: 'Skipped deleting artifacts undergoing refresh',
+ method: method,
+ project_ids: match_array(project_ids),
+ 'correlation_id' => an_instance_of(String),
+ 'meta.feature_category' => 'test',
+ 'meta.caller_id' => 'caller'
+ )
+ )
+
+ described_class.warn_skipped_artifact_deletion_during_stats_refresh(project_ids: project_ids, method: method)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/project_template_spec.rb b/spec/lib/gitlab/project_template_spec.rb
index 0ef52b63bc6..630369977ff 100644
--- a/spec/lib/gitlab/project_template_spec.rb
+++ b/spec/lib/gitlab/project_template_spec.rb
@@ -3,19 +3,12 @@
require 'spec_helper'
RSpec.describe Gitlab::ProjectTemplate do
+ include ProjectTemplateTestHelper
+
describe '.all' do
it 'returns all templates' do
- expected = %w[
- rails spring express iosswift dotnetcore android
- gomicro gatsby hugo jekyll plainhtml gitbook
- hexo middleman gitpod_spring_petclinic nfhugo
- nfjekyll nfplainhtml nfgitbook nfhexo salesforcedx
- serverless_framework tencent_serverless_framework
- jsonnet cluster_management kotlin_native_linux
- ]
-
expect(described_class.all).to be_an(Array)
- expect(described_class.all.map(&:name)).to match_array(expected)
+ expect(described_class.all.map(&:name)).to match_array(all_templates)
end
end
diff --git a/spec/lib/gitlab/protocol_access_spec.rb b/spec/lib/gitlab/protocol_access_spec.rb
new file mode 100644
index 00000000000..4722ea99608
--- /dev/null
+++ b/spec/lib/gitlab/protocol_access_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Gitlab::ProtocolAccess do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:group) { create(:group) }
+ let_it_be(:p1) { create(:project, :repository, namespace: group) }
+
+ describe ".allowed?" do
+ where(:protocol, :project, :admin_setting, :namespace_setting, :expected_result) do
+ "web" | nil | nil | nil | true
+ "ssh" | nil | nil | nil | true
+ "http" | nil | nil | nil | true
+ "ssh" | nil | "" | nil | true
+ "http" | nil | "" | nil | true
+ "ssh" | nil | "ssh" | nil | true
+ "http" | nil | "http" | nil | true
+ "ssh" | nil | "http" | nil | false
+ "http" | nil | "ssh" | nil | false
+ "ssh" | ref(:p1) | nil | "all" | true
+ "http" | ref(:p1) | nil | "all" | true
+ "ssh" | ref(:p1) | nil | "ssh" | true
+ "http" | ref(:p1) | nil | "http" | true
+ "ssh" | ref(:p1) | nil | "http" | false
+ "http" | ref(:p1) | nil | "ssh" | false
+ "ssh" | ref(:p1) | "" | "all" | true
+ "http" | ref(:p1) | "" | "all" | true
+ "ssh" | ref(:p1) | "ssh" | "ssh" | true
+ "http" | ref(:p1) | "http" | "http" | true
+ end
+
+ with_them do
+ subject { described_class.allowed?(protocol, project: project) }
+
+ before do
+ allow(Gitlab::CurrentSettings).to receive(:enabled_git_access_protocol).and_return(admin_setting)
+
+ if project.present?
+ project.root_namespace.namespace_settings.update!(enabled_git_access_protocol: namespace_setting)
+ end
+ end
+
+ it do
+ is_expected.to be(expected_result)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/rack_attack_spec.rb b/spec/lib/gitlab/rack_attack_spec.rb
index 39ea02bad8b..7ba4eab50c7 100644
--- a/spec/lib/gitlab/rack_attack_spec.rb
+++ b/spec/lib/gitlab/rack_attack_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe Gitlab::RackAttack, :aggregate_failures do
end
before do
- allow(fake_rack_attack).to receive(:throttled_response=)
+ allow(fake_rack_attack).to receive(:throttled_responder=)
allow(fake_rack_attack).to receive(:throttle)
allow(fake_rack_attack).to receive(:track)
allow(fake_rack_attack).to receive(:safelist)
@@ -48,7 +48,7 @@ RSpec.describe Gitlab::RackAttack, :aggregate_failures do
it 'configures the throttle response' do
described_class.configure(fake_rack_attack)
- expect(fake_rack_attack).to have_received(:throttled_response=).with(an_instance_of(Proc))
+ expect(fake_rack_attack).to have_received(:throttled_responder=).with(an_instance_of(Proc))
end
it 'configures the safelist' do
diff --git a/spec/lib/gitlab/redis/duplicate_jobs_spec.rb b/spec/lib/gitlab/redis/duplicate_jobs_spec.rb
new file mode 100644
index 00000000000..53e3d73d17e
--- /dev/null
+++ b/spec/lib/gitlab/redis/duplicate_jobs_spec.rb
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Redis::DuplicateJobs do
+ # Note: this is a pseudo-store in front of `SharedState`, meant only as a tool
+ # to move away from `Sidekiq.redis` for duplicate job data. Thus, we use the
+ # same store configuration as the former.
+ let(:instance_specific_config_file) { "config/redis.shared_state.yml" }
+ let(:environment_config_file_name) { "GITLAB_REDIS_SHARED_STATE_CONFIG_FILE" }
+
+ include_examples "redis_shared_examples"
+
+ describe '#pool' do
+ subject { described_class.pool }
+
+ before do
+ redis_clear_raw_config!(Gitlab::Redis::SharedState)
+ redis_clear_raw_config!(Gitlab::Redis::Queues)
+ end
+
+ after do
+ redis_clear_raw_config!(Gitlab::Redis::SharedState)
+ redis_clear_raw_config!(Gitlab::Redis::Queues)
+ end
+
+ around do |example|
+ clear_pool
+ example.run
+ ensure
+ clear_pool
+ end
+
+ context 'store connection settings' do
+ let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" }
+ let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" }
+
+ before do
+ allow(Gitlab::Redis::SharedState).to receive(:config_file_name).and_return(config_new_format_host)
+ allow(Gitlab::Redis::Queues).to receive(:config_file_name).and_return(config_new_format_socket)
+ end
+
+ it 'instantiates an instance of MultiStore' do
+ subject.with do |redis_instance|
+ expect(redis_instance).to be_instance_of(::Gitlab::Redis::MultiStore)
+
+ expect(redis_instance.primary_store.connection[:id]).to eq("redis://test-host:6379/99")
+ expect(redis_instance.primary_store.connection[:namespace]).to be_nil
+ expect(redis_instance.secondary_store.connection[:id]).to eq("redis:///path/to/redis.sock/0")
+ expect(redis_instance.secondary_store.connection[:namespace]).to eq("resque:gitlab")
+
+ expect(redis_instance.instance_name).to eq('DuplicateJobs')
+ end
+ end
+ end
+
+ # Make sure they current namespace is respected for the secondary store but omitted from the primary
+ context 'key namespaces' do
+ let(:key) { 'key' }
+ let(:value) { '123' }
+
+ it 'writes keys to SharedState with no prefix, and to Queues with the "resque:gitlab:" prefix' do
+ subject.with do |redis_instance|
+ redis_instance.set(key, value)
+ end
+
+ Gitlab::Redis::SharedState.with do |redis_instance|
+ expect(redis_instance.get(key)).to eq(value)
+ end
+
+ Gitlab::Redis::Queues.with do |redis_instance|
+ expect(redis_instance.get("resque:gitlab:#{key}")).to eq(value)
+ end
+ end
+ end
+
+ it_behaves_like 'multi store feature flags', :use_primary_and_secondary_stores_for_duplicate_jobs,
+ :use_primary_store_as_default_for_duplicate_jobs
+ end
+
+ describe '#raw_config_hash' do
+ it 'has a legacy default URL' do
+ expect(subject).to receive(:fetch_config) { false }
+
+ expect(subject.send(:raw_config_hash)).to eq(url: 'redis://localhost:6382')
+ end
+ end
+
+ describe '#store_name' do
+ it 'returns the name of the SharedState store' do
+ expect(described_class.store_name).to eq('SharedState')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/redis/multi_store_spec.rb b/spec/lib/gitlab/redis/multi_store_spec.rb
new file mode 100644
index 00000000000..e127c89c303
--- /dev/null
+++ b/spec/lib/gitlab/redis/multi_store_spec.rb
@@ -0,0 +1,924 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Redis::MultiStore do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:redis_store_class) do
+ Class.new(Gitlab::Redis::Wrapper) do
+ def config_file_name
+ config_file_name = "spec/fixtures/config/redis_new_format_host.yml"
+ Rails.root.join(config_file_name).to_s
+ end
+
+ def self.name
+ 'Sessions'
+ end
+ end
+ end
+
+ let_it_be(:primary_db) { 1 }
+ let_it_be(:secondary_db) { 2 }
+ let_it_be(:primary_store) { create_redis_store(redis_store_class.params, db: primary_db, serializer: nil) }
+ let_it_be(:secondary_store) { create_redis_store(redis_store_class.params, db: secondary_db, serializer: nil) }
+ let_it_be(:instance_name) { 'TestStore' }
+ let_it_be(:multi_store) { described_class.new(primary_store, secondary_store, instance_name)}
+
+ subject { multi_store.send(name, *args) }
+
+ before do
+ skip_feature_flags_yaml_validation
+ skip_default_enabled_yaml_check
+ end
+
+ after(:all) do
+ primary_store.flushdb
+ secondary_store.flushdb
+ end
+
+ context 'when primary_store is nil' do
+ let(:multi_store) { described_class.new(nil, secondary_store, instance_name)}
+
+ it 'fails with exception' do
+ expect { multi_store }.to raise_error(ArgumentError, /primary_store is required/)
+ end
+ end
+
+ context 'when secondary_store is nil' do
+ let(:multi_store) { described_class.new(primary_store, nil, instance_name)}
+
+ it 'fails with exception' do
+ expect { multi_store }.to raise_error(ArgumentError, /secondary_store is required/)
+ end
+ end
+
+ context 'when instance_name is nil' do
+ let(:instance_name) { nil }
+ let(:multi_store) { described_class.new(primary_store, secondary_store, instance_name)}
+
+ it 'fails with exception' do
+ expect { multi_store }.to raise_error(ArgumentError, /instance_name is required/)
+ end
+ end
+
+ context 'when primary_store is not a ::Redis instance' do
+ before do
+ allow(primary_store).to receive(:is_a?).with(::Redis).and_return(false)
+ allow(primary_store).to receive(:is_a?).with(::Redis::Namespace).and_return(false)
+ end
+
+ it 'fails with exception' do
+ expect { described_class.new(primary_store, secondary_store, instance_name) }
+ .to raise_error(ArgumentError, /invalid primary_store/)
+ end
+ end
+
+ context 'when primary_store is a ::Redis::Namespace instance' do
+ before do
+ allow(primary_store).to receive(:is_a?).with(::Redis).and_return(false)
+ allow(primary_store).to receive(:is_a?).with(::Redis::Namespace).and_return(true)
+ end
+
+ it 'fails with exception' do
+ expect { described_class.new(primary_store, secondary_store, instance_name) }.not_to raise_error
+ end
+ end
+
+ context 'when secondary_store is not a ::Redis instance' do
+ before do
+ allow(secondary_store).to receive(:is_a?).with(::Redis).and_return(false)
+ allow(secondary_store).to receive(:is_a?).with(::Redis::Namespace).and_return(false)
+ end
+
+ it 'fails with exception' do
+ expect { described_class.new(primary_store, secondary_store, instance_name) }
+ .to raise_error(ArgumentError, /invalid secondary_store/)
+ end
+ end
+
+ context 'when secondary_store is a ::Redis::Namespace instance' do
+ before do
+ allow(secondary_store).to receive(:is_a?).with(::Redis).and_return(false)
+ allow(secondary_store).to receive(:is_a?).with(::Redis::Namespace).and_return(true)
+ end
+
+ it 'fails with exception' do
+ expect { described_class.new(primary_store, secondary_store, instance_name) }.not_to raise_error
+ end
+ end
+
+ context 'with READ redis commands' do
+ let_it_be(:key1) { "redis:{1}:key_a" }
+ let_it_be(:key2) { "redis:{1}:key_b" }
+ let_it_be(:value1) { "redis_value1"}
+ let_it_be(:value2) { "redis_value2"}
+ let_it_be(:skey) { "redis:set:key" }
+ let_it_be(:keys) { [key1, key2] }
+ let_it_be(:values) { [value1, value2] }
+ let_it_be(:svalues) { [value2, value1] }
+
+ where(:case_name, :name, :args, :value, :block) do
+ 'execute :get command' | :get | ref(:key1) | ref(:value1) | nil
+ 'execute :mget command' | :mget | ref(:keys) | ref(:values) | nil
+ 'execute :mget with block' | :mget | ref(:keys) | ref(:values) | ->(value) { value }
+ 'execute :smembers command' | :smembers | ref(:skey) | ref(:svalues) | nil
+ 'execute :scard command' | :scard | ref(:skey) | 2 | nil
+ end
+
+ before(:all) do
+ primary_store.multi do |multi|
+ multi.set(key1, value1)
+ multi.set(key2, value2)
+ multi.sadd(skey, value1)
+ multi.sadd(skey, value2)
+ end
+
+ secondary_store.multi do |multi|
+ multi.set(key1, value1)
+ multi.set(key2, value2)
+ multi.sadd(skey, value1)
+ multi.sadd(skey, value2)
+ end
+ end
+
+ RSpec.shared_examples_for 'reads correct value' do
+ it 'returns the correct value' do
+ if value.is_a?(Array)
+ # :smembers does not guarantee the order it will return the values (unsorted set)
+ is_expected.to match_array(value)
+ else
+ is_expected.to eq(value)
+ end
+ end
+ end
+
+ RSpec.shared_examples_for 'fallback read from the secondary store' do
+ let(:counter) { Gitlab::Metrics::NullMetric.instance }
+
+ before do
+ allow(Gitlab::Metrics).to receive(:counter).and_return(counter)
+ end
+
+ it 'fallback and execute on secondary instance' do
+ expect(secondary_store).to receive(name).with(*args).and_call_original
+
+ subject
+ end
+
+ it 'logs the ReadFromPrimaryError' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ an_instance_of(Gitlab::Redis::MultiStore::ReadFromPrimaryError),
+ hash_including(command_name: name, instance_name: instance_name)
+ )
+
+ subject
+ end
+
+ it 'increment read fallback count metrics' do
+ expect(counter).to receive(:increment).with(command: name, instance_name: instance_name)
+
+ subject
+ end
+
+ include_examples 'reads correct value'
+
+ context 'when fallback read from the secondary instance raises an exception' do
+ before do
+ allow(secondary_store).to receive(name).with(*args).and_raise(StandardError)
+ end
+
+ it 'fails with exception' do
+ expect { subject }.to raise_error(StandardError)
+ end
+ end
+ end
+
+ RSpec.shared_examples_for 'secondary store' do
+ it 'execute on the secondary instance' do
+ expect(secondary_store).to receive(name).with(*args).and_call_original
+
+ subject
+ end
+
+ include_examples 'reads correct value'
+
+ it 'does not execute on the primary store' do
+ expect(primary_store).not_to receive(name)
+
+ subject
+ end
+ end
+
+ with_them do
+ describe "#{name}" do
+ before do
+ allow(primary_store).to receive(name).and_call_original
+ allow(secondary_store).to receive(name).and_call_original
+ end
+
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
+ end
+
+ context 'when reading from the primary is successful' do
+ it 'returns the correct value' do
+ expect(primary_store).to receive(name).with(*args).and_call_original
+
+ subject
+ end
+
+ it 'does not execute on the secondary store' do
+ expect(secondary_store).not_to receive(name)
+
+ subject
+ end
+
+ include_examples 'reads correct value'
+ end
+
+ context 'when reading from primary instance is raising an exception' do
+ before do
+ allow(primary_store).to receive(name).with(*args).and_raise(StandardError)
+ allow(Gitlab::ErrorTracking).to receive(:log_exception)
+ end
+
+ it 'logs the exception' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(StandardError),
+ hash_including(:multi_store_error_message, instance_name: instance_name, command_name: name))
+
+ subject
+ end
+
+ include_examples 'fallback read from the secondary store'
+ end
+
+ context 'when reading from primary instance return no value' do
+ before do
+ allow(primary_store).to receive(name).and_return(nil)
+ end
+
+ include_examples 'fallback read from the secondary store'
+ end
+
+ context 'when the command is executed within pipelined block' do
+ subject do
+ multi_store.pipelined do
+ multi_store.send(name, *args)
+ end
+ end
+
+ it 'is executed only 1 time on primary instance' do
+ expect(primary_store).to receive(name).with(*args).once
+
+ subject
+ end
+ end
+
+ if params[:block]
+ subject do
+ multi_store.send(name, *args, &block)
+ end
+
+ context 'when block is provided' do
+ it 'yields to the block' do
+ expect(primary_store).to receive(name).and_yield(value)
+
+ subject
+ end
+
+ include_examples 'reads correct value'
+ end
+ end
+ end
+
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ end
+
+ it_behaves_like 'secondary store'
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: true)
+ end
+
+ it 'execute on the primary instance' do
+ expect(primary_store).to receive(name).with(*args).and_call_original
+
+ subject
+ end
+
+ include_examples 'reads correct value'
+
+ it 'does not execute on the secondary store' do
+ expect(secondary_store).not_to receive(name)
+
+ subject
+ end
+ end
+ end
+
+ context 'with both primary and secondary store using same redis instance' do
+ let(:primary_store) { create_redis_store(redis_store_class.params, db: primary_db, serializer: nil) }
+ let(:secondary_store) { create_redis_store(redis_store_class.params, db: primary_db, serializer: nil) }
+ let(:multi_store) { described_class.new(primary_store, secondary_store, instance_name)}
+
+ it_behaves_like 'secondary store'
+ end
+ end
+ end
+ end
+
+ RSpec.shared_examples_for 'verify that store contains values' do |store|
+ it "#{store} redis store contains correct values", :aggregate_errors do
+ subject
+
+ redis_store = multi_store.send(store)
+
+ if expected_value.is_a?(Array)
+ # :smembers does not guarantee the order it will return the values
+ expect(redis_store.send(verification_name, *verification_args)).to match_array(expected_value)
+ else
+ expect(redis_store.send(verification_name, *verification_args)).to eq(expected_value)
+ end
+ end
+ end
+
+ context 'with WRITE redis commands' do
+ let_it_be(:key1) { "redis:{1}:key_a" }
+ let_it_be(:key2) { "redis:{1}:key_b" }
+ let_it_be(:value1) { "redis_value1"}
+ let_it_be(:value2) { "redis_value2"}
+ let_it_be(:key1_value1) { [key1, value1] }
+ let_it_be(:key1_value2) { [key1, value2] }
+ let_it_be(:ttl) { 10 }
+ let_it_be(:key1_ttl_value1) { [key1, ttl, value1] }
+ let_it_be(:skey) { "redis:set:key" }
+ let_it_be(:svalues1) { [value2, value1] }
+ let_it_be(:svalues2) { [value1] }
+ let_it_be(:skey_value1) { [skey, value1] }
+ let_it_be(:skey_value2) { [skey, value2] }
+
+ where(:case_name, :name, :args, :expected_value, :verification_name, :verification_args) do
+ 'execute :set command' | :set | ref(:key1_value1) | ref(:value1) | :get | ref(:key1)
+ 'execute :setnx command' | :setnx | ref(:key1_value2) | ref(:value1) | :get | ref(:key2)
+ 'execute :setex command' | :setex | ref(:key1_ttl_value1) | ref(:ttl) | :ttl | ref(:key1)
+ 'execute :sadd command' | :sadd | ref(:skey_value2) | ref(:svalues1) | :smembers | ref(:skey)
+ 'execute :srem command' | :srem | ref(:skey_value1) | [] | :smembers | ref(:skey)
+ 'execute :del command' | :del | ref(:key2) | nil | :get | ref(:key2)
+ 'execute :flushdb command' | :flushdb | nil | 0 | :dbsize | nil
+ end
+
+ before do
+ primary_store.flushdb
+ secondary_store.flushdb
+
+ primary_store.multi do |multi|
+ multi.set(key2, value1)
+ multi.sadd(skey, value1)
+ end
+
+ secondary_store.multi do |multi|
+ multi.set(key2, value1)
+ multi.sadd(skey, value1)
+ end
+ end
+
+ with_them do
+ describe "#{name}" do
+ let(:expected_args) {args || no_args }
+
+ before do
+ allow(primary_store).to receive(name).and_call_original
+ allow(secondary_store).to receive(name).and_call_original
+ end
+
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
+ end
+
+ context 'when executing on primary instance is successful' do
+ it 'executes on both primary and secondary redis store', :aggregate_errors do
+ expect(primary_store).to receive(name).with(*expected_args).and_call_original
+ expect(secondary_store).to receive(name).with(*expected_args).and_call_original
+
+ subject
+ end
+
+ include_examples 'verify that store contains values', :primary_store
+ include_examples 'verify that store contains values', :secondary_store
+ end
+
+ context 'when executing on the primary instance is raising an exception' do
+ before do
+ allow(primary_store).to receive(name).with(*expected_args).and_raise(StandardError)
+ allow(Gitlab::ErrorTracking).to receive(:log_exception)
+ end
+
+ it 'logs the exception and execute on secondary instance', :aggregate_errors do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(StandardError),
+ hash_including(:multi_store_error_message, command_name: name, instance_name: instance_name))
+ expect(secondary_store).to receive(name).with(*expected_args).and_call_original
+
+ subject
+ end
+
+ include_examples 'verify that store contains values', :secondary_store
+ end
+
+ context 'when the command is executed within pipelined block' do
+ subject do
+ multi_store.pipelined do
+ multi_store.send(name, *args)
+ end
+ end
+
+ it 'is executed only 1 time on each instance', :aggregate_errors do
+ expect(primary_store).to receive(name).with(*expected_args).once
+ expect(secondary_store).to receive(name).with(*expected_args).once
+
+ subject
+ end
+
+ include_examples 'verify that store contains values', :primary_store
+ include_examples 'verify that store contains values', :secondary_store
+ end
+ end
+
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ end
+
+ it 'executes only on the secondary redis store', :aggregate_errors do
+ expect(secondary_store).to receive(name).with(*expected_args)
+ expect(primary_store).not_to receive(name).with(*expected_args)
+
+ subject
+ end
+
+ include_examples 'verify that store contains values', :secondary_store
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: true)
+ end
+
+ it 'executes only on the primary_redis redis store', :aggregate_errors do
+ expect(primary_store).to receive(name).with(*expected_args)
+ expect(secondary_store).not_to receive(name).with(*expected_args)
+
+ subject
+ end
+
+ include_examples 'verify that store contains values', :primary_store
+ end
+ end
+ end
+ end
+ end
+
+ RSpec.shared_examples_for 'pipelined command' do |name|
+ let_it_be(:key1) { "redis:{1}:key_a" }
+ let_it_be(:value1) { "redis_value1"}
+ let_it_be(:value2) { "redis_value2"}
+ let_it_be(:expected_value) { value1 }
+ let_it_be(:verification_name) { :get }
+ let_it_be(:verification_args) { key1 }
+
+ before do
+ primary_store.flushdb
+ secondary_store.flushdb
+ end
+
+ describe "command execution in a transaction" do
+ let(:counter) { Gitlab::Metrics::NullMetric.instance }
+
+ before do
+ allow(Gitlab::Metrics).to receive(:counter).with(
+ :gitlab_redis_multi_store_pipelined_diff_error_total,
+ 'Redis MultiStore pipelined command diff between stores'
+ ).and_return(counter)
+ end
+
+ subject do
+ multi_store.send(name) do |redis|
+ redis.set(key1, value1)
+ end
+ end
+
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
+ end
+
+ context 'when executing on primary instance is successful' do
+ it 'executes on both primary and secondary redis store', :aggregate_errors do
+ expect(primary_store).to receive(name).and_call_original
+ expect(secondary_store).to receive(name).and_call_original
+
+ subject
+ end
+
+ include_examples 'verify that store contains values', :primary_store
+ include_examples 'verify that store contains values', :secondary_store
+ end
+
+ context 'when executing on the primary instance is raising an exception' do
+ before do
+ allow(primary_store).to receive(name).and_raise(StandardError)
+ allow(Gitlab::ErrorTracking).to receive(:log_exception)
+ end
+
+ it 'logs the exception and execute on secondary instance', :aggregate_errors do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(StandardError),
+ hash_including(:multi_store_error_message, command_name: name))
+ expect(secondary_store).to receive(name).and_call_original
+
+ subject
+ end
+
+ include_examples 'verify that store contains values', :secondary_store
+ end
+
+ describe 'return values from a transaction' do
+ subject do
+ multi_store.send(name) do |redis|
+ redis.get(key1)
+ end
+ end
+
+ context 'when the value exists on both and are equal' do
+ before do
+ primary_store.set(key1, value1)
+ secondary_store.set(key1, value1)
+ end
+
+ it 'returns the value' do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+
+ expect(subject).to eq([value1])
+ end
+ end
+
+ context 'when the value exists on both but differ' do
+ before do
+ primary_store.set(key1, value1)
+ secondary_store.set(key1, value2)
+ end
+
+ it 'returns the value from the secondary store, logging an error' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ an_instance_of(Gitlab::Redis::MultiStore::PipelinedDiffError),
+ hash_including(command_name: name, instance_name: instance_name)
+ ).and_call_original
+ expect(counter).to receive(:increment).with(command: name, instance_name: instance_name)
+
+ expect(subject).to eq([value2])
+ end
+ end
+
+ context 'when the value does not exist on the primary but it does on the secondary' do
+ before do
+ secondary_store.set(key1, value2)
+ end
+
+ it 'returns the value from the secondary store, logging an error' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ an_instance_of(Gitlab::Redis::MultiStore::PipelinedDiffError),
+ hash_including(command_name: name, instance_name: instance_name)
+ )
+ expect(counter).to receive(:increment).with(command: name, instance_name: instance_name)
+
+ expect(subject).to eq([value2])
+ end
+ end
+
+ context 'when the value does not exist in either' do
+ it 'returns nil without logging an error' do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+ expect(counter).not_to receive(:increment)
+
+ expect(subject).to eq([nil])
+ end
+ end
+ end
+ end
+
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ end
+
+ it 'executes only on the secondary redis store', :aggregate_errors do
+ expect(secondary_store).to receive(name)
+ expect(primary_store).not_to receive(name)
+
+ subject
+ end
+
+ include_examples 'verify that store contains values', :secondary_store
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: true)
+ end
+
+ it 'executes only on the primary_redis redis store', :aggregate_errors do
+ expect(primary_store).to receive(name)
+ expect(secondary_store).not_to receive(name)
+
+ subject
+ end
+
+ include_examples 'verify that store contains values', :primary_store
+ end
+ end
+ end
+ end
+
+ describe '#multi' do
+ include_examples 'pipelined command', :multi
+ end
+
+ describe '#pipelined' do
+ include_examples 'pipelined command', :pipelined
+ end
+
+ context 'with unsupported command' do
+ let(:counter) { Gitlab::Metrics::NullMetric.instance }
+
+ before do
+ primary_store.flushdb
+ secondary_store.flushdb
+ allow(Gitlab::Metrics).to receive(:counter).and_return(counter)
+ end
+
+ let_it_be(:key) { "redis:counter" }
+
+ subject { multi_store.incr(key) }
+
+ it 'responds to missing method' do
+ expect(multi_store).to receive(:respond_to_missing?).and_call_original
+
+ expect(multi_store.respond_to?(:incr)).to be(true)
+ end
+
+ it 'executes method missing' do
+ expect(multi_store).to receive(:method_missing)
+
+ subject
+ end
+
+ context 'when command is not in SKIP_LOG_METHOD_MISSING_FOR_COMMANDS' do
+ it 'logs MethodMissingError' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ an_instance_of(Gitlab::Redis::MultiStore::MethodMissingError),
+ hash_including(command_name: :incr, instance_name: instance_name)
+ )
+
+ subject
+ end
+
+ it 'increments method missing counter' do
+ expect(counter).to receive(:increment).with(command: :incr, instance_name: instance_name)
+
+ subject
+ end
+ end
+
+ context 'when command is in SKIP_LOG_METHOD_MISSING_FOR_COMMANDS' do
+ subject { multi_store.info }
+
+ it 'does not log MethodMissingError' do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+
+ subject
+ end
+
+ it 'does not increment method missing counter' do
+ expect(counter).not_to receive(:increment)
+
+ subject
+ end
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: true)
+ end
+
+ it 'fallback and executes only on the secondary store', :aggregate_errors do
+ expect(primary_store).to receive(:incr).with(key).and_call_original
+ expect(secondary_store).not_to receive(:incr)
+
+ subject
+ end
+
+ it 'correct value is stored on the secondary store', :aggregate_errors do
+ subject
+
+ expect(secondary_store.get(key)).to be_nil
+ expect(primary_store.get(key)).to eq('1')
+ end
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ end
+
+      it 'falls back and executes only on the secondary store', :aggregate_errors do
+ expect(secondary_store).to receive(:incr).with(key).and_call_original
+ expect(primary_store).not_to receive(:incr)
+
+ subject
+ end
+
+ it 'correct value is stored on the secondary store', :aggregate_errors do
+ subject
+
+ expect(primary_store.get(key)).to be_nil
+ expect(secondary_store.get(key)).to eq('1')
+ end
+ end
+
+ context 'when the command is executed within pipelined block' do
+ subject do
+ multi_store.pipelined do
+ multi_store.incr(key)
+ end
+ end
+
+ it 'is executed only 1 time on each instance', :aggregate_errors do
+ expect(primary_store).to receive(:incr).with(key).once
+ expect(secondary_store).to receive(:incr).with(key).once
+
+ subject
+ end
+
+ it "both redis stores are containing correct values", :aggregate_errors do
+ subject
+
+ expect(primary_store.get(key)).to eq('1')
+ expect(secondary_store.get(key)).to eq('1')
+ end
+ end
+ end
+
+ describe '#to_s' do
+ subject { multi_store.to_s }
+
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
+ end
+
+ it 'returns same value as primary_store' do
+ is_expected.to eq(primary_store.to_s)
+ end
+ end
+
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: true)
+ end
+
+ it 'returns same value as primary_store' do
+ is_expected.to eq(primary_store.to_s)
+ end
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ end
+
+ it 'returns same value as primary_store' do
+ is_expected.to eq(secondary_store.to_s)
+ end
+ end
+ end
+ end
+
+ describe '#is_a?' do
+ it 'returns true for ::Redis::Store' do
+ expect(multi_store.is_a?(::Redis::Store)).to be true
+ end
+ end
+
+ describe '#use_primary_and_secondary_stores?' do
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: true)
+ end
+
+      it 'multi store is enabled' do
+ expect(multi_store.use_primary_and_secondary_stores?).to be true
+ end
+ end
+
+ context 'with feature flag :use_primary_and_secondary_stores_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
+ end
+
+ it 'multi store is disabled' do
+ expect(multi_store.use_primary_and_secondary_stores?).to be false
+ end
+ end
+
+ context 'with empty DB' do
+ before do
+ allow(Feature::FlipperFeature).to receive(:table_exists?).and_return(false)
+ end
+
+ it 'multi store is disabled' do
+ expect(multi_store.use_primary_and_secondary_stores?).to be false
+ end
+ end
+
+ context 'when FF table guard raises' do
+ before do
+ allow(Feature::FlipperFeature).to receive(:table_exists?).and_raise
+ end
+
+ it 'multi store is disabled' do
+ expect(multi_store.use_primary_and_secondary_stores?).to be false
+ end
+ end
+ end
+
+ describe '#use_primary_store_as_default?' do
+ context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: true)
+ end
+
+      it 'primary store is used as default' do
+ expect(multi_store.use_primary_store_as_default?).to be true
+ end
+ end
+
+ context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ end
+
+      it 'primary store is not used as default' do
+ expect(multi_store.use_primary_store_as_default?).to be false
+ end
+ end
+
+ context 'with empty DB' do
+ before do
+ allow(Feature::FlipperFeature).to receive(:table_exists?).and_return(false)
+ end
+
+ it 'multi store is disabled' do
+ expect(multi_store.use_primary_and_secondary_stores?).to be false
+ end
+ end
+
+ context 'when FF table guard raises' do
+ before do
+ allow(Feature::FlipperFeature).to receive(:table_exists?).and_raise
+ end
+
+ it 'multi store is disabled' do
+ expect(multi_store.use_primary_and_secondary_stores?).to be false
+ end
+ end
+ end
+
+ def create_redis_store(options, extras = {})
+ ::Redis::Store.new(options.merge(extras))
+ end
+end
diff --git a/spec/lib/gitlab/redis/sidekiq_status_spec.rb b/spec/lib/gitlab/redis/sidekiq_status_spec.rb
new file mode 100644
index 00000000000..f641ea40efd
--- /dev/null
+++ b/spec/lib/gitlab/redis/sidekiq_status_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Redis::SidekiqStatus do
+ # Note: this is a pseudo-store in front of `SharedState`, meant only as a tool
+ # to move away from `Sidekiq.redis` for sidekiq status data. Thus, we use the
+ # same store configuration as the former.
+ let(:instance_specific_config_file) { "config/redis.shared_state.yml" }
+ let(:environment_config_file_name) { "GITLAB_REDIS_SHARED_STATE_CONFIG_FILE" }
+
+ include_examples "redis_shared_examples"
+
+ describe '#pool' do
+ let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" }
+ let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" }
+
+ subject { described_class.pool }
+
+ before do
+ redis_clear_raw_config!(Gitlab::Redis::SharedState)
+ redis_clear_raw_config!(Gitlab::Redis::Queues)
+
+ allow(Gitlab::Redis::SharedState).to receive(:config_file_name).and_return(config_new_format_host)
+ allow(Gitlab::Redis::Queues).to receive(:config_file_name).and_return(config_new_format_socket)
+ end
+
+ after do
+ redis_clear_raw_config!(Gitlab::Redis::SharedState)
+ redis_clear_raw_config!(Gitlab::Redis::Queues)
+ end
+
+ around do |example|
+ clear_pool
+ example.run
+ ensure
+ clear_pool
+ end
+
+ it 'instantiates an instance of MultiStore' do
+ subject.with do |redis_instance|
+ expect(redis_instance).to be_instance_of(::Gitlab::Redis::MultiStore)
+
+ expect(redis_instance.primary_store.connection[:id]).to eq("redis://test-host:6379/99")
+ expect(redis_instance.secondary_store.connection[:id]).to eq("redis:///path/to/redis.sock/0")
+
+ expect(redis_instance.instance_name).to eq('SidekiqStatus')
+ end
+ end
+
+ it_behaves_like 'multi store feature flags', :use_primary_and_secondary_stores_for_sidekiq_status,
+ :use_primary_store_as_default_for_sidekiq_status
+ end
+
+ describe '#raw_config_hash' do
+ it 'has a legacy default URL' do
+ expect(subject).to receive(:fetch_config) { false }
+
+ expect(subject.send(:raw_config_hash)).to eq(url: 'redis://localhost:6382')
+ end
+ end
+
+ describe '#store_name' do
+ it 'returns the name of the SharedState store' do
+ expect(described_class.store_name).to eq('SharedState')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/regex_requires_app_spec.rb b/spec/lib/gitlab/regex_requires_app_spec.rb
new file mode 100644
index 00000000000..5808033dc4c
--- /dev/null
+++ b/spec/lib/gitlab/regex_requires_app_spec.rb
@@ -0,0 +1,90 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+# Only specs that *cannot* be run with fast_spec_helper only
+# See regex_spec for tests that do not require the full spec_helper
+RSpec.describe Gitlab::Regex do
+ describe '.debian_architecture_regex' do
+ subject { described_class.debian_architecture_regex }
+
+ it { is_expected.to match('amd64') }
+ it { is_expected.to match('kfreebsd-i386') }
+
+ # may not be empty string
+ it { is_expected.not_to match('') }
+ # must start with an alphanumeric
+ it { is_expected.not_to match('-a') }
+ it { is_expected.not_to match('+a') }
+ it { is_expected.not_to match('.a') }
+ it { is_expected.not_to match('_a') }
+ # only letters, digits and characters '-'
+ it { is_expected.not_to match('a+b') }
+ it { is_expected.not_to match('a.b') }
+ it { is_expected.not_to match('a_b') }
+ it { is_expected.not_to match('a~') }
+ it { is_expected.not_to match('aé') }
+
+ # More strict
+ # Enforce lowercase
+ it { is_expected.not_to match('AMD64') }
+ it { is_expected.not_to match('Amd64') }
+ it { is_expected.not_to match('aMD64') }
+ end
+
+ describe '.npm_package_name_regex' do
+ subject { described_class.npm_package_name_regex }
+
+ it { is_expected.to match('@scope/package') }
+ it { is_expected.to match('unscoped-package') }
+ it { is_expected.not_to match('@first-scope@second-scope/package') }
+ it { is_expected.not_to match('scope-without-at-symbol/package') }
+ it { is_expected.not_to match('@not-a-scoped-package') }
+ it { is_expected.not_to match('@scope/sub/package') }
+ it { is_expected.not_to match('@scope/../../package') }
+ it { is_expected.not_to match('@scope%2e%2e%2fpackage') }
+ it { is_expected.not_to match('@%2e%2e%2f/package') }
+
+ context 'capturing group' do
+ [
+ ['@scope/package', 'scope'],
+ ['unscoped-package', nil],
+ ['@not-a-scoped-package', nil],
+ ['@scope/sub/package', nil],
+ ['@inv@lid-scope/package', nil]
+ ].each do |package_name, extracted_scope_name|
+ it "extracts the scope name for #{package_name}" do
+ match = package_name.match(described_class.npm_package_name_regex)
+ expect(match&.captures&.first).to eq(extracted_scope_name)
+ end
+ end
+ end
+ end
+
+ describe '.debian_distribution_regex' do
+ subject { described_class.debian_distribution_regex }
+
+ it { is_expected.to match('buster') }
+ it { is_expected.to match('buster-updates') }
+ it { is_expected.to match('Debian10.5') }
+
+ # Do not allow slash, even if this exists in the wild
+ it { is_expected.not_to match('jessie/updates') }
+
+ # Do not allow Unicode
+ it { is_expected.not_to match('hé') }
+ end
+
+ describe '.debian_component_regex' do
+ subject { described_class.debian_component_regex }
+
+ it { is_expected.to match('main') }
+ it { is_expected.to match('non-free') }
+
+ # Do not allow slash
+ it { is_expected.not_to match('non/free') }
+
+ # Do not allow Unicode
+ it { is_expected.not_to match('hé') }
+ end
+end
diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb
index b4c1f3b689b..d48e8183650 100644
--- a/spec/lib/gitlab/regex_spec.rb
+++ b/spec/lib/gitlab/regex_spec.rb
@@ -1,7 +1,11 @@
# frozen_string_literal: true
-require 'spec_helper'
+require 'fast_spec_helper'
+require_relative '../../../lib/gitlab/regex'
+
+# All specs that can be run with fast_spec_helper only
+# See regex_requires_app_spec for tests that require the full spec_helper
RSpec.describe Gitlab::Regex do
shared_examples_for 'project/group name chars regex' do
it { is_expected.to match('gitlab-ce') }
@@ -401,35 +405,6 @@ RSpec.describe Gitlab::Regex do
it { is_expected.not_to match('%2e%2e%2f1.2.3') }
end
- describe '.npm_package_name_regex' do
- subject { described_class.npm_package_name_regex }
-
- it { is_expected.to match('@scope/package') }
- it { is_expected.to match('unscoped-package') }
- it { is_expected.not_to match('@first-scope@second-scope/package') }
- it { is_expected.not_to match('scope-without-at-symbol/package') }
- it { is_expected.not_to match('@not-a-scoped-package') }
- it { is_expected.not_to match('@scope/sub/package') }
- it { is_expected.not_to match('@scope/../../package') }
- it { is_expected.not_to match('@scope%2e%2e%2fpackage') }
- it { is_expected.not_to match('@%2e%2e%2f/package') }
-
- context 'capturing group' do
- [
- ['@scope/package', 'scope'],
- ['unscoped-package', nil],
- ['@not-a-scoped-package', nil],
- ['@scope/sub/package', nil],
- ['@inv@lid-scope/package', nil]
- ].each do |package_name, extracted_scope_name|
- it "extracts the scope name for #{package_name}" do
- match = package_name.match(described_class.npm_package_name_regex)
- expect(match&.captures&.first).to eq(extracted_scope_name)
- end
- end
- end
- end
-
describe '.nuget_version_regex' do
subject { described_class.nuget_version_regex }
@@ -595,8 +570,7 @@ RSpec.describe Gitlab::Regex do
# nothing after colon in version number
it { is_expected.not_to match('2:') }
# revision number is empty
- # Note: we are less strict here
- # it { is_expected.not_to match('1.0-') }
+ it { is_expected.not_to match('1.0-') }
# version number is empty
it { is_expected.not_to match('-1') }
it { is_expected.not_to match('2:-1') }
@@ -618,63 +592,12 @@ RSpec.describe Gitlab::Regex do
it { is_expected.not_to match('1.0 ') }
# dpkg accepts multiple colons
it { is_expected.not_to match('1:2:3') }
+ # we limit the number of dashes
+ it { is_expected.to match('1-2-3-4-5-6-7-8-9-10-11-12-13-14-15') }
+ it { is_expected.not_to match('1-2-3-4-5-6-7-8-9-10-11-12-13-14-15-16') }
end
end
- describe '.debian_architecture_regex' do
- subject { described_class.debian_architecture_regex }
-
- it { is_expected.to match('amd64') }
- it { is_expected.to match('kfreebsd-i386') }
-
- # may not be empty string
- it { is_expected.not_to match('') }
- # must start with an alphanumeric
- it { is_expected.not_to match('-a') }
- it { is_expected.not_to match('+a') }
- it { is_expected.not_to match('.a') }
- it { is_expected.not_to match('_a') }
- # only letters, digits and characters '-'
- it { is_expected.not_to match('a+b') }
- it { is_expected.not_to match('a.b') }
- it { is_expected.not_to match('a_b') }
- it { is_expected.not_to match('a~') }
- it { is_expected.not_to match('aé') }
-
- # More strict
- # Enforce lowercase
- it { is_expected.not_to match('AMD64') }
- it { is_expected.not_to match('Amd64') }
- it { is_expected.not_to match('aMD64') }
- end
-
- describe '.debian_distribution_regex' do
- subject { described_class.debian_distribution_regex }
-
- it { is_expected.to match('buster') }
- it { is_expected.to match('buster-updates') }
- it { is_expected.to match('Debian10.5') }
-
- # Do not allow slash, even if this exists in the wild
- it { is_expected.not_to match('jessie/updates') }
-
- # Do not allow Unicode
- it { is_expected.not_to match('hé') }
- end
-
- describe '.debian_component_regex' do
- subject { described_class.debian_component_regex }
-
- it { is_expected.to match('main') }
- it { is_expected.to match('non-free') }
-
- # Do not allow slash
- it { is_expected.not_to match('non/free') }
-
- # Do not allow Unicode
- it { is_expected.not_to match('hé') }
- end
-
describe '.helm_channel_regex' do
subject { described_class.helm_channel_regex }
@@ -1020,4 +943,29 @@ RSpec.describe Gitlab::Regex do
it { is_expected.not_to match('a' * 63 + '#') }
it { is_expected.not_to match('') }
end
+
+ describe '.sep_by_1' do
+ subject { %r{\A #{described_class.sep_by_1(/\.+/, /[abcdef]{3}/)} \z}x }
+
+ it { is_expected.to match('abc') }
+ it { is_expected.to match('abc.def') }
+ it { is_expected.to match('abc.def.caf') }
+ it { is_expected.to match('abc..def') }
+ it { is_expected.to match('abc..def..caf') }
+ it { is_expected.to match('abc...def') }
+ it { is_expected.to match('abc....def........caf') }
+ it { is_expected.to match((['abc'] * 100).join('.')) }
+
+ it { is_expected.not_to match('') }
+ it { is_expected.not_to match('a') }
+ it { is_expected.not_to match('aaaa') }
+ it { is_expected.not_to match('foo') }
+ it { is_expected.not_to match('.abc') }
+ it { is_expected.not_to match('abc.') }
+ it { is_expected.not_to match('.abc.def') }
+ it { is_expected.not_to match('abc.def.') }
+ it { is_expected.not_to match('abc.defe.caf') }
+ it { is_expected.not_to match('abc!abc') }
+ it { is_expected.not_to match((['abc'] * 100).join('.') + '!') }
+ end
end
diff --git a/spec/lib/gitlab/render_timeout_spec.rb b/spec/lib/gitlab/render_timeout_spec.rb
new file mode 100644
index 00000000000..f322d71867b
--- /dev/null
+++ b/spec/lib/gitlab/render_timeout_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::RenderTimeout do
+ def expect_timeout(period)
+ block = proc {}
+
+ expect(Timeout).to receive(:timeout).with(period) do |_, &block|
+ expect(block).to eq(block)
+ end
+
+ described_class.timeout(&block)
+ end
+
+ it 'utilizes timeout for web' do
+ expect_timeout(described_class::FOREGROUND)
+ end
+
+ it 'utilizes longer timeout for sidekiq' do
+ allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true)
+
+ expect_timeout(described_class::BACKGROUND)
+ end
+end
diff --git a/spec/lib/gitlab/seeder_spec.rb b/spec/lib/gitlab/seeder_spec.rb
index a22d47cbfb3..a94ae2bca7a 100644
--- a/spec/lib/gitlab/seeder_spec.rb
+++ b/spec/lib/gitlab/seeder_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe Gitlab::Seeder do
describe '.quiet' do
let(:database_base_models) do
{
- main: ApplicationRecord,
+ main: ActiveRecord::Base,
ci: Ci::ApplicationRecord
}
end
diff --git a/spec/lib/gitlab/service_desk_email_spec.rb b/spec/lib/gitlab/service_desk_email_spec.rb
index 67a1f07eec6..9847496e361 100644
--- a/spec/lib/gitlab/service_desk_email_spec.rb
+++ b/spec/lib/gitlab/service_desk_email_spec.rb
@@ -78,4 +78,10 @@ RSpec.describe Gitlab::ServiceDeskEmail do
end
end
end
+
+ context 'self.key_from_fallback_message_id' do
+ it 'returns reply key' do
+ expect(described_class.key_from_fallback_message_id('reply-key@localhost')).to eq('key')
+ end
+ end
end
diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
index 00ae55237e9..9c0cbe21e6b 100644
--- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
@@ -59,6 +59,21 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end
end
+ it 'logs the normalized SQL query for statement timeouts' do
+ travel_to(timestamp) do
+ expect(logger).to receive(:info).with(start_payload)
+ expect(logger).to receive(:warn).with(
+ include('exception.sql' => 'SELECT "users".* FROM "users" WHERE "users"."id" = $1 AND "users"."foo" = $2')
+ )
+
+ expect do
+ call_subject(job, 'test_queue') do
+ raise ActiveRecord::StatementInvalid.new(sql: 'SELECT "users".* FROM "users" WHERE "users"."id" = 1 AND "users"."foo" = 2')
+ end
+ end.to raise_error(ActiveRecord::StatementInvalid)
+ end
+ end
+
it 'logs the root cause of an Sidekiq::JobRetry::Skip exception in the job' do
travel_to(timestamp) do
expect(logger).to receive(:info).with(start_payload)
@@ -100,8 +115,8 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
include(
'message' => 'TestWorker JID-da883554ee4fe414012f5f42: fail: 0.0 sec',
'job_status' => 'fail',
- 'error_class' => 'Sidekiq::JobRetry::Skip',
- 'error_message' => 'Sidekiq::JobRetry::Skip'
+ 'exception.class' => 'Sidekiq::JobRetry::Skip',
+ 'exception.message' => 'Sidekiq::JobRetry::Skip'
)
)
expect(subject).to receive(:log_job_start).and_call_original
@@ -288,7 +303,8 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
'duration_s' => 0.0,
'completed_at' => timestamp.to_f,
'cpu_s' => 1.111112,
- 'rate_limiting_gates' => []
+ 'rate_limiting_gates' => [],
+ 'worker_id' => "process_#{Process.pid}"
)
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
index 8d46845548a..d240bf51e67 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gitlab_redis_queues do
+RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gitlab_redis_queues, :clean_gitlab_redis_shared_state do
using RSpec::Parameterized::TableSyntax
subject(:duplicate_job) do
@@ -81,135 +81,99 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
end
end
- describe '#check!' do
- context 'when there was no job in the queue yet' do
- it { expect(duplicate_job.check!).to eq('123') }
+ shared_examples 'tracking duplicates in redis' do
+ describe '#check!' do
+ context 'when there was no job in the queue yet' do
+ it { expect(duplicate_job.check!).to eq('123') }
- shared_examples 'sets Redis keys with correct TTL' do
- it "adds an idempotency key with correct ttl" do
- expect { duplicate_job.check! }
- .to change { read_idempotency_key_with_ttl(idempotency_key) }
- .from([nil, -2])
- .to(['123', be_within(1).of(expected_ttl)])
- end
-
- context 'when wal locations is not empty' do
- it "adds an existing wal locations key with correct ttl" do
+ shared_examples 'sets Redis keys with correct TTL' do
+ it "adds an idempotency key with correct ttl" do
expect { duplicate_job.check! }
- .to change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :main)) }
- .from([nil, -2])
- .to([wal_locations[:main], be_within(1).of(expected_ttl)])
- .and change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :ci)) }
+ .to change { read_idempotency_key_with_ttl(idempotency_key) }
.from([nil, -2])
- .to([wal_locations[:ci], be_within(1).of(expected_ttl)])
+ .to(['123', be_within(1).of(expected_ttl)])
end
- end
- end
- context 'with TTL option is not set' do
- let(:expected_ttl) { described_class::DEFAULT_DUPLICATE_KEY_TTL }
-
- it_behaves_like 'sets Redis keys with correct TTL'
- end
-
- context 'when TTL option is set' do
- let(:expected_ttl) { 5.minutes }
-
- before do
- allow(duplicate_job).to receive(:options).and_return({ ttl: expected_ttl })
+ context 'when wal locations is not empty' do
+ it "adds an existing wal locations key with correct ttl" do
+ expect { duplicate_job.check! }
+ .to change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :main)) }
+ .from([nil, -2])
+ .to([wal_locations[:main], be_within(1).of(expected_ttl)])
+ .and change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :ci)) }
+ .from([nil, -2])
+ .to([wal_locations[:ci], be_within(1).of(expected_ttl)])
+ end
+ end
end
- it_behaves_like 'sets Redis keys with correct TTL'
- end
+ context 'when TTL option is not set' do
+ let(:expected_ttl) { described_class::DEFAULT_DUPLICATE_KEY_TTL }
- it "adds the idempotency key to the jobs payload" do
- expect { duplicate_job.check! }.to change { job['idempotency_key'] }.from(nil).to(idempotency_key)
- end
- end
-
- context 'when there was already a job with same arguments in the same queue' do
- before do
- set_idempotency_key(idempotency_key, 'existing-key')
- wal_locations.each do |config_name, location|
- set_idempotency_key(existing_wal_location_key(idempotency_key, config_name), location)
+ it_behaves_like 'sets Redis keys with correct TTL'
end
- end
- it { expect(duplicate_job.check!).to eq('existing-key') }
+ context 'when TTL option is set' do
+ let(:expected_ttl) { 5.minutes }
- it "does not change the existing key's TTL" do
- expect { duplicate_job.check! }
- .not_to change { read_idempotency_key_with_ttl(idempotency_key) }
- .from(['existing-key', -1])
- end
-
- it "does not change the existing wal locations key's TTL" do
- expect { duplicate_job.check! }
- .to not_change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :main)) }
- .from([wal_locations[:main], -1])
- .and not_change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :ci)) }
- .from([wal_locations[:ci], -1])
- end
+ before do
+ allow(duplicate_job).to receive(:options).and_return({ ttl: expected_ttl })
+ end
- it 'sets the existing jid' do
- duplicate_job.check!
+ it_behaves_like 'sets Redis keys with correct TTL'
+ end
- expect(duplicate_job.existing_jid).to eq('existing-key')
+ it "adds the idempotency key to the jobs payload" do
+ expect { duplicate_job.check! }.to change { job['idempotency_key'] }.from(nil).to(idempotency_key)
+ end
end
- end
- end
-
- describe '#update_latest_wal_location!' do
- before do
- allow(Gitlab::Database).to receive(:database_base_models).and_return(
- { main: ::ActiveRecord::Base,
- ci: ::ActiveRecord::Base })
- set_idempotency_key(existing_wal_location_key(idempotency_key, :main), existing_wal[:main])
- set_idempotency_key(existing_wal_location_key(idempotency_key, :ci), existing_wal[:ci])
+ context 'when there was already a job with same arguments in the same queue' do
+ before do
+ set_idempotency_key(idempotency_key, 'existing-key')
+ wal_locations.each do |config_name, location|
+ set_idempotency_key(existing_wal_location_key(idempotency_key, config_name), location)
+ end
+ end
- # read existing_wal_locations
- duplicate_job.check!
- end
+ it { expect(duplicate_job.check!).to eq('existing-key') }
- context "when the key doesn't exists in redis" do
- let(:existing_wal) do
- {
- main: '0/D525E3A0',
- ci: 'AB/12340'
- }
- end
+ it "does not change the existing key's TTL" do
+ expect { duplicate_job.check! }
+ .not_to change { read_idempotency_key_with_ttl(idempotency_key) }
+ .from(['existing-key', -1])
+ end
- let(:new_wal_location_with_offset) do
- {
- # offset is relative to `existing_wal`
- main: ['0/D525E3A8', '8'],
- ci: ['AB/12345', '5']
- }
- end
+ it "does not change the existing wal locations key's TTL" do
+ expect { duplicate_job.check! }
+ .to not_change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :main)) }
+ .from([wal_locations[:main], -1])
+ .and not_change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :ci)) }
+ .from([wal_locations[:ci], -1])
+ end
- let(:wal_locations) { new_wal_location_with_offset.transform_values(&:first) }
+ it 'sets the existing jid' do
+ duplicate_job.check!
- it 'stores a wal location to redis with an offset relative to existing wal location' do
- expect { duplicate_job.update_latest_wal_location! }
- .to change { read_range_from_redis(wal_location_key(idempotency_key, :main)) }
- .from([])
- .to(new_wal_location_with_offset[:main])
- .and change { read_range_from_redis(wal_location_key(idempotency_key, :ci)) }
- .from([])
- .to(new_wal_location_with_offset[:ci])
+ expect(duplicate_job.existing_jid).to eq('existing-key')
+ end
end
end
- context "when the key exists in redis" do
+ describe '#update_latest_wal_location!' do
before do
- rpush_to_redis_key(wal_location_key(idempotency_key, :main), *stored_wal_location_with_offset[:main])
- rpush_to_redis_key(wal_location_key(idempotency_key, :ci), *stored_wal_location_with_offset[:ci])
- end
+ allow(Gitlab::Database).to receive(:database_base_models).and_return(
+ { main: ::ActiveRecord::Base,
+ ci: ::ActiveRecord::Base })
- let(:wal_locations) { new_wal_location_with_offset.transform_values(&:first) }
+ set_idempotency_key(existing_wal_location_key(idempotency_key, :main), existing_wal[:main])
+ set_idempotency_key(existing_wal_location_key(idempotency_key, :ci), existing_wal[:ci])
- context "when the new offset is bigger then the existing one" do
+ # read existing_wal_locations
+ duplicate_job.check!
+ end
+
+ context "when the key doesn't exists in redis" do
let(:existing_wal) do
{
main: '0/D525E3A0',
@@ -217,14 +181,6 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
}
end
- let(:stored_wal_location_with_offset) do
- {
- # offset is relative to `existing_wal`
- main: ['0/D525E3A3', '3'],
- ci: ['AB/12342', '2']
- }
- end
-
let(:new_wal_location_with_offset) do
{
# offset is relative to `existing_wal`
@@ -233,154 +189,335 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
}
end
- it 'updates a wal location to redis with an offset' do
+ let(:wal_locations) { new_wal_location_with_offset.transform_values(&:first) }
+
+ it 'stores a wal location to redis with an offset relative to existing wal location' do
expect { duplicate_job.update_latest_wal_location! }
.to change { read_range_from_redis(wal_location_key(idempotency_key, :main)) }
- .from(stored_wal_location_with_offset[:main])
+ .from([])
.to(new_wal_location_with_offset[:main])
.and change { read_range_from_redis(wal_location_key(idempotency_key, :ci)) }
- .from(stored_wal_location_with_offset[:ci])
+ .from([])
.to(new_wal_location_with_offset[:ci])
end
end
- context "when the old offset is not bigger then the existing one" do
- let(:existing_wal) do
- {
- main: '0/D525E3A0',
- ci: 'AB/12340'
- }
+ context "when the key exists in redis" do
+ before do
+ rpush_to_redis_key(wal_location_key(idempotency_key, :main), *stored_wal_location_with_offset[:main])
+ rpush_to_redis_key(wal_location_key(idempotency_key, :ci), *stored_wal_location_with_offset[:ci])
end
- let(:stored_wal_location_with_offset) do
- {
- # offset is relative to `existing_wal`
- main: ['0/D525E3A8', '8'],
- ci: ['AB/12345', '5']
- }
- end
+ let(:wal_locations) { new_wal_location_with_offset.transform_values(&:first) }
- let(:new_wal_location_with_offset) do
- {
- # offset is relative to `existing_wal`
- main: ['0/D525E3A2', '2'],
- ci: ['AB/12342', '2']
- }
+        context "when the new offset is bigger than the existing one" do
+ let(:existing_wal) do
+ {
+ main: '0/D525E3A0',
+ ci: 'AB/12340'
+ }
+ end
+
+ let(:stored_wal_location_with_offset) do
+ {
+ # offset is relative to `existing_wal`
+ main: ['0/D525E3A3', '3'],
+ ci: ['AB/12342', '2']
+ }
+ end
+
+ let(:new_wal_location_with_offset) do
+ {
+ # offset is relative to `existing_wal`
+ main: ['0/D525E3A8', '8'],
+ ci: ['AB/12345', '5']
+ }
+ end
+
+ it 'updates a wal location to redis with an offset' do
+ expect { duplicate_job.update_latest_wal_location! }
+ .to change { read_range_from_redis(wal_location_key(idempotency_key, :main)) }
+ .from(stored_wal_location_with_offset[:main])
+ .to(new_wal_location_with_offset[:main])
+ .and change { read_range_from_redis(wal_location_key(idempotency_key, :ci)) }
+ .from(stored_wal_location_with_offset[:ci])
+ .to(new_wal_location_with_offset[:ci])
+ end
end
- it "does not update a wal location to redis with an offset" do
- expect { duplicate_job.update_latest_wal_location! }
- .to not_change { read_range_from_redis(wal_location_key(idempotency_key, :main)) }
- .from(stored_wal_location_with_offset[:main])
- .and not_change { read_range_from_redis(wal_location_key(idempotency_key, :ci)) }
- .from(stored_wal_location_with_offset[:ci])
+        context "when the old offset is not bigger than the existing one" do
+ let(:existing_wal) do
+ {
+ main: '0/D525E3A0',
+ ci: 'AB/12340'
+ }
+ end
+
+ let(:stored_wal_location_with_offset) do
+ {
+ # offset is relative to `existing_wal`
+ main: ['0/D525E3A8', '8'],
+ ci: ['AB/12345', '5']
+ }
+ end
+
+ let(:new_wal_location_with_offset) do
+ {
+ # offset is relative to `existing_wal`
+ main: ['0/D525E3A2', '2'],
+ ci: ['AB/12342', '2']
+ }
+ end
+
+ it "does not update a wal location to redis with an offset" do
+ expect { duplicate_job.update_latest_wal_location! }
+ .to not_change { read_range_from_redis(wal_location_key(idempotency_key, :main)) }
+ .from(stored_wal_location_with_offset[:main])
+ .and not_change { read_range_from_redis(wal_location_key(idempotency_key, :ci)) }
+ .from(stored_wal_location_with_offset[:ci])
+ end
end
end
end
- end
- describe '#latest_wal_locations' do
- context 'when job was deduplicated and wal locations were already persisted' do
- before do
- rpush_to_redis_key(wal_location_key(idempotency_key, :main), wal_locations[:main], 1024)
- rpush_to_redis_key(wal_location_key(idempotency_key, :ci), wal_locations[:ci], 1024)
- end
+ describe '#latest_wal_locations' do
+ context 'when job was deduplicated and wal locations were already persisted' do
+ before do
+ rpush_to_redis_key(wal_location_key(idempotency_key, :main), wal_locations[:main], 1024)
+ rpush_to_redis_key(wal_location_key(idempotency_key, :ci), wal_locations[:ci], 1024)
+ end
- it { expect(duplicate_job.latest_wal_locations).to eq(wal_locations) }
- end
+ it { expect(duplicate_job.latest_wal_locations).to eq(wal_locations) }
+ end
- context 'when job is not deduplication and wal locations were not persisted' do
- it { expect(duplicate_job.latest_wal_locations).to be_empty }
+      context 'when job was not deduplicated and wal locations were not persisted' do
+ it { expect(duplicate_job.latest_wal_locations).to be_empty }
+ end
end
- end
- describe '#delete!' do
- context "when we didn't track the definition" do
- it { expect { duplicate_job.delete! }.not_to raise_error }
- end
+ describe '#delete!' do
+ context "when we didn't track the definition" do
+ it { expect { duplicate_job.delete! }.not_to raise_error }
+ end
- context 'when the key exists in redis' do
- before do
- set_idempotency_key(idempotency_key, 'existing-jid')
- set_idempotency_key(deduplicated_flag_key, 1)
- wal_locations.each do |config_name, location|
- set_idempotency_key(existing_wal_location_key(idempotency_key, config_name), location)
- set_idempotency_key(wal_location_key(idempotency_key, config_name), location)
+ context 'when the key exists in redis' do
+ before do
+ set_idempotency_key(idempotency_key, 'existing-jid')
+ set_idempotency_key(deduplicated_flag_key, 1)
+ wal_locations.each do |config_name, location|
+ set_idempotency_key(existing_wal_location_key(idempotency_key, config_name), location)
+ set_idempotency_key(wal_location_key(idempotency_key, config_name), location)
+ end
end
- end
- shared_examples 'deleting the duplicate job' do
- shared_examples 'deleting keys from redis' do |key_name|
- it "removes the #{key_name} from redis" do
- expect { duplicate_job.delete! }
- .to change { read_idempotency_key_with_ttl(key) }
- .from([from_value, -1])
- .to([nil, -2])
+ shared_examples 'deleting the duplicate job' do
+ shared_examples 'deleting keys from redis' do |key_name|
+ it "removes the #{key_name} from redis" do
+ expect { duplicate_job.delete! }
+ .to change { read_idempotency_key_with_ttl(key) }
+ .from([from_value, -1])
+ .to([nil, -2])
+ end
+ end
+
+ shared_examples 'does not delete key from redis' do |key_name|
+ it "does not remove the #{key_name} from redis" do
+ expect { duplicate_job.delete! }
+ .to not_change { read_idempotency_key_with_ttl(key) }
+ .from([from_value, -1])
+ end
+ end
+
+ it_behaves_like 'deleting keys from redis', 'idempotent key' do
+ let(:key) { idempotency_key }
+ let(:from_value) { 'existing-jid' }
+ end
+
+ it_behaves_like 'deleting keys from redis', 'deduplication counter key' do
+ let(:key) { deduplicated_flag_key }
+ let(:from_value) { '1' }
+ end
+
+ it_behaves_like 'deleting keys from redis', 'existing wal location keys for main database' do
+ let(:key) { existing_wal_location_key(idempotency_key, :main) }
+ let(:from_value) { wal_locations[:main] }
+ end
+
+ it_behaves_like 'deleting keys from redis', 'existing wal location keys for ci database' do
+ let(:key) { existing_wal_location_key(idempotency_key, :ci) }
+ let(:from_value) { wal_locations[:ci] }
+ end
+
+ it_behaves_like 'deleting keys from redis', 'latest wal location keys for main database' do
+ let(:key) { wal_location_key(idempotency_key, :main) }
+ let(:from_value) { wal_locations[:main] }
+ end
+
+ it_behaves_like 'deleting keys from redis', 'latest wal location keys for ci database' do
+ let(:key) { wal_location_key(idempotency_key, :ci) }
+ let(:from_value) { wal_locations[:ci] }
end
end
- shared_examples 'does not delete key from redis' do |key_name|
- it "does not remove the #{key_name} from redis" do
- expect { duplicate_job.delete! }
- .to not_change { read_idempotency_key_with_ttl(key) }
- .from([from_value, -1])
+ context 'when the idempotency key is not part of the job' do
+ it_behaves_like 'deleting the duplicate job'
+
+ it 'recalculates the idempotency hash' do
+ expect(duplicate_job).to receive(:idempotency_hash).and_call_original
+
+ duplicate_job.delete!
end
end
- it_behaves_like 'deleting keys from redis', 'idempotent key' do
- let(:key) { idempotency_key }
- let(:from_value) { 'existing-jid' }
+ context 'when the idempotency key is part of the job' do
+ let(:idempotency_key) { 'not the same as what we calculate' }
+ let(:job) { super().merge('idempotency_key' => idempotency_key) }
+
+ it_behaves_like 'deleting the duplicate job'
+
+ it 'does not recalculate the idempotency hash' do
+ expect(duplicate_job).not_to receive(:idempotency_hash)
+
+ duplicate_job.delete!
+ end
end
+ end
+ end
- it_behaves_like 'deleting keys from redis', 'deduplication counter key' do
- let(:key) { deduplicated_flag_key }
- let(:from_value) { '1' }
+ describe '#set_deduplicated_flag!' do
+ context 'when the job is reschedulable' do
+ before do
+ allow(duplicate_job).to receive(:reschedulable?) { true }
end
- it_behaves_like 'deleting keys from redis', 'existing wal location keys for main database' do
- let(:key) { existing_wal_location_key(idempotency_key, :main) }
- let(:from_value) { wal_locations[:main] }
+ it 'sets the key in Redis' do
+ duplicate_job.set_deduplicated_flag!
+
+ flag = with_redis { |redis| redis.get(deduplicated_flag_key) }
+
+ expect(flag).to eq(described_class::DEDUPLICATED_FLAG_VALUE.to_s)
end
- it_behaves_like 'deleting keys from redis', 'existing wal location keys for ci database' do
- let(:key) { existing_wal_location_key(idempotency_key, :ci) }
- let(:from_value) { wal_locations[:ci] }
+ it 'sets, gets and cleans up the deduplicated flag' do
+ expect(duplicate_job.should_reschedule?).to eq(false)
+
+ duplicate_job.set_deduplicated_flag!
+ expect(duplicate_job.should_reschedule?).to eq(true)
+
+ duplicate_job.delete!
+ expect(duplicate_job.should_reschedule?).to eq(false)
end
+ end
- it_behaves_like 'deleting keys from redis', 'latest wal location keys for main database' do
- let(:key) { wal_location_key(idempotency_key, :main) }
- let(:from_value) { wal_locations[:main] }
+ context 'when the job is not reschedulable' do
+ before do
+ allow(duplicate_job).to receive(:reschedulable?) { false }
end
- it_behaves_like 'deleting keys from redis', 'latest wal location keys for ci database' do
- let(:key) { wal_location_key(idempotency_key, :ci) }
- let(:from_value) { wal_locations[:ci] }
+ it 'does not set the key in Redis' do
+ duplicate_job.set_deduplicated_flag!
+
+ flag = with_redis { |redis| redis.get(deduplicated_flag_key) }
+
+ expect(flag).to be_nil
end
- end
- context 'when the idempotency key is not part of the job' do
- it_behaves_like 'deleting the duplicate job'
+ it 'does not set the deduplicated flag' do
+ expect(duplicate_job.should_reschedule?).to eq(false)
- it 'recalculates the idempotency hash' do
- expect(duplicate_job).to receive(:idempotency_hash).and_call_original
+ duplicate_job.set_deduplicated_flag!
+ expect(duplicate_job.should_reschedule?).to eq(false)
duplicate_job.delete!
+ expect(duplicate_job.should_reschedule?).to eq(false)
end
end
+ end
+
+ describe '#duplicate?' do
+ it "raises an error if the check wasn't performed" do
+ expect { duplicate_job.duplicate? }.to raise_error /Call `#check!` first/
+ end
- context 'when the idempotency key is part of the job' do
- let(:idempotency_key) { 'not the same as what we calculate' }
- let(:job) { super().merge('idempotency_key' => idempotency_key) }
+ it 'returns false if the existing jid equals the job jid' do
+ duplicate_job.check!
- it_behaves_like 'deleting the duplicate job'
+ expect(duplicate_job.duplicate?).to be(false)
+ end
- it 'does not recalculate the idempotency hash' do
- expect(duplicate_job).not_to receive(:idempotency_hash)
+    it 'returns true if the existing jid is different from the job jid' do
+ set_idempotency_key(idempotency_key, 'a different jid')
+ duplicate_job.check!
- duplicate_job.delete!
+ expect(duplicate_job.duplicate?).to be(true)
+ end
+ end
+
+ def existing_wal_location_key(idempotency_key, connection_name)
+ "#{idempotency_key}:#{connection_name}:existing_wal_location"
+ end
+
+ def wal_location_key(idempotency_key, connection_name)
+ "#{idempotency_key}:#{connection_name}:wal_location"
+ end
+
+ def set_idempotency_key(key, value = '1')
+ with_redis { |r| r.set(key, value) }
+ end
+
+ def rpush_to_redis_key(key, wal, offset)
+ with_redis { |r| r.rpush(key, [wal, offset]) }
+ end
+
+ def read_idempotency_key_with_ttl(key)
+ with_redis do |redis|
+ redis.pipelined do |p|
+ p.get(key)
+ p.ttl(key)
end
end
end
+
+ def read_range_from_redis(key)
+ with_redis do |redis|
+ redis.lrange(key, 0, -1)
+ end
+ end
+ end
+
+ context 'with multi-store feature flags turned on' do
+ def with_redis(&block)
+ Gitlab::Redis::DuplicateJobs.with(&block)
+ end
+
+    it 'uses Gitlab::Redis::DuplicateJobs.with' do
+ expect(Gitlab::Redis::DuplicateJobs).to receive(:with).and_call_original
+ expect(Sidekiq).not_to receive(:redis)
+
+ duplicate_job.check!
+ end
+
+ it_behaves_like 'tracking duplicates in redis'
+ end
+
+ context 'when both multi-store feature flags are off' do
+ def with_redis(&block)
+ Sidekiq.redis(&block)
+ end
+
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_duplicate_jobs: false)
+ stub_feature_flags(use_primary_store_as_default_for_duplicate_jobs: false)
+ end
+
+    it 'uses Sidekiq.redis' do
+ expect(Sidekiq).to receive(:redis).and_call_original
+ expect(Gitlab::Redis::DuplicateJobs).not_to receive(:with)
+
+ duplicate_job.check!
+ end
+
+ it_behaves_like 'tracking duplicates in redis'
end
describe '#scheduled?' do
@@ -449,75 +586,6 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
end
end
- describe '#set_deduplicated_flag!' do
- context 'when the job is reschedulable' do
- before do
- allow(duplicate_job).to receive(:reschedulable?) { true }
- end
-
- it 'sets the key in Redis' do
- duplicate_job.set_deduplicated_flag!
-
- flag = Sidekiq.redis { |redis| redis.get(deduplicated_flag_key) }
-
- expect(flag).to eq(described_class::DEDUPLICATED_FLAG_VALUE.to_s)
- end
-
- it 'sets, gets and cleans up the deduplicated flag' do
- expect(duplicate_job.should_reschedule?).to eq(false)
-
- duplicate_job.set_deduplicated_flag!
- expect(duplicate_job.should_reschedule?).to eq(true)
-
- duplicate_job.delete!
- expect(duplicate_job.should_reschedule?).to eq(false)
- end
- end
-
- context 'when the job is not reschedulable' do
- before do
- allow(duplicate_job).to receive(:reschedulable?) { false }
- end
-
- it 'does not set the key in Redis' do
- duplicate_job.set_deduplicated_flag!
-
- flag = Sidekiq.redis { |redis| redis.get(deduplicated_flag_key) }
-
- expect(flag).to be_nil
- end
-
- it 'does not set the deduplicated flag' do
- expect(duplicate_job.should_reschedule?).to eq(false)
-
- duplicate_job.set_deduplicated_flag!
- expect(duplicate_job.should_reschedule?).to eq(false)
-
- duplicate_job.delete!
- expect(duplicate_job.should_reschedule?).to eq(false)
- end
- end
- end
-
- describe '#duplicate?' do
- it "raises an error if the check wasn't performed" do
- expect { duplicate_job.duplicate? }.to raise_error /Call `#check!` first/
- end
-
- it 'returns false if the existing jid equals the job jid' do
- duplicate_job.check!
-
- expect(duplicate_job.duplicate?).to be(false)
- end
-
- it 'returns false if the existing jid is different from the job jid' do
- set_idempotency_key(idempotency_key, 'a different jid')
- duplicate_job.check!
-
- expect(duplicate_job.duplicate?).to be(true)
- end
- end
-
describe '#scheduled_at' do
let(:scheduled_at) { 42 }
let(:job) do
@@ -592,35 +660,4 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi
end
end
end
-
- def existing_wal_location_key(idempotency_key, connection_name)
- "#{idempotency_key}:#{connection_name}:existing_wal_location"
- end
-
- def wal_location_key(idempotency_key, connection_name)
- "#{idempotency_key}:#{connection_name}:wal_location"
- end
-
- def set_idempotency_key(key, value = '1')
- Sidekiq.redis { |r| r.set(key, value) }
- end
-
- def rpush_to_redis_key(key, wal, offset)
- Sidekiq.redis { |r| r.rpush(key, [wal, offset]) }
- end
-
- def read_idempotency_key_with_ttl(key)
- Sidekiq.redis do |redis|
- redis.pipelined do |p|
- p.get(key)
- p.ttl(key)
- end
- end
- end
-
- def read_range_from_redis(key)
- Sidekiq.redis do |redis|
- redis.lrange(key, 0, -1)
- end
- end
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executed_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executed_spec.rb
index 963301bc001..ab9d14ad729 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executed_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executed_spec.rb
@@ -23,8 +23,15 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::Strategies::UntilExecut
end
end
+ it 'deletes the lock even if an error occurs' do
+ expect(fake_duplicate_job).not_to receive(:scheduled?)
+ expect(fake_duplicate_job).to receive(:delete!).once
+
+ perform_strategy_with_error
+ end
+
it 'does not reschedule the job even if deduplication happened' do
- expect(fake_duplicate_job).to receive(:delete!)
+ expect(fake_duplicate_job).to receive(:delete!).once
expect(fake_duplicate_job).not_to receive(:reschedule)
strategy.perform({}) do
@@ -33,16 +40,33 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::Strategies::UntilExecut
end
context 'when job is reschedulable' do
- it 'reschedules the job if deduplication happened' do
+ before do
allow(fake_duplicate_job).to receive(:should_reschedule?) { true }
+ end
- expect(fake_duplicate_job).to receive(:delete!)
+ it 'reschedules the job if deduplication happened' do
+ expect(fake_duplicate_job).to receive(:delete!).once
expect(fake_duplicate_job).to receive(:reschedule).once
strategy.perform({}) do
proc.call
end
end
+
+ it 'does not reschedule the job if an error occurs' do
+ expect(fake_duplicate_job).to receive(:delete!).once
+ expect(fake_duplicate_job).not_to receive(:reschedule)
+
+ perform_strategy_with_error
+ end
+ end
+
+ def perform_strategy_with_error
+ expect do
+ strategy.perform({}) do
+ raise 'expected error'
+ end
+ end.to raise_error(RuntimeError, 'expected error')
end
end
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb b/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb
index 3baa0c6f967..821d8b8fe7b 100644
--- a/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb
@@ -50,6 +50,26 @@ RSpec.describe Gitlab::SidekiqMiddleware::WorkerContext::Client do
end
describe "#call" do
+ context 'root_caller_id' do
+ it 'uses caller_id of the current context' do
+ Gitlab::ApplicationContext.with_context(caller_id: 'CALLER') do
+ TestWithContextWorker.perform_async
+ end
+
+ job = TestWithContextWorker.jobs.last
+ expect(job['meta.root_caller_id']).to eq('CALLER')
+ end
+
+ it 'uses root_caller_id instead of caller_id of the current context' do
+ Gitlab::ApplicationContext.with_context(caller_id: 'CALLER', root_caller_id: 'ROOT_CALLER') do
+ TestWithContextWorker.perform_async
+ end
+
+ job = TestWithContextWorker.jobs.last
+ expect(job['meta.root_caller_id']).to eq('ROOT_CALLER')
+ end
+ end
+
it 'applies a context for jobs scheduled in batch' do
user_per_job = { 'job1' => build_stubbed(:user, username: 'user-1'),
'job2' => build_stubbed(:user, username: 'user-2') }
@@ -97,7 +117,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::WorkerContext::Client do
end
it 'does not set any explicit feature category for mailers', :sidekiq_mailers do
- expect(Gitlab::ApplicationContext).not_to receive(:with_context)
+ expect(Gitlab::ApplicationContext).to receive(:with_context).with(hash_excluding(feature_category: anything))
TestMailer.test_mail.deliver_later
end
diff --git a/spec/lib/gitlab/sidekiq_status_spec.rb b/spec/lib/gitlab/sidekiq_status_spec.rb
index c94deb8e008..027697db7e1 100644
--- a/spec/lib/gitlab/sidekiq_status_spec.rb
+++ b/spec/lib/gitlab/sidekiq_status_spec.rb
@@ -3,138 +3,175 @@
require 'spec_helper'
RSpec.describe Gitlab::SidekiqStatus, :clean_gitlab_redis_queues, :clean_gitlab_redis_shared_state do
- describe '.set' do
- it 'stores the job ID' do
- described_class.set('123')
+ shared_examples 'tracking status in redis' do
+ describe '.set' do
+ it 'stores the job ID' do
+ described_class.set('123')
+
+ key = described_class.key_for('123')
+
+ with_redis do |redis|
+ expect(redis.exists(key)).to eq(true)
+ expect(redis.ttl(key) > 0).to eq(true)
+ expect(redis.get(key)).to eq('1')
+ end
+ end
- key = described_class.key_for('123')
+ it 'allows overriding the expiration time' do
+ described_class.set('123', described_class::DEFAULT_EXPIRATION * 2)
+
+ key = described_class.key_for('123')
- Sidekiq.redis do |redis|
- expect(redis.exists(key)).to eq(true)
- expect(redis.ttl(key) > 0).to eq(true)
- expect(redis.get(key)).to eq('1')
+ with_redis do |redis|
+ expect(redis.exists(key)).to eq(true)
+ expect(redis.ttl(key) > described_class::DEFAULT_EXPIRATION).to eq(true)
+ expect(redis.get(key)).to eq('1')
+ end
end
- end
- it 'allows overriding the expiration time' do
- described_class.set('123', described_class::DEFAULT_EXPIRATION * 2)
+ it 'does not store anything with a nil expiry' do
+ described_class.set('123', nil)
- key = described_class.key_for('123')
+ key = described_class.key_for('123')
- Sidekiq.redis do |redis|
- expect(redis.exists(key)).to eq(true)
- expect(redis.ttl(key) > described_class::DEFAULT_EXPIRATION).to eq(true)
- expect(redis.get(key)).to eq('1')
+ with_redis do |redis|
+ expect(redis.exists(key)).to eq(false)
+ end
end
end
- it 'does not store anything with a nil expiry' do
- described_class.set('123', nil)
+ describe '.unset' do
+ it 'removes the job ID' do
+ described_class.set('123')
+ described_class.unset('123')
- key = described_class.key_for('123')
+ key = described_class.key_for('123')
- Sidekiq.redis do |redis|
- expect(redis.exists(key)).to eq(false)
+ with_redis do |redis|
+ expect(redis.exists(key)).to eq(false)
+ end
end
end
- end
- describe '.unset' do
- it 'removes the job ID' do
- described_class.set('123')
- described_class.unset('123')
+ describe '.all_completed?' do
+ it 'returns true if all jobs have been completed' do
+ expect(described_class.all_completed?(%w(123))).to eq(true)
+ end
- key = described_class.key_for('123')
+ it 'returns false if a job has not yet been completed' do
+ described_class.set('123')
- Sidekiq.redis do |redis|
- expect(redis.exists(key)).to eq(false)
+ expect(described_class.all_completed?(%w(123 456))).to eq(false)
end
end
- end
- describe '.all_completed?' do
- it 'returns true if all jobs have been completed' do
- expect(described_class.all_completed?(%w(123))).to eq(true)
- end
+ describe '.running?' do
+ it 'returns true if job is running' do
+ described_class.set('123')
- it 'returns false if a job has not yet been completed' do
- described_class.set('123')
+ expect(described_class.running?('123')).to be(true)
+ end
- expect(described_class.all_completed?(%w(123 456))).to eq(false)
+ it 'returns false if job is not found' do
+ expect(described_class.running?('123')).to be(false)
+ end
end
- end
- describe '.running?' do
- it 'returns true if job is running' do
- described_class.set('123')
+ describe '.num_running' do
+ it 'returns 0 if all jobs have been completed' do
+ expect(described_class.num_running(%w(123))).to eq(0)
+ end
+
+ it 'returns 2 if two jobs are still running' do
+ described_class.set('123')
+ described_class.set('456')
- expect(described_class.running?('123')).to be(true)
+ expect(described_class.num_running(%w(123 456 789))).to eq(2)
+ end
end
- it 'returns false if job is not found' do
- expect(described_class.running?('123')).to be(false)
+ describe '.num_completed' do
+ it 'returns 1 if all jobs have been completed' do
+ expect(described_class.num_completed(%w(123))).to eq(1)
+ end
+
+ it 'returns 1 if a job has not yet been completed' do
+ described_class.set('123')
+ described_class.set('456')
+
+ expect(described_class.num_completed(%w(123 456 789))).to eq(1)
+ end
end
- end
- describe '.num_running' do
- it 'returns 0 if all jobs have been completed' do
- expect(described_class.num_running(%w(123))).to eq(0)
+ describe '.completed_jids' do
+ it 'returns the completed job' do
+ expect(described_class.completed_jids(%w(123))).to eq(['123'])
+ end
+
+ it 'returns only the jobs completed' do
+ described_class.set('123')
+ described_class.set('456')
+
+ expect(described_class.completed_jids(%w(123 456 789))).to eq(['789'])
+ end
end
- it 'returns 2 if two jobs are still running' do
- described_class.set('123')
- described_class.set('456')
+ describe '.job_status' do
+ it 'returns an array of boolean values' do
+ described_class.set('123')
+ described_class.set('456')
+ described_class.unset('123')
- expect(described_class.num_running(%w(123 456 789))).to eq(2)
+ expect(described_class.job_status(%w(123 456 789))).to eq([false, true, false])
+ end
+
+ it 'handles an empty array' do
+ expect(described_class.job_status([])).to eq([])
+ end
end
end
- describe '.num_completed' do
- it 'returns 1 if all jobs have been completed' do
- expect(described_class.num_completed(%w(123))).to eq(1)
+ context 'with multi-store feature flags turned on' do
+ def with_redis(&block)
+ Gitlab::Redis::SidekiqStatus.with(&block)
end
- it 'returns 1 if a job has not yet been completed' do
- described_class.set('123')
- described_class.set('456')
+ it 'uses Gitlab::Redis::SidekiqStatus.with' do
+ expect(Gitlab::Redis::SidekiqStatus).to receive(:with).and_call_original
+ expect(Sidekiq).not_to receive(:redis)
- expect(described_class.num_completed(%w(123 456 789))).to eq(1)
+ described_class.job_status(%w(123 456 789))
end
- end
- describe '.key_for' do
- it 'returns the key for a job ID' do
- key = described_class.key_for('123')
+ it_behaves_like 'tracking status in redis'
+ end
- expect(key).to be_an_instance_of(String)
- expect(key).to include('123')
+ context 'when both multi-store feature flags are off' do
+ def with_redis(&block)
+ Sidekiq.redis(&block)
end
- end
- describe '.completed_jids' do
- it 'returns the completed job' do
- expect(described_class.completed_jids(%w(123))).to eq(['123'])
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_sidekiq_status: false)
+ stub_feature_flags(use_primary_store_as_default_for_sidekiq_status: false)
end
- it 'returns only the jobs completed' do
- described_class.set('123')
- described_class.set('456')
+ it 'uses Sidekiq.redis' do
+ expect(Sidekiq).to receive(:redis).and_call_original
+ expect(Gitlab::Redis::SidekiqStatus).not_to receive(:with)
- expect(described_class.completed_jids(%w(123 456 789))).to eq(['789'])
+ described_class.job_status(%w(123 456 789))
end
- end
- describe '.job_status' do
- it 'returns an array of boolean values' do
- described_class.set('123')
- described_class.set('456')
- described_class.unset('123')
+ it_behaves_like 'tracking status in redis'
+ end
- expect(described_class.job_status(%w(123 456 789))).to eq([false, true, false])
- end
+ describe '.key_for' do
+ it 'returns the key for a job ID' do
+ key = described_class.key_for('123')
- it 'handles an empty array' do
- expect(described_class.job_status([])).to eq([])
+ expect(key).to be_an_instance_of(String)
+ expect(key).to include('123')
end
end
end
diff --git a/spec/lib/gitlab/slash_commands/deploy_spec.rb b/spec/lib/gitlab/slash_commands/deploy_spec.rb
index 71fca1e1fc8..5167523ff58 100644
--- a/spec/lib/gitlab/slash_commands/deploy_spec.rb
+++ b/spec/lib/gitlab/slash_commands/deploy_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe Gitlab::SlashCommands::Deploy do
context 'with environment' do
let!(:staging) { create(:environment, name: 'staging', project: project) }
let!(:pipeline) { create(:ci_pipeline, project: project) }
- let!(:build) { create(:ci_build, pipeline: pipeline) }
+ let!(:build) { create(:ci_build, pipeline: pipeline, environment: 'production') }
let!(:deployment) { create(:deployment, :success, environment: staging, deployable: build) }
context 'without actions' do
diff --git a/spec/lib/gitlab/sql/cte_spec.rb b/spec/lib/gitlab/sql/cte_spec.rb
index 18ae2cb065f..523380eae34 100644
--- a/spec/lib/gitlab/sql/cte_spec.rb
+++ b/spec/lib/gitlab/sql/cte_spec.rb
@@ -3,15 +3,14 @@
require 'spec_helper'
RSpec.describe Gitlab::SQL::CTE do
- describe '#to_arel' do
+ shared_examples '#to_arel' do
it 'generates an Arel relation for the CTE body' do
- relation = User.where(id: 1)
cte = described_class.new(:cte_name, relation)
sql = cte.to_arel.to_sql
name = ApplicationRecord.connection.quote_table_name(:cte_name)
sql1 = ApplicationRecord.connection.unprepared_statement do
- relation.except(:order).to_sql
+ relation.is_a?(String) ? relation : relation.to_sql
end
expected = [
@@ -25,6 +24,20 @@ RSpec.describe Gitlab::SQL::CTE do
end
end
+ describe '#to_arel' do
+ context 'when relation is an ActiveRecord::Relation' do
+ let(:relation) { User.where(id: 1) }
+
+ include_examples '#to_arel'
+ end
+
+ context 'when relation is a String' do
+ let(:relation) { User.where(id: 1).to_sql }
+
+ include_examples '#to_arel'
+ end
+ end
+
describe '#alias_to' do
it 'returns an alias for the CTE' do
cte = described_class.new(:cte_name, nil)
diff --git a/spec/lib/gitlab/ssh/signature_spec.rb b/spec/lib/gitlab/ssh/signature_spec.rb
new file mode 100644
index 00000000000..e8d366f0762
--- /dev/null
+++ b/spec/lib/gitlab/ssh/signature_spec.rb
@@ -0,0 +1,227 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ssh::Signature do
+ # ssh-keygen -t ed25519
+ let_it_be(:committer_email) { 'ssh-commit-test@example.com' }
+ let_it_be(:public_key_text) { 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIJKOfqOH0fDde+Ua/1SObkXB1CEDF5M6UfARMpW3F87u' }
+ let_it_be_with_reload(:user) { create(:user, email: committer_email) }
+ let_it_be_with_reload(:key) { create(:key, key: public_key_text, user: user) }
+
+ let(:signed_text) { 'This message was signed by an ssh key' }
+
+ let(:signature_text) do
+ # ssh-keygen -Y sign -n file -f id_test message.txt
+ <<~SIG
+ -----BEGIN SSH SIGNATURE-----
+ U1NIU0lHAAAAAQAAADMAAAALc3NoLWVkMjU1MTkAAAAgko5+o4fR8N175Rr/VI5uRcHUIQ
+ MXkzpR8BEylbcXzu4AAAAEZmlsZQAAAAAAAAAGc2hhNTEyAAAAUwAAAAtzc2gtZWQyNTUx
+ OQAAAECQa95KgBkgbMwIPNwHRjHu0WYrKvAc5O/FaBXlTDcPWQHi8WRDhbPNN6MqSYLg/S
+ hsei6Y8VYPv85StrEHYdoF
+ -----END SSH SIGNATURE-----
+ SIG
+ end
+
+ subject(:signature) do
+ described_class.new(
+ signature_text,
+ signed_text,
+ committer_email
+ )
+ end
+
+ shared_examples 'verified signature' do
+ it 'reports verified status' do
+ expect(signature.verification_status).to eq(:verified)
+ end
+ end
+
+ shared_examples 'unverified signature' do
+ it 'reports unverified status' do
+ expect(signature.verification_status).to eq(:unverified)
+ end
+ end
+
+ describe 'signature verification' do
+ context 'when signature is valid and user email is verified' do
+ it_behaves_like 'verified signature'
+ end
+
+ context 'when using an RSA key' do
+ let(:public_key_text) do
+ <<~KEY.delete("\n")
+ ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCr3ucg9tLf87S2TxgeDaO4Cs5Mzv7wwi5w
+ OnSG8hE/Zj7xzf0kXAYns/dHhPilkQMCulMQuGGprGzDJXZ9WrrVDHgBj2+kLB8cc+XYIb29
+ HPsoz5a1T776wWrzs5cw3Vbb0ZEMPG27SfJ+HtIqnIAcgBoRxgP/+I9we7tVxrTuog/9jSzU
+ H1IscwfwgKdUrvN5cyhqqxWspwZVlf6s4jaVjC9sKlF7u9CBCxqM2G7GZRKH2sEV2Tw0mT4z
+ 39UQ5uz9+4hxWChosiQChrT9zSJDGWQm3WGn5ubYPeB/xINEKkFxuEupnSK7l8PQxeLAwlcN
+ YHKMkHdO16O6PlpxvcLR1XVy4F12NXCxFjTr8GmFvJTvevf9iuFRmYQpffqm+EMN0shuhPag
+ Z1poVK7ZMO49b4HD6csGwDjXEgNAnyi7oPV1WMHVy+xi2j+yaAgiVk50kgTwp9sGkHTiMTM8
+ YWjCq+Hb+HXLINmqO5V1QChT7PAFYycmQ0Fe2x39eLLMHy0=
+ KEY
+ end
+
+ let(:signature_text) do
+ <<~SIG
+ -----BEGIN SSH SIGNATURE-----
+ U1NIU0lHAAAAAQAAAZcAAAAHc3NoLXJzYQAAAAMBAAEAAAGBAKve5yD20t/ztLZPGB4No7
+ gKzkzO/vDCLnA6dIbyET9mPvHN/SRcBiez90eE+KWRAwK6UxC4YamsbMMldn1autUMeAGP
+ b6QsHxxz5dghvb0c+yjPlrVPvvrBavOzlzDdVtvRkQw8bbtJ8n4e0iqcgByAGhHGA//4j3
+ B7u1XGtO6iD/2NLNQfUixzB/CAp1Su83lzKGqrFaynBlWV/qziNpWML2wqUXu70IELGozY
+ bsZlEofawRXZPDSZPjPf1RDm7P37iHFYKGiyJAKGtP3NIkMZZCbdYafm5tg94H/Eg0QqQX
+ G4S6mdIruXw9DF4sDCVw1gcoyQd07Xo7o+WnG9wtHVdXLgXXY1cLEWNOvwaYW8lO969/2K
+ 4VGZhCl9+qb4Qw3SyG6E9qBnWmhUrtkw7j1vgcPpywbAONcSA0CfKLug9XVYwdXL7GLaP7
+ JoCCJWTnSSBPCn2waQdOIxMzxhaMKr4dv4dcsg2ao7lXVAKFPs8AVjJyZDQV7bHf14sswf
+ LQAAAARmaWxlAAAAAAAAAAZzaGE1MTIAAAGUAAAADHJzYS1zaGEyLTUxMgAAAYAXgXpXWw
+ A1fYHTUON+e1yrTw8AKB4ymfqpR9Zr1OUmYUKJ9xXvvyNCfKHL6XD14CkMu1Tx8Z3TTPG9
+ C6uAXBniKRwwaLVOKffZMshf5sbjcy65KkqBPC7n/cDiCAeoJ8Y05trEDV62+pOpB2lLdv
+ pwwg2o0JaoLbdRcKCD0pw1u0O7VDDngTKFZ4ghHrEslxwlFruht1h9hs3rmdITlT0RMNuU
+ PHGAIB56u4E4UeoMd3D5rga+4Boj0s6551VgP3vCmcz9ZojPHhTCQdUZU1yHdEBTadYTq6
+ UWHhQwDCUDkSNKCRxWo6EyKZQeTakedAt4qkdSpSUCKOJGWKmPOfAm2/sDEmSxffRdxRRg
+ QUe8lklyFTZd6U/ZkJ/y7VR46fcSkEqLSLd9jAZT/3HJXbZfULpwsTcvcLcJLkCuzHEaU1
+ LRyJBsanLCYHTv7ep5PvIuAngUWrXK2eb7oacVs94mWXfs1PG482Ym4+bZA5u0QliGTVaC
+ M2EMhRTf0cqFuA4=
+ -----END SSH SIGNATURE-----
+ SIG
+ end
+
+ before do
+ key.update!(key: public_key_text)
+ end
+
+ it_behaves_like 'verified signature'
+ end
+
+ context 'when signed text is an empty string' do
+ let(:signed_text) { '' }
+ let(:signature_text) do
+ <<~SIG
+ -----BEGIN SSH SIGNATURE-----
+ U1NIU0lHAAAAAQAAADMAAAALc3NoLWVkMjU1MTkAAAAgko5+o4fR8N175Rr/VI5uRcHUIQ
+ MXkzpR8BEylbcXzu4AAAAEZmlsZQAAAAAAAAAGc2hhNTEyAAAAUwAAAAtzc2gtZWQyNTUx
+ OQAAAEC1y2I7o3KqKFlnM+MLkhIo+uRX3YQOYCqycfibyfvmkZTcwqMxgNBInBM9pY3VvS
+ sbW2iEdgz34agHbi+1BHIM
+ -----END SSH SIGNATURE-----
+ SIG
+ end
+
+ it_behaves_like 'verified signature'
+ end
+
+ context 'when signed text is nil' do
+ let(:signed_text) { nil }
+ let(:signature_text) do
+ <<~SIG
+ -----BEGIN SSH SIGNATURE-----
+ U1NIU0lHAAAAAQAAADMAAAALc3NoLWVkMjU1MTkAAAAgko5+o4fR8N175Rr/VI5uRcHUIQ
+ MXkzpR8BEylbcXzu4AAAAEZmlsZQAAAAAAAAAGc2hhNTEyAAAAUwAAAAtzc2gtZWQyNTUx
+ OQAAAEC1y2I7o3KqKFlnM+MLkhIo+uRX3YQOYCqycfibyfvmkZTcwqMxgNBInBM9pY3VvS
+ sbW2iEdgz34agHbi+1BHIM
+ -----END SSH SIGNATURE-----
+ SIG
+ end
+
+ it_behaves_like 'unverified signature'
+ end
+
+ context 'when committer_email is empty' do
+ let(:committer_email) { '' }
+
+ it_behaves_like 'unverified signature'
+ end
+
+ context 'when committer_email is nil' do
+ let(:committer_email) { nil }
+
+ it_behaves_like 'unverified signature'
+ end
+
+ context 'when signature_text is empty' do
+ let(:signature_text) { '' }
+
+ it_behaves_like 'unverified signature'
+ end
+
+ context 'when signature_text is nil' do
+ let(:signature_text) { nil }
+
+ it_behaves_like 'unverified signature'
+ end
+
+ context 'when user email is not verified' do
+ before do
+ user.update!(confirmed_at: nil)
+ end
+
+ it_behaves_like 'unverified signature'
+ end
+
+ context 'when no user exists with the committer email' do
+ let(:committer_email) { 'different-email+ssh-commit-test@example.com' }
+
+ it_behaves_like 'unverified signature'
+ end
+
+ context 'when signature is invalid' do
+ let(:signature_text) do
+ # truncated base64
+ <<~SIG
+ -----BEGIN SSH SIGNATURE-----
+ U1NIU0lHAAAAAQAAADMAAAALc3NoLWVkMjU1MTkAAAAgko5+o4fR8N175Rr/VI5uRcHUIQ
+ MXkzpR8BEylbcXzu4AAAAEZmlsZQAAAAAAAAAGc2hhNTEyAAAAUwAAAAtzc2gtZWQyNTUx
+ OQAAAECQa95KgBkgbMwIPNwHRjHu0WYrKvAc5O/FaBXlTDcPWQHi8WRDhbPNN6MqSYLg/S
+ -----END SSH SIGNATURE-----
+ SIG
+ end
+
+ it_behaves_like 'unverified signature'
+ end
+
+ context 'when signature is for a different message' do
+ let(:signature_text) do
+ <<~SIG
+ -----BEGIN SSH SIGNATURE-----
+ U1NIU0lHAAAAAQAAADMAAAALc3NoLWVkMjU1MTkAAAAgQtog20+l2pMcPnuoaWXuNpw9u7
+ OzPnJzdLUon0+ELNQAAAAEZmlsZQAAAAAAAAAGc2hhNTEyAAAAUwAAAAtzc2gtZWQyNTUx
+ OQAAAEB3/B+6c3+XqEuqjiqlVQwQmUdj8WquROtkhdtScEOP8GXcGQx+aaQs5nq4ZJCuu5
+ ywcU+4xQaLVpCf7tfGWa4K
+ -----END SSH SIGNATURE-----
+ SIG
+ end
+
+ it_behaves_like 'unverified signature'
+ end
+
+ context 'when message has been tampered' do
+ let(:signed_text) do
+ <<~MSG
+ This message was signed by an ssh key
+ The pubkey fingerprint is SHA256:RjzeOilYHkiHqz5fefdnrWr8qn5nbroAisuuTMoH9PU
+ MSG
+ end
+
+ it_behaves_like 'unverified signature'
+ end
+
+ context 'when key does not exist in GitLab' do
+ before do
+ key.delete
+ end
+
+ it 'reports unknown_key status' do
+ expect(signature.verification_status).to eq(:unknown_key)
+ end
+ end
+
+ context 'when key belongs to someone other than the committer' do
+ let_it_be(:other_user) { create(:user, email: 'other-user@example.com') }
+
+ let(:committer_email) { other_user.email }
+
+ it 'reports other_user status' do
+ expect(signature.verification_status).to eq(:other_user)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ssh_public_key_spec.rb b/spec/lib/gitlab/ssh_public_key_spec.rb
index 422b6f925a1..114a18cf99a 100644
--- a/spec/lib/gitlab/ssh_public_key_spec.rb
+++ b/spec/lib/gitlab/ssh_public_key_spec.rb
@@ -334,6 +334,107 @@ RSpec.describe Gitlab::SSHPublicKey, lib: true, fips_mode: false do
include_examples 'raises error when the key is represented by a class that is not in the list of supported technologies'
end
+ describe '#banned?' do
+ subject { public_key.banned? }
+
+ where(:key) do
+ [
+ 'ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAIEAwRIdDlHaIqZXND/l1vFT7ue3rc/DvXh2y' \
+ 'x5EFtuxGQRHVxGMazDhV4vj5ANGXDQwUYI0iZh6aOVrDy8I/y9/y+YDGCvsnqrDbuPDjW' \
+ '26s2bBXWgUPiC93T3TA6L2KOxhVcl7mljEOIYACRHPpJNYVGhinCxDUH9LxMrdNXgP5Ok= mateidu@localhost',
+
+ 'ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAIBnZQ+6nhlPX/JnX5i5hXpljJ89bSnnrsSs51' \
+ 'hSPuoJGmoKowBddISK7s10AIpO0xAWGcr8PUr2FOjEBbDHqlRxoXF0Ocms9xv3ql9EYUQ5' \
+ '+U+M6BymWhNTFPOs6gFHUl8Bw3t6c+SRKBpfRFB0yzBj9d093gSdfTAFoz+yLo4vRw==',
+
+ 'ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAIEAvIhC5skTzxyHif/7iy3yhxuK6/OB13hjPq' \
+ 'rskogkYFrcW8OK4VJT+5+Fx7wd4sQCnVn8rNqahw/x6sfcOMDI/Xvn4yKU4t8TnYf2MpUV' \
+ 'r4ndz39L5Ds1n7Si1m2suUNxWbKv58I8+NMhlt2ITraSuTU0NGymWOc8+LNi+MHXdLk= SCCP Superuser',
+
+ 'ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEA6NF8iallvQVp22WDkTkyrtvp9eWW6A8YVr' \
+ '+kz4TjGYe7gHzIw+niNltGEFHzD8+v1I2YJ6oXevct1YeS0o9HZyN1Q9qgCgzUFtdOKLv6' \
+ 'IedplqoPkcmF0aYet2PkEDo3MlTBckFXPITAMzF8dJSIFo9D8HfdOV0IAdx4O7PtixWKn5' \
+ 'y2hMNG0zQPyUecp4pzC6kivAIhyfHilFR61RGL+GPXQ2MWZWFYbAGjyiYJnAmCP3NOTd0j' \
+ 'MZEnDkbUvxhMmBYSdETk1rRgm+R4LOzFUGaHqHDLKLX+FIPKcF96hrucXzcWyLbIbEgE98' \
+ 'OHlnVYCzRdK8jlqm8tehUc9c9WhQ== vagrant insecure public key',
+
+ 'ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAIEAwRIdDlHaIqZXND/l1vFT7ue3rc/DvXh2yx' \
+ '5EFtuxGQRHVxGMazDhV4vj5ANGXDQwUYI0iZh6aOVrDy8I/y9/y+YDGCvsnqrDbuPDjW26' \
+ 's2bBXWgUPiC93T3TA6L2KOxhVcl7mljEOIYACRHPpJNYVGhinCxDUH9LxMrdNXgP5Ok= mateidu@localhost',
+
+ 'ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAIEAn8LoId2N5i28cNKuEWWea3yt0I/LdT/NRO' \
+ 'rF44WZewtxch+DIwteQhM1qL6EKUSqz3Q2geX1crpOsNnyh67xy5lNo086u/QewOCSRAUG' \
+ 'rQCXqFQ4JU8ny/qugWALQHjbIaPHj/3zMK09r4cpTSeAU7CW5nQyTKGmh7v9CAfWfcs= adam@localhost.localdomain',
+
+ 'ssh-dss AAAAB3NzaC1kc3MAAACBAJTDsX+8olPZeyr58g9XE0L8PKT5030NZBPlE7np4h' \
+ 'Bqx36HoWarWq1Csn8M57dWN9StKbs03k2ggY6sYJK5AW2EWar70um3pYjKQHiZq7mITmit' \
+ 'sozFN/K7wu2e2iKRgquUwH5SuYoOJ29n7uhaILXiKZP4/H/dDudqPRSY6tJPAAAAFQDtuW' \
+ 'H90mDbU2L/Ms2lfl/cja/wHwAAAIAMBwSHZt2ysOHCFe1WLUvdwVDHUqk3QHTskuuAnMlw' \
+ 'MtSvCaUxSatdHahsMZ9VCHjoQUx6j+TcgRLDbMlRLnwUlb6wpniehLBFk+qakGcREqks5N' \
+ 'xYzFTJXwROzP72jPvVgQyOZHWq81gCild/ljL7hmrduCqYwxDIz4o7U92UKQAAAIBmhSl9' \
+ 'CVPgVMv1xO8DAHVhM1huIIK8mNFrzMJz+JXzBx81ms1kWSeQOC/nraaXFTBlqiQsvB8tzr' \
+ '4xZdbaI/QzVLKNAF5C8BJ4ScNlTIx1aZJwyMil8Nzb+0YAsw5Ja+bEZZvEVlAYnd10qRWr' \
+ 'PeEY1txLMmX3wDa+JvJL7fmuBg==',
+
+ 'ssh-dss AAAAB3NzaC1kc3MAAACBAMq5EcIFdfCjJakyQnP/BBp9oc6mpaZVguf0Znp5C4' \
+ '0twiG1lASQJZlM1qOB/hkBWYeBCHUkcOLEnVXSZzB62L+W/LGKodqnsiQPRr57AA6jPc6m' \
+ 'NBnejHai8cSdAl9n/0s2IQjdcrxM8CPq2uEyfm0J3AV6Lrbbxr5NgE5xxM+DAAAAFQCmFk' \
+ '/M7Rx2jexsJ9COpHkHwUjcNQAAAIAdg18oByp/tjjDKhWhmmv+HbVIROkRqSxBvuEZEmcW' \
+ 'lg38mLIT1bydfpSou/V4rI5ctxwCfJ1rRr66pw6GwCrz4fXmyVlhrj7TrktyQ9+zRXhynF' \
+ '4wdNPWErhNHb8tGlSOFiOBcUTlouX3V/ka6Dkd6ZQrZLQFaH+gjfyTZZ82HQAAAIEArsJg' \
+ 'p7RLPOsCeLqoia/eljseBFVDazO5Q0ysUotTw9wgXGGVWREwm8wNggFNb9eCiBAAUfVZVf' \
+ 'hVAtFT0pBf/eIVLPXyaMw3prBt7LqeBrbagODc3WAAdMTPIdYYcOKgv+YvTXa51zG64v6p' \
+ 'QOfS8WXgKCzDl44puXfYeDk5lVQ=',
+
+ 'ssh-dss AAAAB3NzaC1kc3MAAACBAKwKBw7D4OA1H/uD4htdh04TBIHdbSjeXUSnWJsce8' \
+ 'C0tvoB01Yarjv9TFj+tfeDYVWtUK1DA1JkyqSuoAtDANJzF4I6Isyd0KPrW3dHFTcg6Xlz' \
+ '8d3KEaHokY93NOmB/xWEkhme8b7Q0U2iZie2pgWbTLXV0FA+lhskTtPHW3+VAAAAFQDRya' \
+ 'yUlVZKXEweF3bUe03zt9e8VQAAAIAEPK1k3Y6ErAbIl96dnUCnZjuWQ7xXy062pf63QuRW' \
+ 'I6LYSscm3f1pEknWUNFr/erQ02pkfi2eP9uHl1TI1ql+UmJX3g3frfssLNZwWXAW0m8PbY' \
+ '3HZSs+f5hevM3ua32pnKDmbQ2WpvKNyycKHi81hSI14xMcdblJolhN5iY8/wAAAIAjEe5+' \
+ '0m/TlBtVkqQbUit+s/g+eB+PFQ+raaQdL1uztW3etntXAPH1MjxsAC/vthWYSTYXORkDFM' \
+ 'hrO5ssE2rfg9io0NDyTIZt+VRQMGdi++dH8ptU+ldl2ZejLFdTJFwFgcfXz+iQ1mx6h9TP' \
+ 'X1crE1KoMAVOj3yKVfKpLB1EkA== root@lbslave',
+
+ 'ssh-dss AAAAB3NzaC1kc3MAAACBAN3AITryJMQyOKZjAky+mQ/8pOHIlu4q8pzmR0qotK' \
+ 'aLm2yye5a0PY2rOaQRAzi7EPheBXbqTb8a8TrHhGXI5P7GUHaJho5HhEnw+5TwAvP72L7L' \
+ 'cPwxMxj/rLcR/jV+uLMsVeJVWjwJcUv83yzPXoVjK0hrIm+RLLeuTM+gTylHAAAAFQD5gB' \
+ 'dXsXAiTz1atzMg3xDFF1zlowAAAIAlLy6TCMlOBM0IcPsvP/9bEjDj0M8YZazdqt4amO2I' \
+ 'aNUPYt9/sIsLOQfxIj8myDK1TOp8NyRJep7V5aICG4f3Q+XktlmLzdWn3sjvbWuIAXe1op' \
+ 'jG2T69YhxfHZr8Wn7P4tpCgyqM4uHmUKrfnBzQQ9vkUUWsZoUXM2Z7vUXVfQAAAIAU6eNl' \
+ 'phQWDwx0KOBiiYhF9BM6kDbQlyw8333rAG3G4CcjI2G8eYGtpBNliaD185UjCEsjPiudhG' \
+ 'il/j4Zt/+VY3aGOLoi8kqXBBc8ZAML9bbkXpyhQhMgwiywx3ciFmvSn2UAin8yurStYPQx' \
+ 'tXauZN5PYbdwCHPS7ApIStdpMA== wood@endec1',
+
+ 'ssh-dss AAAAB3NzaC1kc3MAAACBAISAE3CAX4hsxTw0dRc0gx8nQ41r3Vkj9OmG6LGeKW' \
+ 'Rmpy7C6vaExuupjxid76fd4aS56lCUEEoRlJ3zE93qoK9acI6EGqGQFLuDZ0fqMyRSX+il' \
+ 'f+1HDo/TRyuraggxp9Hj9LMpZVbpFATMm0+d9Xs7eLmaJjuMsowNlOf8NFdHAAAAFQCwdv' \
+ 'qOAkR6QhuiAapQ/9iVuR0UAQAAAIBpLMo4dhSeWkChfv659WLPftxRrX/HR8YMD/jqa3R4' \
+ 'PsVM2g6dQ1191nHugtdV7uaMeOqOJ/QRWeYM+UYwT0Zgx2LqvgVSjNDfdjk+ZRY8x3SmEx' \
+ 'Fi62mKFoTGSOCXfcAfuanjaoF+sepnaiLUd+SoJShGYHoqR2QWiysTRqknlwAAAIBLEgYm' \
+ 'r9XCSqjENFDVQPFELYKT7Zs9J87PjPS1AP0qF1OoRGZ5mefK6X/6VivPAUWmmmev/BuAs8' \
+ 'M1HtfGeGGzMzDIiU/WZQ3bScLB1Ykrcjk7TOFD6xrnk/inYAp5l29hjidoAONcXoHmUAMY' \
+ 'OKqn63Q2AsDpExVcmfj99/BlpQ=='
+ ]
+ end
+
+ with_them do
+ it { is_expected.to be true }
+ end
+
+ context 'with a valid SSH key' do
+ let(:key) { attributes_for(:rsa_key_2048)[:key] }
+
+ it { is_expected.to be false }
+ end
+
+ context 'with an invalid SSH key' do
+ let(:key) { 'this is not a key' }
+
+ it { is_expected.to be false }
+ end
+ end
+
describe '#fingerprint' do
subject { public_key.fingerprint }
diff --git a/spec/lib/gitlab/static_site_editor/config/file_config/entry/global_spec.rb b/spec/lib/gitlab/static_site_editor/config/file_config/entry/global_spec.rb
deleted file mode 100644
index 9ce6007165b..00000000000
--- a/spec/lib/gitlab/static_site_editor/config/file_config/entry/global_spec.rb
+++ /dev/null
@@ -1,245 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::StaticSiteEditor::Config::FileConfig::Entry::Global do
- let(:global) { described_class.new(hash) }
- let(:default_image_upload_path_value) { 'source/images' }
-
- let(:default_mounts_value) do
- [
- {
- source: 'source',
- target: ''
- }
- ]
- end
-
- let(:default_static_site_generator_value) { 'middleman' }
-
- shared_examples_for 'valid default configuration' do
- describe '#compose!' do
- before do
- global.compose!
- end
-
- it 'creates nodes hash' do
- expect(global.descendants).to be_an Array
- end
-
- it 'creates node object for each entry' do
- expect(global.descendants.count).to eq 3
- end
-
- it 'creates node object using valid class' do
- expect(global.descendants.map(&:class)).to match_array(expected_node_object_classes)
- end
-
- it 'sets a description containing "Static Site Editor" for all nodes' do
- expect(global.descendants.map(&:description)).to all(match(/Static Site Editor/))
- end
-
- describe '#leaf?' do
- it 'is not leaf' do
- expect(global).not_to be_leaf
- end
- end
- end
-
- context 'when not composed' do
- describe '#static_site_generator_value' do
- it 'returns nil' do
- expect(global.static_site_generator_value).to be nil
- end
- end
-
- describe '#leaf?' do
- it 'is leaf' do
- expect(global).to be_leaf
- end
- end
- end
-
- context 'when composed' do
- before do
- global.compose!
- end
-
- describe '#errors' do
- it 'has no errors' do
- expect(global.errors).to be_empty
- end
- end
-
- describe '#image_upload_path_value' do
- it 'returns correct values' do
- expect(global.image_upload_path_value).to eq(default_image_upload_path_value)
- end
- end
-
- describe '#mounts_value' do
- it 'returns correct values' do
- expect(global.mounts_value).to eq(default_mounts_value)
- end
- end
-
- describe '#static_site_generator_value' do
- it 'returns correct values' do
- expect(global.static_site_generator_value).to eq(default_static_site_generator_value)
- end
- end
- end
- end
-
- describe '.nodes' do
- it 'returns a hash' do
- expect(described_class.nodes).to be_a(Hash)
- end
-
- context 'when filtering all the entry/node names' do
- it 'contains the expected node names' do
- expected_node_names = %i[
- image_upload_path
- mounts
- static_site_generator
- ]
- expect(described_class.nodes.keys).to match_array(expected_node_names)
- end
- end
- end
-
- context 'when configuration is valid' do
- context 'when some entries defined' do
- let(:expected_node_object_classes) do
- [
- Gitlab::StaticSiteEditor::Config::FileConfig::Entry::ImageUploadPath,
- Gitlab::StaticSiteEditor::Config::FileConfig::Entry::Mounts,
- Gitlab::StaticSiteEditor::Config::FileConfig::Entry::StaticSiteGenerator
- ]
- end
-
- let(:hash) do
- {
- image_upload_path: default_image_upload_path_value,
- mounts: default_mounts_value,
- static_site_generator: default_static_site_generator_value
- }
- end
-
- it_behaves_like 'valid default configuration'
- end
- end
-
- context 'when value is an empty hash' do
- let(:expected_node_object_classes) do
- [
- Gitlab::Config::Entry::Unspecified,
- Gitlab::Config::Entry::Unspecified,
- Gitlab::Config::Entry::Unspecified
- ]
- end
-
- let(:hash) { {} }
-
- it_behaves_like 'valid default configuration'
- end
-
- context 'when configuration is not valid' do
- before do
- global.compose!
- end
-
- context 'when a single entry is invalid' do
- let(:hash) do
- { image_upload_path: { not_a_string: true } }
- end
-
- describe '#errors' do
- it 'reports errors' do
- expect(global.errors)
- .to include 'image_upload_path config should be a string'
- end
- end
- end
-
- context 'when a multiple entries are invalid' do
- let(:hash) do
- {
- image_upload_path: { not_a_string: true },
- static_site_generator: { not_a_string: true }
- }
- end
-
- describe '#errors' do
- it 'reports errors' do
- expect(global.errors)
- .to match_array([
- 'image_upload_path config should be a string',
- 'static_site_generator config should be a string',
- "static_site_generator config should be 'middleman'"
- ])
- end
- end
- end
-
- context 'when there is an invalid key' do
- let(:hash) do
- { invalid_key: true }
- end
-
- describe '#errors' do
- it 'reports errors' do
- expect(global.errors)
- .to include 'global config contains unknown keys: invalid_key'
- end
- end
- end
- end
-
- context 'when value is not a hash' do
- let(:hash) { [] }
-
- describe '#valid?' do
- it 'is not valid' do
- expect(global).not_to be_valid
- end
- end
-
- describe '#errors' do
- it 'returns error about invalid type' do
- expect(global.errors.first).to match /should be a hash/
- end
- end
- end
-
- describe '#specified?' do
- it 'is concrete entry that is defined' do
- expect(global.specified?).to be true
- end
- end
-
- describe '#[]' do
- before do
- global.compose!
- end
-
- let(:hash) do
- { static_site_generator: default_static_site_generator_value }
- end
-
- context 'when entry exists' do
- it 'returns correct entry' do
- expect(global[:static_site_generator])
- .to be_an_instance_of Gitlab::StaticSiteEditor::Config::FileConfig::Entry::StaticSiteGenerator
- expect(global[:static_site_generator].value).to eq default_static_site_generator_value
- end
- end
-
- context 'when entry does not exist' do
- it 'always return unspecified node' do
- expect(global[:some][:unknown][:node])
- .not_to be_specified
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/static_site_editor/config/file_config/entry/image_upload_path_spec.rb b/spec/lib/gitlab/static_site_editor/config/file_config/entry/image_upload_path_spec.rb
deleted file mode 100644
index c2b7fbf6f98..00000000000
--- a/spec/lib/gitlab/static_site_editor/config/file_config/entry/image_upload_path_spec.rb
+++ /dev/null
@@ -1,38 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::StaticSiteEditor::Config::FileConfig::Entry::ImageUploadPath do
- subject(:image_upload_path_entry) { described_class.new(config) }
-
- describe 'validations' do
- context 'with a valid config' do
- let(:config) { 'an-image-upload-path' }
-
- it { is_expected.to be_valid }
-
- describe '#value' do
- it 'returns a image_upload_path key' do
- expect(image_upload_path_entry.value).to eq config
- end
- end
- end
-
- context 'with an invalid config' do
- let(:config) { { not_a_string: true } }
-
- it { is_expected.not_to be_valid }
-
- it 'reports errors about wrong type' do
- expect(image_upload_path_entry.errors)
- .to include 'image upload path config should be a string'
- end
- end
- end
-
- describe '.default' do
- it 'returns default image_upload_path' do
- expect(described_class.default).to eq 'source/images'
- end
- end
-end
diff --git a/spec/lib/gitlab/static_site_editor/config/file_config/entry/mount_spec.rb b/spec/lib/gitlab/static_site_editor/config/file_config/entry/mount_spec.rb
deleted file mode 100644
index 04248fc60a5..00000000000
--- a/spec/lib/gitlab/static_site_editor/config/file_config/entry/mount_spec.rb
+++ /dev/null
@@ -1,101 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::StaticSiteEditor::Config::FileConfig::Entry::Mount do
- subject(:entry) { described_class.new(config) }
-
- describe 'validations' do
- context 'with a valid config' do
- context 'and target is a non-empty string' do
- let(:config) do
- {
- source: 'source',
- target: 'sub-site'
- }
- end
-
- it { is_expected.to be_valid }
-
- describe '#value' do
- it 'returns mount configuration' do
- expect(entry.value).to eq config
- end
- end
- end
-
- context 'and target is an empty string' do
- let(:config) do
- {
- source: 'source',
- target: ''
- }
- end
-
- it { is_expected.to be_valid }
-
- describe '#value' do
- it 'returns mount configuration' do
- expect(entry.value).to eq config
- end
- end
- end
- end
-
- context 'with an invalid config' do
- context 'when source is not a string' do
- let(:config) { { source: 123, target: 'target' } }
-
- it { is_expected.not_to be_valid }
-
- it 'reports error' do
- expect(entry.errors)
- .to include 'mount source should be a string'
- end
- end
-
- context 'when source is not present' do
- let(:config) { { target: 'target' } }
-
- it { is_expected.not_to be_valid }
-
- it 'reports error' do
- expect(entry.errors)
- .to include "mount source can't be blank"
- end
- end
-
- context 'when target is not a string' do
- let(:config) { { source: 'source', target: 123 } }
-
- it { is_expected.not_to be_valid }
-
- it 'reports error' do
- expect(entry.errors)
- .to include 'mount target should be a string'
- end
- end
-
- context 'when there is an unknown key present' do
- let(:config) { { test: 100 } }
-
- it { is_expected.not_to be_valid }
-
- it 'reports error' do
- expect(entry.errors)
- .to include 'mount config contains unknown keys: test'
- end
- end
- end
- end
-
- describe '.default' do
- it 'returns default mount' do
- expect(described_class.default)
- .to eq({
- source: 'source',
- target: ''
- })
- end
- end
-end
diff --git a/spec/lib/gitlab/static_site_editor/config/file_config/entry/mounts_spec.rb b/spec/lib/gitlab/static_site_editor/config/file_config/entry/mounts_spec.rb
deleted file mode 100644
index 0ae2ece9474..00000000000
--- a/spec/lib/gitlab/static_site_editor/config/file_config/entry/mounts_spec.rb
+++ /dev/null
@@ -1,53 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::StaticSiteEditor::Config::FileConfig::Entry::Mounts do
- subject(:entry) { described_class.new(config) }
-
- describe 'validations' do
- context 'with a valid config' do
- let(:config) do
- [
- {
- source: 'source',
- target: ''
- },
- {
- source: 'sub-site/source',
- target: 'sub-site'
- }
- ]
- end
-
- it { is_expected.to be_valid }
-
- describe '#value' do
- it 'returns mounts configuration' do
- expect(entry.value).to eq config
- end
- end
- end
-
- context 'with an invalid config' do
- let(:config) { { not_an_array: true } }
-
- it { is_expected.not_to be_valid }
-
- it 'reports errors about wrong type' do
- expect(entry.errors)
- .to include 'mounts config should be a array'
- end
- end
- end
-
- describe '.default' do
- it 'returns default mounts' do
- expect(described_class.default)
- .to eq([{
- source: 'source',
- target: ''
- }])
- end
- end
-end
diff --git a/spec/lib/gitlab/static_site_editor/config/file_config/entry/static_site_generator_spec.rb b/spec/lib/gitlab/static_site_editor/config/file_config/entry/static_site_generator_spec.rb
deleted file mode 100644
index a9c730218cf..00000000000
--- a/spec/lib/gitlab/static_site_editor/config/file_config/entry/static_site_generator_spec.rb
+++ /dev/null
@@ -1,50 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::StaticSiteEditor::Config::FileConfig::Entry::StaticSiteGenerator do
- let(:static_site_generator) { described_class.new(config) }
-
- describe 'validations' do
- context 'when value is valid' do
- let(:config) { 'middleman' }
-
- describe '#value' do
- it 'returns a static_site_generator key' do
- expect(static_site_generator.value).to eq config
- end
- end
-
- describe '#valid?' do
- it 'is valid' do
- expect(static_site_generator).to be_valid
- end
- end
- end
-
- context 'when value is invalid' do
- let(:config) { 'not-a-valid-generator' }
-
- describe '#valid?' do
- it 'is not valid' do
- expect(static_site_generator).not_to be_valid
- end
- end
- end
-
- context 'when value has a wrong type' do
- let(:config) { { not_a_string: true } }
-
- it 'reports errors about wrong type' do
- expect(static_site_generator.errors)
- .to include 'static site generator config should be a string'
- end
- end
- end
-
- describe '.default' do
- it 'returns default static_site_generator' do
- expect(described_class.default).to eq 'middleman'
- end
- end
-end
diff --git a/spec/lib/gitlab/static_site_editor/config/file_config_spec.rb b/spec/lib/gitlab/static_site_editor/config/file_config_spec.rb
deleted file mode 100644
index d444d4f1df7..00000000000
--- a/spec/lib/gitlab/static_site_editor/config/file_config_spec.rb
+++ /dev/null
@@ -1,87 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::StaticSiteEditor::Config::FileConfig do
- let(:config) do
- described_class.new(yml)
- end
-
- context 'when config is valid' do
- context 'when config has valid values' do
- let(:yml) do
- <<-EOS
- static_site_generator: middleman
- EOS
- end
-
- describe '#to_hash_with_defaults' do
- it 'returns hash created from string' do
- expect(config.to_hash_with_defaults.fetch(:static_site_generator)).to eq 'middleman'
- end
- end
-
- describe '#valid?' do
- it 'is valid' do
- expect(config).to be_valid
- end
-
- it 'has no errors' do
- expect(config.errors).to be_empty
- end
- end
- end
- end
-
- context 'when a config entry has an empty value' do
- let(:yml) { 'static_site_generator: ' }
-
- describe '#to_hash' do
- it 'returns default value' do
- expect(config.to_hash_with_defaults.fetch(:static_site_generator)).to eq 'middleman'
- end
- end
-
- describe '#valid?' do
- it 'is valid' do
- expect(config).to be_valid
- end
-
- it 'has no errors' do
- expect(config.errors).to be_empty
- end
- end
- end
-
- context 'when config is invalid' do
- context 'when yml is incorrect' do
- let(:yml) { '// invalid' }
-
- describe '.new' do
- it 'raises error' do
- expect { config }.to raise_error(described_class::ConfigError, /Invalid configuration format/)
- end
- end
- end
-
- context 'when config value exists but is not a valid value' do
- let(:yml) { 'static_site_generator: "unsupported-generator"' }
-
- describe '#valid?' do
- it 'is not valid' do
- expect(config).not_to be_valid
- end
-
- it 'has errors' do
- expect(config.errors).not_to be_empty
- end
- end
-
- describe '#errors' do
- it 'returns an array of strings' do
- expect(config.errors).to all(be_an_instance_of(String))
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb b/spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb
deleted file mode 100644
index 8cd3feba339..00000000000
--- a/spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb
+++ /dev/null
@@ -1,127 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::StaticSiteEditor::Config::GeneratedConfig do
- subject(:config) { described_class.new(repository, ref, path, return_url) }
-
- let_it_be(:namespace) { create(:namespace, name: 'namespace') }
- let_it_be(:root_group) { create(:group, name: 'group') }
- let_it_be(:subgroup) { create(:group, name: 'subgroup', parent: root_group) }
- let_it_be(:project) { create(:project, :public, :repository, name: 'project', namespace: namespace) }
- let_it_be(:project_with_subgroup) { create(:project, :public, :repository, name: 'project', group: subgroup) }
- let_it_be(:repository) { project.repository }
-
- let(:ref) { 'master' }
- let(:path) { 'README.md' }
- let(:return_url) { 'http://example.com' }
-
- describe '#data' do
- subject { config.data }
-
- it 'returns data for the frontend component' do
- is_expected
- .to match({
- branch: 'master',
- commit_id: repository.commit.id,
- namespace: 'namespace',
- path: 'README.md',
- project: 'project',
- project_id: project.id,
- return_url: 'http://example.com',
- is_supported_content: true,
- base_url: '/namespace/project/-/sse/master%2FREADME.md',
- merge_requests_illustration_path: %r{illustrations/merge_requests}
- })
- end
-
- context 'when namespace is a subgroup' do
- let(:repository) { project_with_subgroup.repository }
-
- it 'returns data for the frontend component' do
- is_expected.to include(
- namespace: 'group/subgroup',
- project: 'project',
- base_url: '/group/subgroup/project/-/sse/master%2FREADME.md'
- )
- end
- end
-
- context 'when file has .md.erb extension' do
- before do
- repository.create_file(
- project.creator,
- path,
- '',
- message: 'message',
- branch_name: ref
- )
- end
-
- let(:ref) { 'main' }
- let(:path) { 'README.md.erb' }
-
- it { is_expected.to include(branch: ref, is_supported_content: true) }
- end
-
- context 'when file path is nested' do
- let(:path) { 'lib/README.md' }
-
- it { is_expected.to include(base_url: '/namespace/project/-/sse/master%2Flib%2FREADME.md') }
- end
-
- context 'when branch is not master or main' do
- let(:ref) { 'my-branch' }
-
- it { is_expected.to include(is_supported_content: false) }
- end
-
- context 'when file does not have a markdown extension' do
- let(:path) { 'README.txt' }
-
- it { is_expected.to include(is_supported_content: false) }
- end
-
- context 'when file does not have an extension' do
- let(:path) { 'README' }
-
- it { is_expected.to include(is_supported_content: false) }
- end
-
- context 'when file does not exist' do
- let(:path) { 'UNKNOWN.md' }
-
- it { is_expected.to include(is_supported_content: false) }
- end
-
- context 'when repository is empty' do
- let(:repository) { create(:project_empty_repo).repository }
-
- it { is_expected.to include(is_supported_content: false) }
- end
-
- context 'when return_url is not a valid URL' do
- let(:return_url) { 'example.com' }
-
- it { is_expected.to include(return_url: nil) }
- end
-
- context 'when return_url has a javascript scheme' do
- let(:return_url) { 'javascript:alert(document.domain)' }
-
- it { is_expected.to include(return_url: nil) }
- end
-
- context 'when return_url is missing' do
- let(:return_url) { nil }
-
- it { is_expected.to include(return_url: nil) }
- end
-
- context 'when a commit for the ref cannot be found' do
- let(:ref) { 'nonexistent-ref' }
-
- it { is_expected.to include(commit_id: nil) }
- end
- end
-end
diff --git a/spec/lib/gitlab/subscription_portal_spec.rb b/spec/lib/gitlab/subscription_portal_spec.rb
index 8d5a39baf77..098a58bff83 100644
--- a/spec/lib/gitlab/subscription_portal_spec.rb
+++ b/spec/lib/gitlab/subscription_portal_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe ::Gitlab::SubscriptionPortal do
using RSpec::Parameterized::TableSyntax
+ include SubscriptionPortalHelper
let(:env_value) { nil }
@@ -13,9 +14,9 @@ RSpec.describe ::Gitlab::SubscriptionPortal do
describe '.default_subscriptions_url' do
where(:test, :development, :result) do
- false | false | 'https://customers.gitlab.com'
- false | true | 'https://customers.staging.gitlab.com'
- true | false | 'https://customers.staging.gitlab.com'
+ false | false | prod_customers_url
+ false | true | staging_customers_url
+ true | false | staging_customers_url
end
before do
@@ -34,7 +35,7 @@ RSpec.describe ::Gitlab::SubscriptionPortal do
subject { described_class.subscriptions_url }
context 'when CUSTOMER_PORTAL_URL ENV is unset' do
- it { is_expected.to eq('https://customers.staging.gitlab.com') }
+ it { is_expected.to eq(staging_customers_url) }
end
context 'when CUSTOMER_PORTAL_URL ENV is set' do
@@ -54,17 +55,17 @@ RSpec.describe ::Gitlab::SubscriptionPortal do
context 'url methods' do
where(:method_name, :result) do
- :default_subscriptions_url | 'https://customers.staging.gitlab.com'
- :payment_form_url | 'https://customers.staging.gitlab.com/payment_forms/cc_validation'
- :payment_validation_form_id | 'payment_method_validation'
- :registration_validation_form_url | 'https://customers.staging.gitlab.com/payment_forms/cc_registration_validation'
- :subscriptions_graphql_url | 'https://customers.staging.gitlab.com/graphql'
- :subscriptions_more_minutes_url | 'https://customers.staging.gitlab.com/buy_pipeline_minutes'
- :subscriptions_more_storage_url | 'https://customers.staging.gitlab.com/buy_storage'
- :subscriptions_manage_url | 'https://customers.staging.gitlab.com/subscriptions'
- :subscriptions_instance_review_url | 'https://customers.staging.gitlab.com/instance_review'
- :subscriptions_gitlab_plans_url | 'https://customers.staging.gitlab.com/gitlab_plans'
- :edit_account_url | 'https://customers.staging.gitlab.com/customers/edit'
+ :default_subscriptions_url | staging_customers_url
+ :payment_form_url | "#{staging_customers_url}/payment_forms/cc_validation"
+ :payment_validation_form_id | 'payment_method_validation'
+ :registration_validation_form_url | "#{staging_customers_url}/payment_forms/cc_registration_validation"
+ :subscriptions_graphql_url | "#{staging_customers_url}/graphql"
+ :subscriptions_more_minutes_url | "#{staging_customers_url}/buy_pipeline_minutes"
+ :subscriptions_more_storage_url | "#{staging_customers_url}/buy_storage"
+ :subscriptions_manage_url | "#{staging_customers_url}/subscriptions"
+ :subscriptions_instance_review_url | "#{staging_customers_url}/instance_review"
+ :subscriptions_gitlab_plans_url | "#{staging_customers_url}/gitlab_plans"
+ :edit_account_url | "#{staging_customers_url}/customers/edit"
end
with_them do
@@ -79,7 +80,10 @@ RSpec.describe ::Gitlab::SubscriptionPortal do
let(:group_id) { 153 }
- it { is_expected.to eq("https://customers.staging.gitlab.com/gitlab/namespaces/#{group_id}/extra_seats") }
+ it do
+ url = "#{staging_customers_url}/gitlab/namespaces/#{group_id}/extra_seats"
+ is_expected.to eq(url)
+ end
end
describe '.upgrade_subscription_url' do
@@ -88,7 +92,10 @@ RSpec.describe ::Gitlab::SubscriptionPortal do
let(:group_id) { 153 }
let(:plan_id) { 5 }
- it { is_expected.to eq("https://customers.staging.gitlab.com/gitlab/namespaces/#{group_id}/upgrade/#{plan_id}") }
+ it do
+ url = "#{staging_customers_url}/gitlab/namespaces/#{group_id}/upgrade/#{plan_id}"
+ is_expected.to eq(url)
+ end
end
describe '.renew_subscription_url' do
@@ -96,6 +103,9 @@ RSpec.describe ::Gitlab::SubscriptionPortal do
let(:group_id) { 153 }
- it { is_expected.to eq("https://customers.staging.gitlab.com/gitlab/namespaces/#{group_id}/renew") }
+ it do
+ url = "#{staging_customers_url}/gitlab/namespaces/#{group_id}/renew"
+ is_expected.to eq(url)
+ end
end
end
diff --git a/spec/lib/gitlab/themes_spec.rb b/spec/lib/gitlab/themes_spec.rb
index c9dc23d7c14..a41f7d927fe 100644
--- a/spec/lib/gitlab/themes_spec.rb
+++ b/spec/lib/gitlab/themes_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::Themes, lib: true do
css = described_class.body_classes
expect(css).to include('ui-indigo')
- expect(css).to include('ui-dark')
+ expect(css).to include('ui-gray')
expect(css).to include('ui-blue')
end
end
@@ -16,7 +16,7 @@ RSpec.describe Gitlab::Themes, lib: true do
describe '.by_id' do
it 'returns a Theme by its ID' do
expect(described_class.by_id(1).name).to eq 'Indigo'
- expect(described_class.by_id(3).name).to eq 'Light'
+ expect(described_class.by_id(3).name).to eq 'Light Gray'
end
end
diff --git a/spec/lib/gitlab/tracking/standard_context_spec.rb b/spec/lib/gitlab/tracking/standard_context_spec.rb
index c88b0af30f6..508b33949a8 100644
--- a/spec/lib/gitlab/tracking/standard_context_spec.rb
+++ b/spec/lib/gitlab/tracking/standard_context_spec.rb
@@ -92,6 +92,34 @@ RSpec.describe Gitlab::Tracking::StandardContext do
end
end
+ context 'with incorrect argument type' do
+ context 'when standard_context_type_check FF is disabled' do
+ before do
+ stub_feature_flags(standard_context_type_check: false)
+ end
+
+ subject { described_class.new(project: create(:group)) }
+
+ it 'does not call `track_and_raise_for_dev_exception`' do
+ expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception)
+ snowplow_context
+ end
+ end
+
+ context 'when standard_context_type_check FF is enabled' do
+ before do
+ stub_feature_flags(standard_context_type_check: true)
+ end
+
+ subject { described_class.new(project: create(:group)) }
+
+ it 'does call `track_and_raise_for_dev_exception`' do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
+ snowplow_context
+ end
+ end
+ end
+
it 'contains user id' do
expect(snowplow_context.to_json[:data].keys).to include(:user_id)
end
diff --git a/spec/lib/gitlab/updated_notes_paginator_spec.rb b/spec/lib/gitlab/updated_notes_paginator_spec.rb
deleted file mode 100644
index ce6a7719fb4..00000000000
--- a/spec/lib/gitlab/updated_notes_paginator_spec.rb
+++ /dev/null
@@ -1,57 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::UpdatedNotesPaginator do
- let(:issue) { create(:issue) }
-
- let(:project) { issue.project }
- let(:finder) { NotesFinder.new(user, target: issue, last_fetched_at: last_fetched_at) }
- let(:user) { issue.author }
-
- let!(:page_1) { create_list(:note, 2, noteable: issue, project: project, updated_at: 2.days.ago) }
- let!(:page_2) { [create(:note, noteable: issue, project: project, updated_at: 1.day.ago)] }
-
- let(:page_1_boundary) { page_1.last.updated_at + NotesFinder::FETCH_OVERLAP }
-
- around do |example|
- freeze_time do
- example.run
- end
- end
-
- before do
- stub_const("Gitlab::UpdatedNotesPaginator::LIMIT", 2)
- end
-
- subject(:paginator) { described_class.new(finder.execute, last_fetched_at: last_fetched_at) }
-
- describe 'last_fetched_at: start of time' do
- let(:last_fetched_at) { Time.at(0) }
-
- it 'calculates the first page of notes', :aggregate_failures do
- expect(paginator.notes).to match_array(page_1)
- expect(paginator.metadata).to match(
- more: true,
- last_fetched_at: microseconds(page_1_boundary)
- )
- end
- end
-
- describe 'last_fetched_at: start of final page' do
- let(:last_fetched_at) { page_1_boundary }
-
- it 'calculates a final page', :aggregate_failures do
- expect(paginator.notes).to match_array(page_2)
- expect(paginator.metadata).to match(
- more: false,
- last_fetched_at: microseconds(Time.zone.now)
- )
- end
- end
-
- # Convert a time to an integer number of microseconds
- def microseconds(time)
- (time.to_i * 1_000_000) + time.usec
- end
-end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_imported_projects_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_imported_projects_metric_spec.rb
index 4c86410d609..b7da9b27e19 100644
--- a/spec/lib/gitlab/usage/metrics/instrumentations/count_imported_projects_metric_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_imported_projects_metric_spec.rb
@@ -4,15 +4,30 @@ require 'spec_helper'
RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountImportedProjectsMetric do
let_it_be(:user) { create(:user) }
- let_it_be(:gitea_imports) do
- create_list(:project, 3, import_type: 'gitea', creator_id: user.id, created_at: 3.weeks.ago)
+
+ # Project records have to be created chronologically, because of
+ # metric SQL query optimizations that rely on the fact that `id`s
+ # increment monotonically over time.
+ #
+ # See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/89701
+ let_it_be(:old_import) { create(:project, import_type: 'gitea', creator_id: user.id, created_at: 2.months.ago) }
+ let_it_be(:gitea_import_1) { create(:project, import_type: 'gitea', creator_id: user.id, created_at: 21.days.ago) }
+
+ let_it_be(:gitea_import_2) do
+ create(:project, import_type: 'gitea', creator_id: user.id, created_at: 20.days.ago)
end
- let_it_be(:bitbucket_imports) do
- create_list(:project, 2, import_type: 'bitbucket', creator_id: user.id, created_at: 3.weeks.ago)
+ let_it_be(:gitea_import_3) do
+ create(:project, import_type: 'gitea', creator_id: user.id, created_at: 19.days.ago)
end
- let_it_be(:old_import) { create(:project, import_type: 'gitea', creator_id: user.id, created_at: 2.months.ago) }
+ let_it_be(:bitbucket_import_1) do
+ create(:project, import_type: 'bitbucket', creator_id: user.id, created_at: 2.weeks.ago)
+ end
+
+ let_it_be(:bitbucket_import_2) do
+ create(:project, import_type: 'bitbucket', creator_id: user.id, created_at: 1.week.ago)
+ end
context 'with import_type gitea' do
context 'with all time frame' do
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_imported_projects_total_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_imported_projects_total_metric_spec.rb
new file mode 100644
index 00000000000..bfc4240def6
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_imported_projects_total_metric_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountImportedProjectsTotalMetric do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:gitea_imports) do
+ create_list(:project, 3, import_type: 'gitea', creator_id: user.id, created_at: 3.weeks.ago)
+ end
+
+ let_it_be(:bitbucket_imports) do
+ create_list(:project, 2, import_type: 'bitbucket', creator_id: user.id, created_at: 3.weeks.ago)
+ end
+
+ let_it_be(:old_import) { create(:project, import_type: 'gitea', creator_id: user.id, created_at: 2.months.ago) }
+
+ let_it_be(:bulk_import_projects) do
+ create_list(:bulk_import_entity, 3, source_type: 'project_entity', created_at: 3.weeks.ago)
+ end
+
+ let_it_be(:bulk_import_groups) do
+ create_list(:bulk_import_entity, 3, source_type: 'group_entity', created_at: 3.weeks.ago)
+ end
+
+ let_it_be(:old_bulk_import_project) do
+ create(:bulk_import_entity, source_type: 'project_entity', created_at: 2.months.ago)
+ end
+
+ before do
+ allow(ApplicationRecord.connection).to receive(:transaction_open?).and_return(false)
+ end
+
+ context 'with all time frame' do
+ let(:expected_value) { 10 }
+ let(:expected_query) do
+ "SELECT (SELECT COUNT(\"projects\".\"id\") FROM \"projects\" WHERE \"projects\".\"import_type\""\
+ " IN ('gitlab_project', 'gitlab', 'github', 'bitbucket', 'bitbucket_server', 'gitea', 'git', 'manifest',"\
+ " 'gitlab_migration'))"\
+ " + (SELECT COUNT(\"bulk_import_entities\".\"id\") FROM \"bulk_import_entities\""\
+ " WHERE \"bulk_import_entities\".\"source_type\" = 1)"
+ end
+
+ it_behaves_like 'a correct instrumented metric value and query', time_frame: 'all'
+ end
+
+ context 'for 28d time frame' do
+ let(:expected_value) { 8 }
+ let(:start) { 30.days.ago.to_s(:db) }
+ let(:finish) { 2.days.ago.to_s(:db) }
+ let(:expected_query) do
+ "SELECT (SELECT COUNT(\"projects\".\"id\") FROM \"projects\" WHERE \"projects\".\"import_type\""\
+ " IN ('gitlab_project', 'gitlab', 'github', 'bitbucket', 'bitbucket_server', 'gitea', 'git', 'manifest',"\
+ " 'gitlab_migration')"\
+ " AND \"projects\".\"created_at\" BETWEEN '#{start}' AND '#{finish}')"\
+ " + (SELECT COUNT(\"bulk_import_entities\".\"id\") FROM \"bulk_import_entities\""\
+ " WHERE \"bulk_import_entities\".\"source_type\" = 1 AND \"bulk_import_entities\".\"created_at\""\
+ " BETWEEN '#{start}' AND '#{finish}')"
+ end
+
+ it_behaves_like 'a correct instrumented metric value and query', time_frame: '28d'
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/jira_imports_total_imported_issues_count_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/jira_imports_total_imported_issues_count_metric_spec.rb
new file mode 100644
index 00000000000..f7a53cd3dcc
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/jira_imports_total_imported_issues_count_metric_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::JiraImportsTotalImportedIssuesCountMetric do
+ let_it_be(:jira_import_state_1) { create(:jira_import_state, :finished, imported_issues_count: 3) }
+ let_it_be(:jira_import_state_2) { create(:jira_import_state, :finished, imported_issues_count: 2) }
+
+ let(:expected_value) { 5 }
+ let(:expected_query) do
+ 'SELECT SUM("jira_imports"."imported_issues_count") FROM "jira_imports" WHERE "jira_imports"."status" = 4'
+ end
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: 'all' }
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/numbers_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/numbers_metric_spec.rb
new file mode 100644
index 00000000000..180c76d56f3
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/numbers_metric_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::NumbersMetric do
+ subject do
+ described_class.tap do |metric_class|
+ metric_class.operation :add
+ metric_class.data do |time_frame|
+ [
+ Gitlab::Usage::Metrics::Instrumentations::CountIssuesMetric.new(time_frame: time_frame).value,
+ Gitlab::Usage::Metrics::Instrumentations::CountBoardsMetric.new(time_frame: time_frame).value
+ ]
+ end
+ end.new(time_frame: 'all')
+ end
+
+ describe '#value' do
+ let_it_be(:issue_1) { create(:issue) }
+ let_it_be(:issue_2) { create(:issue) }
+ let_it_be(:issue_3) { create(:issue) }
+ let_it_be(:issues) { Issue.all }
+
+ let_it_be(:board_1) { create(:board) }
+ let_it_be(:boards) { Board.all }
+
+ before do
+ allow(Issue.connection).to receive(:transaction_open?).and_return(false)
+ end
+
+ it 'calculates a correct result' do
+ expect(subject.value).to eq(4)
+ end
+
+ context 'with availability defined' do
+ subject do
+ described_class.tap do |metric_class|
+ metric_class.operation :add
+ metric_class.data { [1] }
+ metric_class.available? { false }
+ end.new(time_frame: 'all')
+ end
+
+ it 'responds to #available? properly' do
+ expect(subject.available?).to eq(false)
+ end
+ end
+
+ context 'with availability not defined' do
+ subject do
+ Class.new(described_class) do
+ operation :add
+ data { [] }
+ end.new(time_frame: 'all')
+ end
+
+ it 'responds to #available? properly' do
+ expect(subject.available?).to eq(true)
+ end
+ end
+ end
+
+ context 'with unimplemented operation method used' do
+ subject do
+ described_class.tap do |metric_class|
+ metric_class.operation :invalid_operation
+ metric_class.data { [] }
+ end.new(time_frame: 'all')
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(described_class::UnimplementedOperationError)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/unique_active_users_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/unique_active_users_metric_spec.rb
new file mode 100644
index 00000000000..8a0ce61de74
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/unique_active_users_metric_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::UniqueActiveUsersMetric do
+ let_it_be(:user1) { create(:user, last_activity_on: 1.day.ago) }
+ let_it_be(:user2) { create(:user, last_activity_on: 5.days.ago) }
+ let_it_be(:user3) { create(:user, last_activity_on: 50.days.ago) }
+ let_it_be(:user4) { create(:user) }
+ let_it_be(:user5) { create(:user, user_type: 1, last_activity_on: 5.days.ago ) } # support bot
+ let_it_be(:user6) { create(:user, state: 'blocked') }
+
+ context '28d' do
+ let(:start) { 30.days.ago.to_date.to_s }
+ let(:finish) { 2.days.ago.to_date.to_s }
+ let(:expected_value) { 1 }
+ let(:expected_query) do
+ "SELECT COUNT(\"users\".\"id\") FROM \"users\" WHERE (\"users\".\"state\" IN ('active')) AND " \
+ "(\"users\".\"user_type\" IS NULL OR \"users\".\"user_type\" IN (6, 4)) AND \"users\".\"last_activity_on\" " \
+ "BETWEEN '#{start}' AND '#{finish}'"
+ end
+
+ it_behaves_like 'a correct instrumented metric value and query', { time_frame: '28d' }
+ end
+
+ context 'all' do
+ let(:expected_value) { 4 }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'all' }
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/name_suggestion_spec.rb b/spec/lib/gitlab/usage/metrics/name_suggestion_spec.rb
index 6955fbcaf5a..9ee8bc6b568 100644
--- a/spec/lib/gitlab/usage/metrics/name_suggestion_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/name_suggestion_spec.rb
@@ -71,9 +71,19 @@ RSpec.describe Gitlab::Usage::Metrics::NameSuggestion do
end
end
+ context 'for average metrics' do
+ it_behaves_like 'name suggestion' do
+ # corresponding metric is collected with average(Ci::Pipeline, :duration)
+ let(:key_path) { 'counts.ci_pipeline_duration' }
+ let(:operation) { :average }
+ let(:relation) { Ci::Pipeline }
+ let(:column) { :duration}
+ let(:name_suggestion) { /average_duration_from_ci_pipelines/ }
+ end
+ end
+
context 'for redis metrics' do
it_behaves_like 'name suggestion' do
- # corresponding metric is collected with redis_usage_data { unique_visit_service.unique_visits_for(targets: :analytics) }
let(:operation) { :redis }
let(:column) { nil }
let(:relation) { nil }
diff --git a/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb b/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
index f81ad9b193d..167dba9b57d 100644
--- a/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
@@ -77,8 +77,7 @@ RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::Generator do
context 'for redis metrics' do
it_behaves_like 'name suggestion' do
- # corresponding metric is collected with redis_usage_data { unique_visit_service.unique_visits_for(targets: :analytics) }
- let(:key_path) { 'analytics_unique_visits.analytics_unique_visits_for_any_target' }
+ let(:key_path) { 'usage_activity_by_stage_monthly.create.merge_requests_users' }
let(:name_suggestion) { /<please fill metric name, suggested format is: {subject}_{verb}{ing|ed}_{object} eg: users_creating_epics or merge_requests_viewed_in_single_file_mode>/ }
end
end
diff --git a/spec/lib/gitlab/usage/metrics/query_spec.rb b/spec/lib/gitlab/usage/metrics/query_spec.rb
index 65b8a7a046b..355d619f768 100644
--- a/spec/lib/gitlab/usage/metrics/query_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/query_spec.rb
@@ -61,6 +61,12 @@ RSpec.describe Gitlab::Usage::Metrics::Query do
end
end
+ describe '.average' do
+ it 'returns the raw SQL' do
+ expect(described_class.for(:average, Issue, :weight)).to eq('SELECT AVG("issues"."weight") FROM "issues"')
+ end
+ end
+
describe 'estimate_batch_distinct_count' do
it 'returns the raw SQL' do
expect(described_class.for(:estimate_batch_distinct_count, Issue, :author_id)).to eq('SELECT COUNT(DISTINCT "issues"."author_id") FROM "issues"')
diff --git a/spec/lib/gitlab/usage/service_ping_report_spec.rb b/spec/lib/gitlab/usage/service_ping_report_spec.rb
index e007554df4a..1e8f9db4dea 100644
--- a/spec/lib/gitlab/usage/service_ping_report_spec.rb
+++ b/spec/lib/gitlab/usage/service_ping_report_spec.rb
@@ -155,6 +155,11 @@ RSpec.describe Gitlab::Usage::ServicePingReport, :use_clean_rails_memory_store_c
memoized_constatns += Gitlab::UsageData::EE_MEMOIZED_VALUES if defined? Gitlab::UsageData::EE_MEMOIZED_VALUES
memoized_constatns.each { |v| Gitlab::UsageData.clear_memoization(v) }
stub_database_flavor_check('Cloud SQL for PostgreSQL')
+
+ # in_product_marketing_email metrics values are extracted from a single group by query
+ # to check if the queries for individual metrics return the same value as group by when the value is non-zero
+ create(:in_product_marketing_email, track: :create, series: 0, cta_clicked_at: Time.current)
+ create(:in_product_marketing_email, track: :verify, series: 0)
end
let(:service_ping_payload) { described_class.for(output: :all_metrics_values) }
diff --git a/spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb b/spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb
index c4a84445a01..01396602f29 100644
--- a/spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb
@@ -3,6 +3,7 @@
require 'spec_helper'
# If this spec fails, we need to add the new code review event to the correct aggregated metric
+# NOTE: ONLY user related metrics to be added to the aggregates - otherwise add it to the exception list
RSpec.describe 'Code review events' do
it 'the aggregated metrics contain all the code review metrics' do
path = Rails.root.join('config/metrics/aggregates/code_review.yml')
@@ -15,7 +16,7 @@ RSpec.describe 'Code review events' do
code_review_events = Gitlab::UsageDataCounters::HLLRedisCounter.events_for_category("code_review")
- exceptions = %w[i_code_review_mr_diffs i_code_review_mr_single_file_diffs i_code_review_total_suggestions_applied i_code_review_total_suggestions_added]
+ exceptions = %w[i_code_review_mr_diffs i_code_review_mr_with_invalid_approvers i_code_review_mr_single_file_diffs i_code_review_total_suggestions_applied i_code_review_total_suggestions_added i_code_review_create_note_in_ipynb_diff i_code_review_create_note_in_ipynb_diff_mr i_code_review_create_note_in_ipynb_diff_commit]
code_review_aggregated_events += exceptions
expect(code_review_events - code_review_aggregated_events).to be_empty
diff --git a/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
index 9aecb8f8b25..dbc34681660 100644
--- a/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
@@ -66,18 +66,6 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red
end
end
- context 'for SSE edit actions' do
- it_behaves_like 'tracks and counts action' do
- def track_action(params)
- described_class.track_sse_edit_action(**params)
- end
-
- def count_unique(params)
- described_class.count_sse_edit_actions(**params)
- end
- end
- end
-
it 'can return the count of actions per user deduplicated' do
described_class.track_web_ide_edit_action(author: user1)
described_class.track_live_preview_edit_action(author: user1)
diff --git a/spec/lib/gitlab/usage_data_counters/static_site_editor_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/static_site_editor_counter_spec.rb
deleted file mode 100644
index 1bf5dad1c9f..00000000000
--- a/spec/lib/gitlab/usage_data_counters/static_site_editor_counter_spec.rb
+++ /dev/null
@@ -1,14 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::UsageDataCounters::StaticSiteEditorCounter do
- it_behaves_like 'a redis usage counter', 'StaticSiteEditor', :views
- it_behaves_like 'a redis usage counter', 'StaticSiteEditor', :commits
- it_behaves_like 'a redis usage counter', 'StaticSiteEditor', :merge_requests
-
- it_behaves_like 'a redis usage counter with totals', :static_site_editor,
- views: 3,
- commits: 4,
- merge_requests: 5
-end
diff --git a/spec/lib/gitlab/usage_data_counters_spec.rb b/spec/lib/gitlab/usage_data_counters_spec.rb
index 379a2cb778d..0696b375eb5 100644
--- a/spec/lib/gitlab/usage_data_counters_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters_spec.rb
@@ -13,10 +13,10 @@ RSpec.describe Gitlab::UsageDataCounters do
describe '.count' do
subject { described_class.count(event_name) }
- let(:event_name) { 'static_site_editor_views' }
+ let(:event_name) { 'web_ide_views' }
it 'increases a view counter' do
- expect(Gitlab::UsageDataCounters::StaticSiteEditorCounter).to receive(:count).with('views')
+ expect(Gitlab::UsageDataCounters::WebIdeCounter).to receive(:count).with('views')
subject
end
diff --git a/spec/lib/gitlab/usage_data_metrics_spec.rb b/spec/lib/gitlab/usage_data_metrics_spec.rb
index 563eed75c38..485f2131d87 100644
--- a/spec/lib/gitlab/usage_data_metrics_spec.rb
+++ b/spec/lib/gitlab/usage_data_metrics_spec.rb
@@ -24,11 +24,8 @@ RSpec.describe Gitlab::UsageDataMetrics do
expect(subject).to include(:hostname)
end
- it 'includes counts keys' do
+ it 'includes counts keys', :aggregate_failures do
expect(subject[:counts]).to include(:boards)
- end
-
- it 'includes counts keys' do
expect(subject[:counts]).to include(:issues)
end
diff --git a/spec/lib/gitlab/usage_data_queries_spec.rb b/spec/lib/gitlab/usage_data_queries_spec.rb
index 7c64a31c499..2fe43c11d27 100644
--- a/spec/lib/gitlab/usage_data_queries_spec.rb
+++ b/spec/lib/gitlab/usage_data_queries_spec.rb
@@ -105,4 +105,25 @@ RSpec.describe Gitlab::UsageDataQueries do
expect(described_class.maximum_id(Project)).to eq(nil)
end
end
+
+ describe 'sent_in_product_marketing_email_count' do
+ it 'returns sql query that returns correct value' do
+ expect(described_class.sent_in_product_marketing_email_count(nil, 0, 0)).to eq(
+ 'SELECT COUNT("in_product_marketing_emails"."id") ' \
+ 'FROM "in_product_marketing_emails" ' \
+ 'WHERE "in_product_marketing_emails"."track" = 0 AND "in_product_marketing_emails"."series" = 0'
+ )
+ end
+ end
+
+ describe 'clicked_in_product_marketing_email_count' do
+ it 'returns sql query that returns correct value' do
+ expect(described_class.clicked_in_product_marketing_email_count(nil, 0, 0)).to eq(
+ 'SELECT COUNT("in_product_marketing_emails"."id") ' \
+ 'FROM "in_product_marketing_emails" ' \
+ 'WHERE "in_product_marketing_emails"."track" = 0 AND "in_product_marketing_emails"."series" = 0 ' \
+ 'AND "in_product_marketing_emails"."cta_clicked_at" IS NOT NULL'
+ )
+ end
+ end
end
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index 7edec6d13f4..790f5b638b9 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -723,7 +723,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(counts_monthly[:projects_with_alerts_created]).to eq(1)
expect(counts_monthly[:projects]).to eq(1)
expect(counts_monthly[:packages]).to eq(1)
- expect(counts_monthly[:promoted_issues]).to eq(Gitlab::UsageData::DEPRECATED_VALUE)
end
end
@@ -755,7 +754,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
it { is_expected.to include(:kubernetes_agent_gitops_sync) }
it { is_expected.to include(:kubernetes_agent_k8s_api_proxy_request) }
- it { is_expected.to include(:static_site_editor_views) }
it { is_expected.to include(:package_events_i_package_pull_package) }
it { is_expected.to include(:package_events_i_package_delete_package_by_user) }
it { is_expected.to include(:package_events_i_package_conan_push_package) }
@@ -1188,12 +1186,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
counter.track_web_ide_edit_action(author: user3, time: time - 3.days)
counter.track_snippet_editor_edit_action(author: user3)
-
- counter.track_sse_edit_action(author: user1)
- counter.track_sse_edit_action(author: user1)
- counter.track_sse_edit_action(author: user2)
- counter.track_sse_edit_action(author: user3)
- counter.track_sse_edit_action(author: user2, time: time - 3.days)
end
it 'returns the distinct count of user actions within the specified time period' do
@@ -1206,108 +1198,12 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
action_monthly_active_users_web_ide_edit: 2,
action_monthly_active_users_sfe_edit: 2,
action_monthly_active_users_snippet_editor_edit: 2,
- action_monthly_active_users_ide_edit: 3,
- action_monthly_active_users_sse_edit: 3
+ action_monthly_active_users_ide_edit: 3
}
)
end
end
- describe '.analytics_unique_visits_data' do
- subject { described_class.analytics_unique_visits_data }
-
- it 'returns the number of unique visits to pages with analytics features' do
- ::Gitlab::Analytics::UniqueVisits.analytics_events.each do |target|
- expect_any_instance_of(::Gitlab::Analytics::UniqueVisits).to receive(:unique_visits_for).with(targets: target).and_return(123)
- end
-
- expect_any_instance_of(::Gitlab::Analytics::UniqueVisits).to receive(:unique_visits_for).with(targets: :analytics).and_return(543)
- expect_any_instance_of(::Gitlab::Analytics::UniqueVisits).to receive(:unique_visits_for).with(targets: :analytics, start_date: 4.weeks.ago.to_date, end_date: Date.current).and_return(987)
-
- expect(subject).to eq({
- analytics_unique_visits: {
- 'g_analytics_contribution' => 123,
- 'g_analytics_insights' => 123,
- 'g_analytics_issues' => 123,
- 'g_analytics_productivity' => 123,
- 'g_analytics_valuestream' => 123,
- 'p_analytics_pipelines' => 123,
- 'p_analytics_code_reviews' => 123,
- 'p_analytics_valuestream' => 123,
- 'p_analytics_insights' => 123,
- 'p_analytics_issues' => 123,
- 'p_analytics_repo' => 123,
- 'i_analytics_cohorts' => 123,
- 'i_analytics_dev_ops_score' => 123,
- 'i_analytics_instance_statistics' => 123,
- 'p_analytics_ci_cd_deployment_frequency' => 123,
- 'p_analytics_ci_cd_lead_time' => 123,
- 'p_analytics_ci_cd_pipelines' => 123,
- 'p_analytics_merge_request' => 123,
- 'i_analytics_dev_ops_adoption' => 123,
- 'users_viewing_analytics_group_devops_adoption' => 123,
- 'analytics_unique_visits_for_any_target' => 543,
- 'analytics_unique_visits_for_any_target_monthly' => 987
- }
- })
- end
- end
-
- describe '.compliance_unique_visits_data' do
- subject { described_class.compliance_unique_visits_data }
-
- before do
- allow_next_instance_of(::Gitlab::Analytics::UniqueVisits) do |instance|
- ::Gitlab::Analytics::UniqueVisits.compliance_events.each do |target|
- allow(instance).to receive(:unique_visits_for).with(targets: target).and_return(123)
- end
-
- allow(instance).to receive(:unique_visits_for).with(targets: :compliance).and_return(543)
-
- allow(instance).to receive(:unique_visits_for).with(targets: :compliance, start_date: 4.weeks.ago.to_date, end_date: Date.current).and_return(987)
- end
- end
-
- it 'returns the number of unique visits to pages with compliance features' do
- expect(subject).to eq({
- compliance_unique_visits: {
- 'g_compliance_dashboard' => 123,
- 'g_compliance_audit_events' => 123,
- 'i_compliance_credential_inventory' => 123,
- 'i_compliance_audit_events' => 123,
- 'a_compliance_audit_events_api' => 123,
- 'compliance_unique_visits_for_any_target' => 543,
- 'compliance_unique_visits_for_any_target_monthly' => 987
- }
- })
- end
- end
-
- describe '.search_unique_visits_data' do
- subject { described_class.search_unique_visits_data }
-
- before do
- events = ::Gitlab::UsageDataCounters::HLLRedisCounter.events_for_category('search')
- events.each do |event|
- allow(::Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:unique_events).with(event_names: event, start_date: 7.days.ago.to_date, end_date: Date.current).and_return(123)
- end
- allow(::Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:unique_events).with(event_names: events, start_date: 7.days.ago.to_date, end_date: Date.current).and_return(543)
- allow(::Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:unique_events).with(event_names: events, start_date: 4.weeks.ago.to_date, end_date: Date.current).and_return(987)
- end
-
- it 'returns the number of unique visits to pages with search features' do
- expect(subject).to eq({
- search_unique_visits: {
- 'i_search_total' => 123,
- 'i_search_advanced' => 123,
- 'i_search_paid' => 123,
- 'search_unique_visits_for_any_target_weekly' => 543,
- 'search_unique_visits_for_any_target_monthly' => 987
- }
- })
- end
- end
-
describe 'redis_hll_counters' do
subject { described_class.redis_hll_counters }
@@ -1497,4 +1393,20 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
end
end
+
+ context 'on Gitlab.com' do
+ before do
+ allow(Gitlab).to receive(:com?).and_return(true)
+ end
+
+ describe '.system_usage_data' do
+ subject { described_class.system_usage_data }
+
+ it 'returns fallback value for disabled metrics' do
+ expect(subject[:counts][:ci_internal_pipelines]).to eq(Gitlab::Utils::UsageData::FALLBACK)
+ expect(subject[:counts][:issues_created_gitlab_alerts]).to eq(Gitlab::Utils::UsageData::FALLBACK)
+ expect(subject[:counts][:issues_created_manually_from_alerts]).to eq(Gitlab::Utils::UsageData::FALLBACK)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/utils/usage_data_spec.rb b/spec/lib/gitlab/utils/usage_data_spec.rb
index a74a9f06c6f..25ba5a3e09e 100644
--- a/spec/lib/gitlab/utils/usage_data_spec.rb
+++ b/spec/lib/gitlab/utils/usage_data_spec.rb
@@ -259,6 +259,37 @@ RSpec.describe Gitlab::Utils::UsageData do
end
end
+ describe '#average' do
+ let(:relation) { double(:relation) }
+
+ it 'returns the average when operation succeeds' do
+ allow(Gitlab::Database::BatchCount)
+ .to receive(:batch_average)
+ .with(relation, :column, batch_size: 100, start: 2, finish: 3)
+ .and_return(1)
+
+ expect(described_class.average(relation, :column, batch_size: 100, start: 2, finish: 3)).to eq(1)
+ end
+
+ it 'records duration' do
+ expect(described_class).to receive(:with_duration)
+
+ allow(Gitlab::Database::BatchCount).to receive(:batch_average).and_return(1)
+
+ described_class.average(relation, :column)
+ end
+
+ context 'when operation fails' do
+ subject { described_class.average(relation, :column) }
+
+ let(:fallback) { 15 }
+ let(:failing_class) { Gitlab::Database::BatchCount }
+ let(:failing_method) { :batch_average }
+
+ it_behaves_like 'failing hardening method'
+ end
+ end
+
describe '#histogram' do
let_it_be(:projects) { create_list(:project, 3) }
diff --git a/spec/lib/gitlab/web_hooks/rate_limiter_spec.rb b/spec/lib/gitlab/web_hooks/rate_limiter_spec.rb
new file mode 100644
index 00000000000..b25ce4ea9da
--- /dev/null
+++ b/spec/lib/gitlab/web_hooks/rate_limiter_spec.rb
@@ -0,0 +1,123 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::WebHooks::RateLimiter, :clean_gitlab_redis_rate_limiting do
+ let_it_be(:plan) { create(:default_plan) }
+ let_it_be_with_reload(:project_hook) { create(:project_hook) }
+ let_it_be_with_reload(:system_hook) { create(:system_hook) }
+ let_it_be_with_reload(:integration_hook) { create(:jenkins_integration).service_hook }
+ let_it_be(:limit) { 1 }
+
+ using RSpec::Parameterized::TableSyntax
+
+ describe '#rate_limit!' do
+ def rate_limit!(hook)
+ described_class.new(hook).rate_limit!
+ end
+
+ shared_examples 'a hook that is never rate limited' do
+ specify do
+ expect(Gitlab::ApplicationRateLimiter).not_to receive(:throttled?)
+
+ expect(rate_limit!(hook)).to eq(false)
+ end
+ end
+
+ context 'when there is no plan limit' do
+ where(:hook) { [ref(:project_hook), ref(:system_hook), ref(:integration_hook)] }
+
+ with_them { it_behaves_like 'a hook that is never rate limited' }
+ end
+
+ context 'when there is a plan limit' do
+ before_all do
+ create(:plan_limits, plan: plan, web_hook_calls: limit)
+ end
+
+ where(:hook, :limitless_hook_type) do
+ ref(:project_hook) | false
+ ref(:system_hook) | true
+ ref(:integration_hook) | true
+ end
+
+ with_them do
+ if params[:limitless_hook_type]
+ it_behaves_like 'a hook that is never rate limited'
+ else
+ it 'rate limits the hook, returning true when rate limited' do
+ expect(Gitlab::ApplicationRateLimiter).to receive(:throttled?)
+ .exactly(3).times
+ .and_call_original
+
+ freeze_time do
+ limit.times { expect(rate_limit!(hook)).to eq(false) }
+ expect(rate_limit!(hook)).to eq(true)
+ end
+
+ travel_to(1.day.from_now) do
+ expect(rate_limit!(hook)).to eq(false)
+ end
+ end
+ end
+ end
+ end
+
+ describe 'rate limit scope' do
+ it 'rate limits all hooks from the same namespace', :freeze_time do
+ create(:plan_limits, plan: plan, web_hook_calls: limit)
+ project_hook_in_different_namespace = create(:project_hook)
+ project_hook_in_same_namespace = create(:project_hook,
+ project: create(:project, namespace: project_hook.project.namespace)
+ )
+
+ limit.times { expect(rate_limit!(project_hook)).to eq(false) }
+ expect(rate_limit!(project_hook)).to eq(true)
+ expect(rate_limit!(project_hook_in_same_namespace)).to eq(true)
+ expect(rate_limit!(project_hook_in_different_namespace)).to eq(false)
+ end
+ end
+ end
+
+ describe '#rate_limited?' do
+ subject { described_class.new(hook).rate_limited? }
+
+ context 'when no plan limit has been defined' do
+ where(:hook) { [ref(:project_hook), ref(:system_hook), ref(:integration_hook)] }
+
+ with_them do
+ it { is_expected.to eq(false) }
+ end
+ end
+
+ context 'when there is a plan limit' do
+ before_all do
+ create(:plan_limits, plan: plan, web_hook_calls: limit)
+ end
+
+ context 'when hook is not rate-limited' do
+ where(:hook) { [ref(:project_hook), ref(:system_hook), ref(:integration_hook)] }
+
+ with_them do
+ it { is_expected.to eq(false) }
+ end
+ end
+
+ context 'when hook is rate-limited' do
+ before do
+ allow(Gitlab::ApplicationRateLimiter).to receive(:throttled?).and_return(true)
+ end
+
+ where(:hook, :limitless_hook_type) do
+ ref(:project_hook) | false
+ ref(:system_hook) | true
+ ref(:integration_hook) | true
+ end
+
+ with_them do
+ it { is_expected.to eq(!limitless_hook_type) }
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/marginalia_spec.rb b/spec/lib/marginalia_spec.rb
index 53048ae2e6b..693b7bd45c9 100644
--- a/spec/lib/marginalia_spec.rb
+++ b/spec/lib/marginalia_spec.rb
@@ -59,7 +59,7 @@ RSpec.describe 'Marginalia spec' do
"application" => "test",
"endpoint_id" => "MarginaliaTestController#first_user",
"correlation_id" => correlation_id,
- "db_config_name" => ENV['GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci'] == 'main' ? 'main' : 'ci'
+ "db_config_name" => 'ci'
}
end
diff --git a/spec/lib/object_storage/direct_upload_spec.rb b/spec/lib/object_storage/direct_upload_spec.rb
index 1629aec89f5..18a58522d12 100644
--- a/spec/lib/object_storage/direct_upload_spec.rb
+++ b/spec/lib/object_storage/direct_upload_spec.rb
@@ -342,68 +342,84 @@ RSpec.describe ObjectStorage::DirectUpload do
context 'when length is unknown' do
let(:has_length) { false }
- it_behaves_like 'a valid S3 upload with multipart data' do
- before do
- stub_object_storage_multipart_init(storage_url, "myUpload")
+ context 'when s3_omit_multipart_urls feature flag is enabled' do
+ let(:consolidated_settings) { true }
+
+ it 'omits multipart URLs' do
+ expect(subject).not_to have_key(:MultipartUpload)
end
- context 'when maximum upload size is 0' do
- let(:maximum_size) { 0 }
+ it_behaves_like 'a valid upload'
+ end
- it 'returns maximum number of parts' do
- expect(subject[:MultipartUpload][:PartURLs].length).to eq(100)
- end
+ context 'when s3_omit_multipart_urls feature flag is disabled' do
+ before do
+ stub_feature_flags(s3_omit_multipart_urls: false)
+ end
- it 'part size is minimum, 5MB' do
- expect(subject[:MultipartUpload][:PartSize]).to eq(5.megabyte)
+ it_behaves_like 'a valid S3 upload with multipart data' do
+ before do
+ stub_object_storage_multipart_init(storage_url, "myUpload")
end
- end
- context 'when maximum upload size is < 5 MB' do
- let(:maximum_size) { 1024 }
+ context 'when maximum upload size is 0' do
+ let(:maximum_size) { 0 }
- it 'returns only 1 part' do
- expect(subject[:MultipartUpload][:PartURLs].length).to eq(1)
- end
+ it 'returns maximum number of parts' do
+ expect(subject[:MultipartUpload][:PartURLs].length).to eq(100)
+ end
- it 'part size is minimum, 5MB' do
- expect(subject[:MultipartUpload][:PartSize]).to eq(5.megabyte)
+ it 'part size is minimum, 5MB' do
+ expect(subject[:MultipartUpload][:PartSize]).to eq(5.megabyte)
+ end
end
- end
- context 'when maximum upload size is 10MB' do
- let(:maximum_size) { 10.megabyte }
+ context 'when maximum upload size is < 5 MB' do
+ let(:maximum_size) { 1024 }
- it 'returns only 2 parts' do
- expect(subject[:MultipartUpload][:PartURLs].length).to eq(2)
- end
+ it 'returns only 1 part' do
+ expect(subject[:MultipartUpload][:PartURLs].length).to eq(1)
+ end
- it 'part size is minimum, 5MB' do
- expect(subject[:MultipartUpload][:PartSize]).to eq(5.megabyte)
+ it 'part size is minimum, 5MB' do
+ expect(subject[:MultipartUpload][:PartSize]).to eq(5.megabyte)
+ end
end
- end
- context 'when maximum upload size is 12MB' do
- let(:maximum_size) { 12.megabyte }
+ context 'when maximum upload size is 10MB' do
+ let(:maximum_size) { 10.megabyte }
- it 'returns only 3 parts' do
- expect(subject[:MultipartUpload][:PartURLs].length).to eq(3)
- end
+ it 'returns only 2 parts' do
+ expect(subject[:MultipartUpload][:PartURLs].length).to eq(2)
+ end
- it 'part size is rounded-up to 5MB' do
- expect(subject[:MultipartUpload][:PartSize]).to eq(5.megabyte)
+ it 'part size is minimum, 5MB' do
+ expect(subject[:MultipartUpload][:PartSize]).to eq(5.megabyte)
+ end
end
- end
- context 'when maximum upload size is 49GB' do
- let(:maximum_size) { 49.gigabyte }
+ context 'when maximum upload size is 12MB' do
+ let(:maximum_size) { 12.megabyte }
+
+ it 'returns only 3 parts' do
+ expect(subject[:MultipartUpload][:PartURLs].length).to eq(3)
+ end
- it 'returns maximum, 100 parts' do
- expect(subject[:MultipartUpload][:PartURLs].length).to eq(100)
+ it 'part size is rounded-up to 5MB' do
+ expect(subject[:MultipartUpload][:PartSize]).to eq(5.megabyte)
+ end
end
- it 'part size is rounded-up to 5MB' do
- expect(subject[:MultipartUpload][:PartSize]).to eq(505.megabyte)
+ context 'when maximum upload size is 49GB' do
+ let(:maximum_size) { 49.gigabyte }
+
+ it 'returns maximum, 100 parts' do
+ expect(subject[:MultipartUpload][:PartURLs].length).to eq(100)
+ end
+
+ it 'part size is rounded-up to 5MB' do
+ expect(subject[:MultipartUpload][:PartSize]).to eq(505.megabyte)
+ end
end
end
end
diff --git a/spec/lib/security/ci_configuration/sast_build_action_spec.rb b/spec/lib/security/ci_configuration/sast_build_action_spec.rb
index efb8b0b9984..611a886d252 100644
--- a/spec/lib/security/ci_configuration/sast_build_action_spec.rb
+++ b/spec/lib/security/ci_configuration/sast_build_action_spec.rb
@@ -4,54 +4,54 @@ require 'spec_helper'
RSpec.describe Security::CiConfiguration::SastBuildAction do
let(:default_sast_values) do
- { 'global' =>
+ { global:
[
- { 'field' => 'SECURE_ANALYZERS_PREFIX', 'defaultValue' => 'registry.gitlab.com/security-products', 'value' => 'registry.gitlab.com/security-products' }
+ { field: 'SECURE_ANALYZERS_PREFIX', default_value: 'registry.gitlab.com/security-products', value: 'registry.gitlab.com/security-products' }
],
- 'pipeline' =>
+ pipeline:
[
- { 'field' => 'stage', 'defaultValue' => 'test', 'value' => 'test' },
- { 'field' => 'SEARCH_MAX_DEPTH', 'defaultValue' => 4, 'value' => 4 },
- { 'field' => 'SAST_EXCLUDED_PATHS', 'defaultValue' => 'spec, test, tests, tmp', 'value' => 'spec, test, tests, tmp' }
+ { field: 'stage', default_value: 'test', value: 'test' },
+ { field: 'SEARCH_MAX_DEPTH', default_value: 4, value: 4 },
+ { field: 'SAST_EXCLUDED_PATHS', default_value: 'spec, test, tests, tmp', value: 'spec, test, tests, tmp' }
] }
end
let(:params) do
- { 'global' =>
+ { global:
[
- { 'field' => 'SECURE_ANALYZERS_PREFIX', 'defaultValue' => 'registry.gitlab.com/security-products', 'value' => 'new_registry' }
+ { field: 'SECURE_ANALYZERS_PREFIX', default_value: 'registry.gitlab.com/security-products', value: 'new_registry' }
],
- 'pipeline' =>
+ pipeline:
[
- { 'field' => 'stage', 'defaultValue' => 'test', 'value' => 'security' },
- { 'field' => 'SEARCH_MAX_DEPTH', 'defaultValue' => 4, 'value' => 1 },
- { 'field' => 'SAST_EXCLUDED_PATHS', 'defaultValue' => 'spec, test, tests, tmp', 'value' => 'spec,docs' }
+ { field: 'stage', default_value: 'test', value: 'security' },
+ { field: 'SEARCH_MAX_DEPTH', default_value: 4, value: 1 },
+ { field: 'SAST_EXCLUDED_PATHS', default_value: 'spec, test, tests, tmp', value: 'spec,docs' }
] }
end
let(:params_with_analyzer_info) do
- params.merge( { 'analyzers' =>
+ params.merge( { analyzers:
[
{
- 'name' => "bandit",
- 'enabled' => false
+ name: "bandit",
+ enabled: false
},
{
- 'name' => "brakeman",
- 'enabled' => true,
- 'variables' => [
- { 'field' => "SAST_BRAKEMAN_LEVEL",
- 'defaultValue' => "1",
- 'value' => "2" }
+ name: "brakeman",
+ enabled: true,
+ variables: [
+ { field: "SAST_BRAKEMAN_LEVEL",
+ default_value: "1",
+ value: "2" }
]
},
{
- 'name' => "flawfinder",
- 'enabled' => true,
- 'variables' => [
- { 'field' => "SAST_FLAWFINDER_LEVEL",
- 'defaultValue' => "1",
- 'value' => "1" }
+ name: "flawfinder",
+ enabled: true,
+ variables: [
+ { field: "SAST_FLAWFINDER_LEVEL",
+ default_value: "1",
+ value: "1" }
]
}
] }
@@ -59,15 +59,15 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do
end
let(:params_with_all_analyzers_enabled) do
- params.merge( { 'analyzers' =>
+ params.merge( { analyzers:
[
{
- 'name' => "flawfinder",
- 'enabled' => true
+ name: "flawfinder",
+ enabled: true
},
{
- 'name' => "brakeman",
- 'enabled' => true
+ name: "brakeman",
+ enabled: true
}
] }
)
@@ -162,15 +162,15 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do
context 'with update stage and SEARCH_MAX_DEPTH and set SECURE_ANALYZERS_PREFIX to default' do
let(:params) do
- { 'global' =>
+ { global:
[
- { 'field' => 'SECURE_ANALYZERS_PREFIX', 'defaultValue' => 'registry.gitlab.com/security-products', 'value' => 'registry.gitlab.com/security-products' }
+ { field: 'SECURE_ANALYZERS_PREFIX', default_value: 'registry.gitlab.com/security-products', value: 'registry.gitlab.com/security-products' }
],
- 'pipeline' =>
+ pipeline:
[
- { 'field' => 'stage', 'defaultValue' => 'test', 'value' => 'brand_new_stage' },
- { 'field' => 'SEARCH_MAX_DEPTH', 'defaultValue' => 4, 'value' => 5 },
- { 'field' => 'SAST_EXCLUDED_PATHS', 'defaultValue' => 'spec, test, tests, tmp', 'value' => 'spec,docs' }
+ { field: 'stage', default_value: 'test', value: 'brand_new_stage' },
+ { field: 'SEARCH_MAX_DEPTH', default_value: 4, value: 5 },
+ { field: 'SAST_EXCLUDED_PATHS', default_value: 'spec, test, tests, tmp', value: 'spec,docs' }
] }
end
@@ -273,9 +273,9 @@ RSpec.describe Security::CiConfiguration::SastBuildAction do
context 'with one empty parameter' do
let(:params) do
- { 'global' =>
+ { global:
[
- { 'field' => 'SECURE_ANALYZERS_PREFIX', 'defaultValue' => 'registry.gitlab.com/security-products', 'value' => '' }
+ { field: 'SECURE_ANALYZERS_PREFIX', default_value: 'registry.gitlab.com/security-products', value: '' }
] }
end
diff --git a/spec/lib/service_ping/build_payload_spec.rb b/spec/lib/service_ping/build_payload_spec.rb
index 6cce07262b2..b10c9fd5bc0 100644
--- a/spec/lib/service_ping/build_payload_spec.rb
+++ b/spec/lib/service_ping/build_payload_spec.rb
@@ -14,35 +14,6 @@ RSpec.describe ServicePing::BuildPayload do
end
end
- context 'when usage_ping_enabled setting is false' do
- before do
- # Gitlab::CurrentSettings.usage_ping_enabled? == false
- stub_config_setting(usage_ping_enabled: false)
- end
-
- it 'returns empty service ping payload' do
- expect(service_ping_payload).to eq({})
- end
- end
-
- context 'when usage_ping_enabled setting is true' do
- before do
- # Gitlab::CurrentSettings.usage_ping_enabled? == true
- stub_config_setting(usage_ping_enabled: true)
- end
-
- it_behaves_like 'complete service ping payload'
-
- context 'with require stats consent enabled' do
- before do
- allow(User).to receive(:single_user)
- .and_return(instance_double(User, :user, requires_usage_stats_consent?: true))
- end
-
- it 'returns empty service ping payload' do
- expect(service_ping_payload).to eq({})
- end
- end
- end
+ it_behaves_like 'complete service ping payload'
end
end
diff --git a/spec/lib/service_ping/permit_data_categories_spec.rb b/spec/lib/service_ping/permit_data_categories_spec.rb
index d1027a6f1ab..a4b88531205 100644
--- a/spec/lib/service_ping/permit_data_categories_spec.rb
+++ b/spec/lib/service_ping/permit_data_categories_spec.rb
@@ -19,26 +19,10 @@ RSpec.describe ServicePing::PermitDataCategories do
end
context 'when usage ping setting is set to false' do
- before do
- allow(User).to receive(:single_user)
- .and_return(instance_double(User, :user, requires_usage_stats_consent?: false))
+ it 'returns all categories' do
stub_config_setting(usage_ping_enabled: false)
- end
-
- it 'returns no categories' do
- expect(permitted_categories).to match_array([])
- end
- end
- context 'when User.single_user&.requires_usage_stats_consent? is required' do
- before do
- allow(User).to receive(:single_user)
- .and_return(instance_double(User, :user, requires_usage_stats_consent?: true))
- stub_config_setting(usage_ping_enabled: true)
- end
-
- it 'returns no categories' do
- expect(permitted_categories).to match_array([])
+ expect(permitted_categories).to match_array(%w[standard subscription operational optional])
end
end
end