Diffstat (limited to 'spec/lib/gitlab')
-rw-r--r-- spec/lib/gitlab/analytics/cycle_analytics/stage_events/code_stage_start_spec.rb | 4
-rw-r--r-- spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_created_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_first_mentioned_in_commit_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_stage_end_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_created_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_first_deployed_to_production_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_last_build_finished_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_last_build_started_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_merged_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/analytics/cycle_analytics/stage_events/plan_stage_start_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/analytics/instance_statistics/workers_argument_builder_spec.rb | 35
-rw-r--r-- spec/lib/gitlab/auth/auth_finders_spec.rb | 1
-rw-r--r-- spec/lib/gitlab/background_migration/backfill_design_internal_ids_spec.rb | 69
-rw-r--r-- spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb | 65
-rw-r--r-- spec/lib/gitlab/background_migration/backfill_merge_request_cleanup_schedules_spec.rb | 53
-rw-r--r-- spec/lib/gitlab/background_migration/legacy_upload_mover_spec.rb | 98
-rw-r--r-- spec/lib/gitlab/background_migration/populate_has_vulnerabilities_spec.rb | 63
-rw-r--r-- spec/lib/gitlab/background_migration/populate_missing_vulnerability_dismissal_information_spec.rb | 65
-rw-r--r-- spec/lib/gitlab/background_migration/replace_blocked_by_links_spec.rb | 26
-rw-r--r-- spec/lib/gitlab/badge/coverage/report_spec.rb | 103
-rw-r--r-- spec/lib/gitlab/bitbucket_server_import/importer_spec.rb | 83
-rw-r--r-- spec/lib/gitlab/bulk_import/client_spec.rb | 95
-rw-r--r-- spec/lib/gitlab/chat/output_spec.rb | 99
-rw-r--r-- spec/lib/gitlab/ci/build/rules/rule/clause/changes_spec.rb | 42
-rw-r--r-- spec/lib/gitlab/ci/charts_spec.rb | 20
-rw-r--r-- spec/lib/gitlab/ci/config/entry/product/matrix_spec.rb | 128
-rw-r--r-- spec/lib/gitlab/ci/config/entry/product/variables_spec.rb | 71
-rw-r--r-- spec/lib/gitlab/ci/config/external/mapper_spec.rb | 56
-rw-r--r-- spec/lib/gitlab/ci/config/external/processor_spec.rb | 77
-rw-r--r-- spec/lib/gitlab/ci/config_spec.rb | 78
-rw-r--r-- spec/lib/gitlab/ci/jwt_spec.rb | 63
-rw-r--r-- spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb | 123
-rw-r--r-- spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb | 19
-rw-r--r-- spec/lib/gitlab/ci/pipeline/chain/seed_block_spec.rb | 78
-rw-r--r-- spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb | 66
-rw-r--r-- spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb | 63
-rw-r--r-- spec/lib/gitlab/ci/reports/test_case_spec.rb | 23
-rw-r--r-- spec/lib/gitlab/ci/reports/test_failure_history_spec.rb | 45
-rw-r--r-- spec/lib/gitlab/ci/reports/test_reports_spec.rb | 6
-rw-r--r-- spec/lib/gitlab/ci/reports/test_suite_comparer_spec.rb | 146
-rw-r--r-- spec/lib/gitlab/ci/reports/test_suite_spec.rb | 4
-rw-r--r-- spec/lib/gitlab/ci/runner_instructions_spec.rb | 7
-rw-r--r-- spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb | 59
-rw-r--r-- spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb | 13
-rw-r--r-- spec/lib/gitlab/ci/variables/collection/item_spec.rb | 4
-rw-r--r-- spec/lib/gitlab/config/entry/simplifiable_spec.rb | 6
-rw-r--r-- spec/lib/gitlab/conflict/file_spec.rb | 45
-rw-r--r-- spec/lib/gitlab/cycle_analytics/events_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/danger/commit_linter_spec.rb | 7
-rw-r--r-- spec/lib/gitlab/danger/helper_spec.rb | 23
-rw-r--r-- spec/lib/gitlab/data_builder/feature_flag_spec.rb | 25
-rw-r--r-- spec/lib/gitlab/database/batch_count_spec.rb | 23
-rw-r--r-- spec/lib/gitlab/database/migration_helpers_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/database/partitioning/replace_table_spec.rb | 113
-rw-r--r-- spec/lib/gitlab/database/partitioning_migration_helpers/index_helpers_spec.rb | 186
-rw-r--r-- spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb | 71
-rw-r--r-- spec/lib/gitlab/database/postgres_index_spec.rb | 44
-rw-r--r-- spec/lib/gitlab/database/postgres_partition_spec.rb | 75
-rw-r--r-- spec/lib/gitlab/database/postgres_partitioned_table_spec.rb | 98
-rw-r--r-- spec/lib/gitlab/database/reindexing_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/email/handler/service_desk_handler_spec.rb | 19
-rw-r--r-- spec/lib/gitlab/error_tracking_spec.rb | 98
-rw-r--r-- spec/lib/gitlab/etag_caching/middleware_spec.rb | 48
-rw-r--r-- spec/lib/gitlab/etag_caching/router_spec.rb | 8
-rw-r--r-- spec/lib/gitlab/exclusive_lease_helpers_spec.rb | 8
-rw-r--r-- spec/lib/gitlab/experimentation/controller_concern_spec.rb | 438
-rw-r--r-- spec/lib/gitlab/experimentation/group_types_spec.rb | 13
-rw-r--r-- spec/lib/gitlab/experimentation_spec.rb | 443
-rw-r--r-- spec/lib/gitlab/git/diff_spec.rb | 14
-rw-r--r-- spec/lib/gitlab/git_access_snippet_spec.rb | 4
-rw-r--r-- spec/lib/gitlab/gitaly_client/commit_service_spec.rb | 4
-rw-r--r-- spec/lib/gitlab/gitaly_client/repository_service_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/github_import/client_spec.rb | 129
-rw-r--r-- spec/lib/gitlab/github_import/importer/lfs_object_importer_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/github_import/sequential_importer_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/github_import_spec.rb | 57
-rw-r--r-- spec/lib/gitlab/grape_logging/formatters/lograge_with_timestamp_spec.rb | 6
-rw-r--r-- spec/lib/gitlab/graphql/authorize/authorize_field_service_spec.rb | 226
-rw-r--r-- spec/lib/gitlab/graphql/lazy_spec.rb | 96
-rw-r--r-- spec/lib/gitlab/graphql/loaders/batch_model_loader_spec.rb | 23
-rw-r--r-- spec/lib/gitlab/hook_data/release_builder_spec.rb | 49
-rw-r--r-- spec/lib/gitlab/i18n/po_linter_spec.rb | 5
-rw-r--r-- spec/lib/gitlab/import_export/all_models.yml | 6
-rw-r--r-- spec/lib/gitlab/import_export/attributes_finder_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/import_export/group/legacy_tree_saver_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/import_export/importer_spec.rb | 20
-rw-r--r-- spec/lib/gitlab/import_export/json/ndjson_reader_spec.rb | 18
-rw-r--r-- spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb | 1
-rw-r--r-- spec/lib/gitlab/import_export/lfs_restorer_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/import_export/project/relation_factory_spec.rb | 1
-rw-r--r-- spec/lib/gitlab/import_export/project/sample/date_calculator_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/import_export/project/sample/relation_factory_spec.rb | 168
-rw-r--r-- spec/lib/gitlab/import_export/project/sample/relation_tree_restorer_spec.rb (renamed from spec/lib/gitlab/import_export/project/sample/sample_data_relation_tree_restorer_spec.rb) | 23
-rw-r--r-- spec/lib/gitlab/import_export/project/tree_restorer_spec.rb | 44
-rw-r--r-- spec/lib/gitlab/import_export/safe_model_attributes.yml | 26
-rw-r--r-- spec/lib/gitlab/import_export/uploads_manager_spec.rb | 24
-rw-r--r-- spec/lib/gitlab/instrumentation_helper_spec.rb | 10
-rw-r--r-- spec/lib/gitlab/jira_import_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/json_spec.rb | 518
-rw-r--r-- spec/lib/gitlab/kubernetes/helm/api_spec.rb | 4
-rw-r--r-- spec/lib/gitlab/kubernetes/helm/pod_spec.rb | 121
-rw-r--r-- spec/lib/gitlab/kubernetes/helm/v2/base_command_spec.rb | 50
-rw-r--r-- spec/lib/gitlab/kubernetes/helm/v2/certificate_spec.rb (renamed from spec/lib/gitlab/kubernetes/helm/certificate_spec.rb) | 2
-rw-r--r-- spec/lib/gitlab/kubernetes/helm/v2/delete_command_spec.rb (renamed from spec/lib/gitlab/kubernetes/helm/delete_command_spec.rb) | 2
-rw-r--r-- spec/lib/gitlab/kubernetes/helm/v2/init_command_spec.rb (renamed from spec/lib/gitlab/kubernetes/helm/init_command_spec.rb) | 2
-rw-r--r-- spec/lib/gitlab/kubernetes/helm/v2/install_command_spec.rb (renamed from spec/lib/gitlab/kubernetes/helm/install_command_spec.rb) | 33
-rw-r--r-- spec/lib/gitlab/kubernetes/helm/v2/patch_command_spec.rb (renamed from spec/lib/gitlab/kubernetes/helm/patch_command_spec.rb) | 29
-rw-r--r-- spec/lib/gitlab/kubernetes/helm/v2/reset_command_spec.rb (renamed from spec/lib/gitlab/kubernetes/helm/reset_command_spec.rb) | 2
-rw-r--r-- spec/lib/gitlab/kubernetes/helm/v3/base_command_spec.rb (renamed from spec/lib/gitlab/kubernetes/helm/base_command_spec.rb) | 10
-rw-r--r-- spec/lib/gitlab/kubernetes/helm/v3/delete_command_spec.rb | 35
-rw-r--r-- spec/lib/gitlab/kubernetes/helm/v3/install_command_spec.rb | 168
-rw-r--r-- spec/lib/gitlab/kubernetes/helm/v3/patch_command_spec.rb | 81
-rw-r--r-- spec/lib/gitlab/kubernetes/kube_client_spec.rb | 32
-rw-r--r-- spec/lib/gitlab/legacy_github_import/importer_spec.rb | 5
-rw-r--r-- spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb | 118
-rw-r--r-- spec/lib/gitlab/middleware/handle_malformed_strings_spec.rb | 182
-rw-r--r-- spec/lib/gitlab/middleware/handle_null_bytes_spec.rb | 88
-rw-r--r-- spec/lib/gitlab/middleware/read_only_spec.rb | 202
-rw-r--r-- spec/lib/gitlab/omniauth_initializer_spec.rb | 28
-rw-r--r-- spec/lib/gitlab/path_regex_spec.rb | 39
-rw-r--r-- spec/lib/gitlab/quick_actions/extractor_spec.rb | 16
-rw-r--r-- spec/lib/gitlab/redis/wrapper_spec.rb | 6
-rw-r--r-- spec/lib/gitlab/reference_extractor_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/regex_spec.rb | 9
-rw-r--r-- spec/lib/gitlab/relative_positioning/mover_spec.rb | 1
-rw-r--r-- spec/lib/gitlab/repository_size_checker_spec.rb | 6
-rw-r--r-- spec/lib/gitlab/repository_size_error_message_spec.rb | 14
-rw-r--r-- spec/lib/gitlab/robots_txt/parser_spec.rb | 15
-rw-r--r-- spec/lib/gitlab/search/sort_options_spec.rb | 34
-rw-r--r-- spec/lib/gitlab/sidekiq_cluster/cli_spec.rb | 165
-rw-r--r-- spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb | 20
-rw-r--r-- spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/client_spec.rb | 109
-rw-r--r-- spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/server_spec.rb | 64
-rw-r--r-- spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executed_spec.rb | 20
-rw-r--r-- spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing_spec.rb | 144
-rw-r--r-- spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies_spec.rb | 4
-rw-r--r-- spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb | 20
-rw-r--r-- spec/lib/gitlab/throttle_spec.rb | 18
-rw-r--r-- spec/lib/gitlab/tracking/destinations/snowplow_spec.rb | 78
-rw-r--r-- spec/lib/gitlab/tracking/incident_management_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/tracking_spec.rb | 106
-rw-r--r-- spec/lib/gitlab/url_blocker_spec.rb | 30
-rw-r--r-- spec/lib/gitlab/url_blockers/domain_allowlist_entry_spec.rb | 58
-rw-r--r-- spec/lib/gitlab/url_blockers/domain_whitelist_entry_spec.rb | 58
-rw-r--r-- spec/lib/gitlab/url_blockers/ip_allowlist_entry_spec.rb | 75
-rw-r--r-- spec/lib/gitlab/url_blockers/ip_whitelist_entry_spec.rb | 75
-rw-r--r-- spec/lib/gitlab/url_blockers/url_allowlist_spec.rb | 164
-rw-r--r-- spec/lib/gitlab/url_blockers/url_whitelist_spec.rb | 164
-rw-r--r-- spec/lib/gitlab/url_builder_spec.rb | 1
-rw-r--r-- spec/lib/gitlab/usage_data_counters/aggregated_metrics_spec.rb | 57
-rw-r--r-- spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb | 324
-rw-r--r-- spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb | 138
-rw-r--r-- spec/lib/gitlab/usage_data_counters/static_site_editor_counter_spec.rb | 6
-rw-r--r-- spec/lib/gitlab/usage_data_spec.rb | 174
-rw-r--r-- spec/lib/gitlab/with_feature_category_spec.rb | 69
156 files changed, 5961 insertions, 2873 deletions
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/code_stage_start_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/code_stage_start_spec.rb
index fe390289ef6..52e9f2d9846 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/code_stage_start_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/code_stage_start_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::StageEvents::CodeStageStart do
let(:subject) { described_class.new({}) }
let(:project) { create(:project) }
- it_behaves_like 'cycle analytics event'
+ it_behaves_like 'value stream analytics event'
it 'needs connection with an issue via merge_requests_closing_issues table' do
issue = create(:issue, project: project)
@@ -15,7 +15,7 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::StageEvents::CodeStageStart do
other_merge_request = create(:merge_request, source_project: project, source_branch: 'a', target_branch: 'master')
- records = subject.apply_query_customization(MergeRequest.all)
+ records = subject.apply_query_customization(MergeRequest.all).where('merge_requests_closing_issues.issue_id IS NOT NULL')
expect(records).to eq([merge_request])
expect(records).not_to include(other_merge_request)
end
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_created_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_created_spec.rb
index 5cc6b05407f..224a18653ed 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_created_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_created_spec.rb
@@ -3,5 +3,5 @@
require 'spec_helper'
RSpec.describe Gitlab::Analytics::CycleAnalytics::StageEvents::IssueCreated do
- it_behaves_like 'cycle analytics event'
+ it_behaves_like 'value stream analytics event'
end
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_first_mentioned_in_commit_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_first_mentioned_in_commit_spec.rb
index 715ad5a8e7d..bc0e388cf53 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_first_mentioned_in_commit_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_first_mentioned_in_commit_spec.rb
@@ -3,5 +3,5 @@
require 'spec_helper'
RSpec.describe Gitlab::Analytics::CycleAnalytics::StageEvents::IssueFirstMentionedInCommit do
- it_behaves_like 'cycle analytics event'
+ it_behaves_like 'value stream analytics event'
end
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_stage_end_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_stage_end_spec.rb
index 56241194f36..ddc5f015a8c 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_stage_end_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/issue_stage_end_spec.rb
@@ -3,5 +3,5 @@
require 'spec_helper'
RSpec.describe Gitlab::Analytics::CycleAnalytics::StageEvents::IssueStageEnd do
- it_behaves_like 'cycle analytics event'
+ it_behaves_like 'value stream analytics event'
end
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_created_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_created_spec.rb
index f3202eab5bb..281cc31c9e0 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_created_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_created_spec.rb
@@ -3,5 +3,5 @@
require 'spec_helper'
RSpec.describe Gitlab::Analytics::CycleAnalytics::StageEvents::MergeRequestCreated do
- it_behaves_like 'cycle analytics event'
+ it_behaves_like 'value stream analytics event'
end
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_first_deployed_to_production_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_first_deployed_to_production_spec.rb
index 03b0ccfae43..e1dd2e56e2b 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_first_deployed_to_production_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_first_deployed_to_production_spec.rb
@@ -3,5 +3,5 @@
require 'spec_helper'
RSpec.describe Gitlab::Analytics::CycleAnalytics::StageEvents::MergeRequestFirstDeployedToProduction do
- it_behaves_like 'cycle analytics event'
+ it_behaves_like 'value stream analytics event'
end
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_last_build_finished_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_last_build_finished_spec.rb
index b0c003e6f2a..51324966f26 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_last_build_finished_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_last_build_finished_spec.rb
@@ -3,5 +3,5 @@
require 'spec_helper'
RSpec.describe Gitlab::Analytics::CycleAnalytics::StageEvents::MergeRequestLastBuildFinished do
- it_behaves_like 'cycle analytics event'
+ it_behaves_like 'value stream analytics event'
end
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_last_build_started_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_last_build_started_spec.rb
index 8f9aaf6f463..10dcaf23b81 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_last_build_started_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_last_build_started_spec.rb
@@ -3,5 +3,5 @@
require 'spec_helper'
RSpec.describe Gitlab::Analytics::CycleAnalytics::StageEvents::MergeRequestLastBuildStarted do
- it_behaves_like 'cycle analytics event'
+ it_behaves_like 'value stream analytics event'
end
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_merged_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_merged_spec.rb
index f1d2ca9f36e..6e20eb73ed9 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_merged_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/merge_request_merged_spec.rb
@@ -3,5 +3,5 @@
require 'spec_helper'
RSpec.describe Gitlab::Analytics::CycleAnalytics::StageEvents::MergeRequestMerged do
- it_behaves_like 'cycle analytics event'
+ it_behaves_like 'value stream analytics event'
end
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/plan_stage_start_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/plan_stage_start_spec.rb
index 3248af524bd..b8c68003127 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/plan_stage_start_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/plan_stage_start_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::StageEvents::PlanStageStart do
let(:subject) { described_class.new({}) }
let(:project) { create(:project) }
- it_behaves_like 'cycle analytics event'
+ it_behaves_like 'value stream analytics event'
it 'filters issues where first_associated_with_milestone_at or first_added_to_board_at is filled' do
issue1 = create(:issue, project: project)
diff --git a/spec/lib/gitlab/analytics/instance_statistics/workers_argument_builder_spec.rb b/spec/lib/gitlab/analytics/instance_statistics/workers_argument_builder_spec.rb
index d232e509e00..115c8145f59 100644
--- a/spec/lib/gitlab/analytics/instance_statistics/workers_argument_builder_spec.rb
+++ b/spec/lib/gitlab/analytics/instance_statistics/workers_argument_builder_spec.rb
@@ -42,5 +42,40 @@ RSpec.describe Gitlab::Analytics::InstanceStatistics::WorkersArgumentBuilder do
])
end
end
+
+ context 'when custom min and max queries are present' do
+ let(:min_id) { User.second.id }
+ let(:max_id) { User.maximum(:id) }
+ let(:users_measurement_identifier) { ::Analytics::InstanceStatistics::Measurement.identifiers.fetch(:users) }
+
+ before do
+ create_list(:user, 2)
+
+ min_max_queries = {
+ ::Analytics::InstanceStatistics::Measurement.identifiers[:users] => {
+ minimum_query: -> { min_id },
+ maximum_query: -> { max_id }
+ }
+ }
+
+ allow(::Analytics::InstanceStatistics::Measurement).to receive(:identifier_min_max_queries) { min_max_queries }
+ end
+
+ subject do
+ described_class.new(measurement_identifiers: [users_measurement_identifier], recorded_at: recorded_at)
+ .execute
+ end
+
+ it 'uses custom min/max for ids' do
+ expect(subject).to eq([
+ [
+ users_measurement_identifier,
+ min_id,
+ max_id,
+ recorded_at
+ ]
+ ])
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/auth/auth_finders_spec.rb b/spec/lib/gitlab/auth/auth_finders_spec.rb
index 2ebde145bfd..3c19ef0bd1b 100644
--- a/spec/lib/gitlab/auth/auth_finders_spec.rb
+++ b/spec/lib/gitlab/auth/auth_finders_spec.rb
@@ -607,6 +607,7 @@ RSpec.describe Gitlab::Auth::AuthFinders do
set_basic_auth_header(username, build.token)
is_expected.to eq user
+ expect(@current_authenticated_job).to eq build
end
it 'raises error with invalid token' do
diff --git a/spec/lib/gitlab/background_migration/backfill_design_internal_ids_spec.rb b/spec/lib/gitlab/background_migration/backfill_design_internal_ids_spec.rb
new file mode 100644
index 00000000000..4bf59a02a31
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_design_internal_ids_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillDesignInternalIds, :migration, schema: 20201030203854 do
+ subject { described_class.new(designs) }
+
+ let_it_be(:namespaces) { table(:namespaces) }
+ let_it_be(:projects) { table(:projects) }
+ let_it_be(:designs) { table(:design_management_designs) }
+
+ let(:namespace) { namespaces.create!(name: 'foo', path: 'foo') }
+ let(:project) { projects.create!(namespace_id: namespace.id) }
+ let(:project_2) { projects.create!(namespace_id: namespace.id) }
+
+ def create_design!(proj = project)
+ designs.create!(project_id: proj.id, filename: generate(:filename))
+ end
+
+ def migrate!
+ relation = designs.where(project_id: [project.id, project_2.id]).select(:project_id).distinct
+
+ subject.perform(relation)
+ end
+
+ it 'backfills the iid for designs' do
+ 3.times { create_design! }
+
+ expect do
+ migrate!
+ end.to change { designs.pluck(:iid) }.from(contain_exactly(nil, nil, nil)).to(contain_exactly(1, 2, 3))
+ end
+
+ it 'scopes IIDs and handles range and starting-point correctly' do
+ create_design!.update!(iid: 10)
+ create_design!.update!(iid: 12)
+ create_design!(project_2).update!(iid: 7)
+ project_3 = projects.create!(namespace_id: namespace.id)
+
+ 2.times { create_design! }
+ 2.times { create_design!(project_2) }
+ 2.times { create_design!(project_3) }
+
+ migrate!
+
+ expect(designs.where(project_id: project.id).pluck(:iid)).to contain_exactly(10, 12, 13, 14)
+ expect(designs.where(project_id: project_2.id).pluck(:iid)).to contain_exactly(7, 8, 9)
+ expect(designs.where(project_id: project_3.id).pluck(:iid)).to contain_exactly(nil, nil)
+ end
+
+ it 'updates the internal ID records' do
+ design = create_design!
+ 2.times { create_design! }
+ design.update!(iid: 10)
+ scope = { project_id: project.id }
+ usage = :design_management_designs
+ init = ->(_d, _s) { 0 }
+
+ ::InternalId.track_greatest(design, scope, usage, 10, init)
+
+ migrate!
+
+ next_iid = ::InternalId.generate_next(design, scope, usage, init)
+
+ expect(designs.pluck(:iid)).to contain_exactly(10, 11, 12)
+ expect(design.reload.iid).to eq(10)
+ expect(next_iid).to eq(13)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb b/spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb
new file mode 100644
index 00000000000..7fe82420364
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillJiraTrackerDeploymentType2, :migration, schema: 20201028182809 do
+ let_it_be(:jira_service_temp) { described_class::JiraServiceTemp }
+ let_it_be(:jira_tracker_data_temp) { described_class::JiraTrackerDataTemp }
+ let_it_be(:atlassian_host) { 'https://api.atlassian.net' }
+ let_it_be(:mixedcase_host) { 'https://api.AtlassiaN.nEt' }
+ let_it_be(:server_host) { 'https://my.server.net' }
+
+ let(:jira_service) { jira_service_temp.create!(type: 'JiraService', active: true, category: 'issue_tracker') }
+
+ subject { described_class.new }
+
+ def create_tracker_data(options = {})
+ jira_tracker_data_temp.create!({ service_id: jira_service.id }.merge(options))
+ end
+
+ describe '#perform' do
+ context do
+ it 'ignores if deployment already set' do
+ tracker_data = create_tracker_data(url: atlassian_host, deployment_type: 'server')
+
+ expect(subject).not_to receive(:collect_deployment_type)
+
+ subject.perform(tracker_data.id, tracker_data.id)
+
+ expect(tracker_data.reload.deployment_type).to eq 'server'
+ end
+
+ it 'ignores if no url is set' do
+ tracker_data = create_tracker_data(deployment_type: 'unknown')
+
+ expect(subject).to receive(:collect_deployment_type)
+
+ subject.perform(tracker_data.id, tracker_data.id)
+
+ expect(tracker_data.reload.deployment_type).to eq 'unknown'
+ end
+ end
+
+ context 'when tracker is valid' do
+ let!(:tracker_1) { create_tracker_data(url: atlassian_host, deployment_type: 0) }
+ let!(:tracker_2) { create_tracker_data(url: mixedcase_host, deployment_type: 0) }
+ let!(:tracker_3) { create_tracker_data(url: server_host, deployment_type: 0) }
+ let!(:tracker_4) { create_tracker_data(api_url: server_host, deployment_type: 0) }
+ let!(:tracker_nextbatch) { create_tracker_data(api_url: atlassian_host, deployment_type: 0) }
+
+ it 'sets the proper deployment_type', :aggregate_failures do
+ subject.perform(tracker_1.id, tracker_4.id)
+
+ expect(tracker_1.reload.deployment_cloud?).to be_truthy
+ expect(tracker_2.reload.deployment_cloud?).to be_truthy
+ expect(tracker_3.reload.deployment_server?).to be_truthy
+ expect(tracker_4.reload.deployment_server?).to be_truthy
+ expect(tracker_nextbatch.reload.deployment_unknown?).to be_truthy
+ end
+ end
+
+ it_behaves_like 'marks background migration job records' do
+ let(:arguments) { [1, 4] }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_merge_request_cleanup_schedules_spec.rb b/spec/lib/gitlab/background_migration/backfill_merge_request_cleanup_schedules_spec.rb
new file mode 100644
index 00000000000..c2daa35703d
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_merge_request_cleanup_schedules_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillMergeRequestCleanupSchedules, schema: 20201103110018 do
+ let(:merge_requests) { table(:merge_requests) }
+ let(:cleanup_schedules) { table(:merge_request_cleanup_schedules) }
+ let(:metrics) { table(:merge_request_metrics) }
+
+ let(:namespace) { table(:namespaces).create!(name: 'name', path: 'path') }
+ let(:project) { table(:projects).create!(namespace_id: namespace.id) }
+
+ subject { described_class.new }
+
+ describe '#perform' do
+ let!(:open_mr) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master') }
+
+ let!(:closed_mr_1) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 2) }
+ let!(:closed_mr_2) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 2) }
+ let!(:closed_mr_1_metrics) { metrics.create!(merge_request_id: closed_mr_1.id, target_project_id: project.id, latest_closed_at: Time.current, created_at: Time.current, updated_at: Time.current) }
+ let!(:closed_mr_2_metrics) { metrics.create!(merge_request_id: closed_mr_2.id, target_project_id: project.id, latest_closed_at: Time.current, created_at: Time.current, updated_at: Time.current) }
+ let!(:closed_mr_2_cleanup_schedule) { cleanup_schedules.create!(merge_request_id: closed_mr_2.id, scheduled_at: Time.current) }
+
+ let!(:merged_mr_1) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 3) }
+ let!(:merged_mr_2) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 3, updated_at: Time.current) }
+ let!(:merged_mr_1_metrics) { metrics.create!(merge_request_id: merged_mr_1.id, target_project_id: project.id, merged_at: Time.current, created_at: Time.current, updated_at: Time.current) }
+
+ let!(:closed_mr_3) { merge_requests.create!(target_project_id: project.id, source_branch: 'master', target_branch: 'master', state_id: 2) }
+ let!(:closed_mr_3_metrics) { metrics.create!(merge_request_id: closed_mr_3.id, target_project_id: project.id, latest_closed_at: Time.current, created_at: Time.current, updated_at: Time.current) }
+
+ it 'creates records for all closed and merged merge requests in range' do
+ expect(Gitlab::BackgroundMigration::Logger).to receive(:info).with(
+ message: 'Backfilled merge_request_cleanup_schedules records',
+ count: 3
+ )
+
+ subject.perform(open_mr.id, merged_mr_2.id)
+
+ aggregate_failures do
+ expect(cleanup_schedules.all.pluck(:merge_request_id))
+ .to contain_exactly(closed_mr_1.id, closed_mr_2.id, merged_mr_1.id, merged_mr_2.id)
+ expect(cleanup_schedules.find_by(merge_request_id: closed_mr_1.id).scheduled_at.to_s)
+ .to eq((closed_mr_1_metrics.latest_closed_at + 14.days).to_s)
+ expect(cleanup_schedules.find_by(merge_request_id: closed_mr_2.id).scheduled_at.to_s)
+ .to eq(closed_mr_2_cleanup_schedule.scheduled_at.to_s)
+ expect(cleanup_schedules.find_by(merge_request_id: merged_mr_1.id).scheduled_at.to_s)
+ .to eq((merged_mr_1_metrics.merged_at + 14.days).to_s)
+ expect(cleanup_schedules.find_by(merge_request_id: merged_mr_2.id).scheduled_at.to_s)
+ .to eq((merged_mr_2.updated_at + 14.days).to_s)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/legacy_upload_mover_spec.rb b/spec/lib/gitlab/background_migration/legacy_upload_mover_spec.rb
index 1637589d272..934ab7e37f8 100644
--- a/spec/lib/gitlab/background_migration/legacy_upload_mover_spec.rb
+++ b/spec/lib/gitlab/background_migration/legacy_upload_mover_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
# rubocop: disable RSpec/FactoriesInMigrationSpecs
-RSpec.describe Gitlab::BackgroundMigration::LegacyUploadMover do
+RSpec.describe Gitlab::BackgroundMigration::LegacyUploadMover, :aggregate_failures do
let(:test_dir) { FileUploader.options['storage_path'] }
let(:filename) { 'image.png' }
@@ -67,27 +67,35 @@ RSpec.describe Gitlab::BackgroundMigration::LegacyUploadMover do
end
end
- shared_examples 'migrates the file correctly' do
- before do
+ shared_examples 'migrates the file correctly' do |remote|
+ it 'creates a new upload record correctly, updates the legacy upload note so that it references the file in the markdown, removes the attachment from the note model, removes the file, moves legacy uploads to the correct location, removes the upload record' do
+ expect(File.exist?(legacy_upload.absolute_path)).to be_truthy unless remote
+
described_class.new(legacy_upload).execute
- end
- it 'creates a new uplaod record correctly' do
expect(new_upload.secret).not_to be_nil
- expect(new_upload.path).to end_with("#{new_upload.secret}/image.png")
+ expect(new_upload.path).to end_with("#{new_upload.secret}/#{filename}")
expect(new_upload.model_id).to eq(project.id)
expect(new_upload.model_type).to eq('Project')
expect(new_upload.uploader).to eq('FileUploader')
- end
- it 'updates the legacy upload note so that it references the file in the markdown' do
- expected_path = File.join('/uploads', new_upload.secret, 'image.png')
+ expected_path = File.join('/uploads', new_upload.secret, filename)
expected_markdown = "some note \n ![image](#{expected_path})"
+
expect(note.reload.note).to eq(expected_markdown)
- end
+ expect(note.attachment.file).to be_nil
+
+ if remote
+ expect(bucket.files.get(remote_file[:key])).to be_nil
+ connection = ::Fog::Storage.new(FileUploader.object_store_credentials)
+ expect(connection.get_object('uploads', new_upload.path)[:status]).to eq(200)
+ else
+ expect(File.exist?(legacy_upload.absolute_path)).to be_falsey
+ expected_path = File.join(test_dir, 'uploads', project.disk_path, new_upload.secret, filename)
+ expect(File.exist?(expected_path)).to be_truthy
+ end
- it 'removes the attachment from the note model' do
- expect(note.reload.attachment.file).to be_nil
+ expect { legacy_upload.reload }.to raise_error(ActiveRecord::RecordNotFound)
end
end
@@ -120,23 +128,6 @@ RSpec.describe Gitlab::BackgroundMigration::LegacyUploadMover do
end
context 'when the upload is in local storage' do
- shared_examples 'legacy local file' do
- it 'removes the file correctly' do
- expect(File.exist?(legacy_upload.absolute_path)).to be_truthy
-
- described_class.new(legacy_upload).execute
-
- expect(File.exist?(legacy_upload.absolute_path)).to be_falsey
- end
-
- it 'moves legacy uploads to the correct location' do
- described_class.new(legacy_upload).execute
-
- expected_path = File.join(test_dir, 'uploads', project.disk_path, new_upload.secret, filename)
- expect(File.exist?(expected_path)).to be_truthy
- end
- end
-
context 'when the upload file does not exist on the filesystem' do
let(:legacy_upload) { create_upload(note, filename, false) }
@@ -201,15 +192,11 @@ RSpec.describe Gitlab::BackgroundMigration::LegacyUploadMover do
path: "uploads/-/system/note/attachment/#{note.id}/#{filename}", model: note, mount_point: nil)
end
- it_behaves_like 'migrates the file correctly'
- it_behaves_like 'legacy local file'
- it_behaves_like 'legacy upload deletion'
+ it_behaves_like 'migrates the file correctly', false
end
context 'when the file can be handled correctly' do
- it_behaves_like 'migrates the file correctly'
- it_behaves_like 'legacy local file'
- it_behaves_like 'legacy upload deletion'
+ it_behaves_like 'migrates the file correctly', false
end
end
@@ -217,17 +204,13 @@ RSpec.describe Gitlab::BackgroundMigration::LegacyUploadMover do
context 'when the file belongs to a legacy project' do
let(:project) { legacy_project }
- it_behaves_like 'migrates the file correctly'
- it_behaves_like 'legacy local file'
- it_behaves_like 'legacy upload deletion'
+ it_behaves_like 'migrates the file correctly', false
end
context 'when the file belongs to a hashed project' do
let(:project) { hashed_project }
- it_behaves_like 'migrates the file correctly'
- it_behaves_like 'legacy local file'
- it_behaves_like 'legacy upload deletion'
+ it_behaves_like 'migrates the file correctly', false
end
end
@@ -244,17 +227,13 @@ RSpec.describe Gitlab::BackgroundMigration::LegacyUploadMover do
context 'when the file belongs to a legacy project' do
let(:project) { legacy_project }
- it_behaves_like 'migrates the file correctly'
- it_behaves_like 'legacy local file'
- it_behaves_like 'legacy upload deletion'
+ it_behaves_like 'migrates the file correctly', false
end
context 'when the file belongs to a hashed project' do
let(:project) { hashed_project }
- it_behaves_like 'migrates the file correctly'
- it_behaves_like 'legacy local file'
- it_behaves_like 'legacy upload deletion'
+ it_behaves_like 'migrates the file correctly', false
end
end
end
@@ -272,23 +251,6 @@ RSpec.describe Gitlab::BackgroundMigration::LegacyUploadMover do
stub_uploads_object_storage(FileUploader)
end
- shared_examples 'legacy remote file' do
- it 'removes the file correctly' do
- # expect(bucket.files.get(remote_file[:key])).to be_nil
-
- described_class.new(legacy_upload).execute
-
- expect(bucket.files.get(remote_file[:key])).to be_nil
- end
-
- it 'moves legacy uploads to the correct remote location' do
- described_class.new(legacy_upload).execute
-
- connection = ::Fog::Storage.new(FileUploader.object_store_credentials)
- expect(connection.get_object('uploads', new_upload.path)[:status]).to eq(200)
- end
- end
-
context 'when the upload file does not exist on the filesystem' do
it_behaves_like 'legacy upload deletion'
end
@@ -300,9 +262,7 @@ RSpec.describe Gitlab::BackgroundMigration::LegacyUploadMover do
let(:project) { legacy_project }
- it_behaves_like 'migrates the file correctly'
- it_behaves_like 'legacy remote file'
- it_behaves_like 'legacy upload deletion'
+ it_behaves_like 'migrates the file correctly', true
end
context 'when the file belongs to a hashed project' do
@@ -312,9 +272,7 @@ RSpec.describe Gitlab::BackgroundMigration::LegacyUploadMover do
let(:project) { hashed_project }
- it_behaves_like 'migrates the file correctly'
- it_behaves_like 'legacy remote file'
- it_behaves_like 'legacy upload deletion'
+ it_behaves_like 'migrates the file correctly', true
end
end
end
diff --git a/spec/lib/gitlab/background_migration/populate_has_vulnerabilities_spec.rb b/spec/lib/gitlab/background_migration/populate_has_vulnerabilities_spec.rb
new file mode 100644
index 00000000000..c6385340ca3
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/populate_has_vulnerabilities_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::PopulateHasVulnerabilities, schema: 20201103192526 do
+ let(:users) { table(:users) }
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:project_settings) { table(:project_settings) }
+ let(:vulnerabilities) { table(:vulnerabilities) }
+
+ let(:user) { users.create!(name: 'test', email: 'test@example.com', projects_limit: 5) }
+ let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
+ let(:vulnerability_base_params) { { title: 'title', state: 2, severity: 0, confidence: 5, report_type: 2, author_id: user.id } }
+
+ let!(:project_1) { projects.create!(namespace_id: namespace.id, name: 'foo_1') }
+ let!(:project_2) { projects.create!(namespace_id: namespace.id, name: 'foo_2') }
+ let!(:project_3) { projects.create!(namespace_id: namespace.id, name: 'foo_3') }
+
+ before do
+ project_settings.create!(project_id: project_1.id)
+ vulnerabilities.create!(vulnerability_base_params.merge(project_id: project_1.id))
+ vulnerabilities.create!(vulnerability_base_params.merge(project_id: project_3.id))
+
+ allow(::Gitlab::BackgroundMigration::Logger).to receive_messages(info: true, error: true)
+ end
+
+ describe '#perform' do
+ it 'sets `has_vulnerabilities` attribute of project_settings' do
+ expect { subject.perform(project_1.id, project_3.id) }.to change { project_settings.count }.from(1).to(2)
+ .and change { project_settings.where(has_vulnerabilities: true).count }.from(0).to(2)
+ end
+
+ it 'writes info log message' do
+ subject.perform(project_1.id, project_3.id)
+
+ expect(::Gitlab::BackgroundMigration::Logger).to have_received(:info).with(migrator: described_class.name,
+ message: 'Projects has been processed to populate `has_vulnerabilities` information',
+ count: 2)
+ end
+
+ context 'when non-existing project_id is given' do
+ it 'populates only for the existing projects' do
+ expect { subject.perform(project_1.id, 0, project_3.id) }.to change { project_settings.count }.from(1).to(2)
+ .and change { project_settings.where(has_vulnerabilities: true).count }.from(0).to(2)
+ end
+ end
+
+ context 'when an error happens' do
+ before do
+ allow(described_class::ProjectSetting).to receive(:upsert_for).and_raise('foo')
+ end
+
+ it 'writes error log message' do
+ subject.perform(project_1.id, project_3.id)
+
+ expect(::Gitlab::BackgroundMigration::Logger).to have_received(:error).with(migrator: described_class.name,
+ message: 'foo',
+ project_ids: [project_1.id, project_3.id])
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/populate_missing_vulnerability_dismissal_information_spec.rb b/spec/lib/gitlab/background_migration/populate_missing_vulnerability_dismissal_information_spec.rb
new file mode 100644
index 00000000000..44c5f3d1381
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/populate_missing_vulnerability_dismissal_information_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::PopulateMissingVulnerabilityDismissalInformation, schema: 20201028160832 do
+ let(:users) { table(:users) }
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:vulnerabilities) { table(:vulnerabilities) }
+ let(:findings) { table(:vulnerability_occurrences) }
+ let(:scanners) { table(:vulnerability_scanners) }
+ let(:identifiers) { table(:vulnerability_identifiers) }
+ let(:feedback) { table(:vulnerability_feedback) }
+
+ let(:user) { users.create!(name: 'test', email: 'test@example.com', projects_limit: 5) }
+ let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
+ let(:project) { projects.create!(namespace_id: namespace.id, name: 'foo') }
+ let(:vulnerability_1) { vulnerabilities.create!(title: 'title', state: 2, severity: 0, confidence: 5, report_type: 2, project_id: project.id, author_id: user.id) }
+ let(:vulnerability_2) { vulnerabilities.create!(title: 'title', state: 2, severity: 0, confidence: 5, report_type: 2, project_id: project.id, author_id: user.id) }
+ let(:scanner) { scanners.create!(project_id: project.id, external_id: 'foo', name: 'bar') }
+ let(:identifier) { identifiers.create!(project_id: project.id, fingerprint: 'foo', external_type: 'bar', external_id: 'zoo', name: 'identifier') }
+
+ before do
+ feedback.create!(feedback_type: 0,
+ category: 'sast',
+ project_fingerprint: '418291a26024a1445b23fe64de9380cdcdfd1fa8',
+ project_id: project.id,
+ author_id: user.id,
+ created_at: Time.current)
+
+ findings.create!(name: 'Finding',
+ report_type: 'sast',
+ project_fingerprint: Gitlab::Database::ShaAttribute.new.serialize('418291a26024a1445b23fe64de9380cdcdfd1fa8'),
+ location_fingerprint: 'bar',
+ severity: 1,
+ confidence: 1,
+ metadata_version: 1,
+ raw_metadata: '',
+ uuid: SecureRandom.uuid,
+ project_id: project.id,
+ vulnerability_id: vulnerability_1.id,
+ scanner_id: scanner.id,
+ primary_identifier_id: identifier.id)
+
+ allow(::Gitlab::BackgroundMigration::Logger).to receive_messages(info: true, warn: true, error: true)
+ end
+
+ describe '#perform' do
+ it 'updates the missing dismissal information of the vulnerability' do
+ expect { subject.perform(vulnerability_1.id, vulnerability_2.id) }.to change { vulnerability_1.reload.dismissed_at }.from(nil)
+ .and change { vulnerability_1.reload.dismissed_by_id }.from(nil).to(user.id)
+ end
+
+ it 'writes log messages' do
+ subject.perform(vulnerability_1.id, vulnerability_2.id)
+
+ expect(::Gitlab::BackgroundMigration::Logger).to have_received(:info).with(migrator: described_class.name,
+ message: 'Dismissal information has been copied',
+ count: 2)
+ expect(::Gitlab::BackgroundMigration::Logger).to have_received(:warn).with(migrator: described_class.name,
+ message: 'Could not update vulnerability!',
+ vulnerability_id: vulnerability_2.id)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/replace_blocked_by_links_spec.rb b/spec/lib/gitlab/background_migration/replace_blocked_by_links_spec.rb
index fa4f2d1fd88..561a602fab9 100644
--- a/spec/lib/gitlab/background_migration/replace_blocked_by_links_spec.rb
+++ b/spec/lib/gitlab/background_migration/replace_blocked_by_links_spec.rb
@@ -9,28 +9,34 @@ RSpec.describe Gitlab::BackgroundMigration::ReplaceBlockedByLinks, schema: 20201
let(:issue2) { table(:issues).create!(project_id: project.id, title: 'b') }
let(:issue3) { table(:issues).create!(project_id: project.id, title: 'c') }
let(:issue_links) { table(:issue_links) }
- let!(:blocks_link) { issue_links.create!(source_id: issue1.id, target_id: issue2.id, link_type: 1) }
- let!(:bidirectional_link) { issue_links.create!(source_id: issue2.id, target_id: issue1.id, link_type: 2) }
- let!(:blocked_link) { issue_links.create!(source_id: issue1.id, target_id: issue3.id, link_type: 2) }
+ let!(:blocked_link1) { issue_links.create!(source_id: issue2.id, target_id: issue1.id, link_type: 2) }
+ let!(:opposite_link1) { issue_links.create!(source_id: issue1.id, target_id: issue2.id, link_type: 1) }
+ let!(:blocked_link2) { issue_links.create!(source_id: issue1.id, target_id: issue3.id, link_type: 2) }
+ let!(:opposite_link2) { issue_links.create!(source_id: issue3.id, target_id: issue1.id, link_type: 0) }
+ let!(:nochange_link) { issue_links.create!(source_id: issue2.id, target_id: issue3.id, link_type: 1) }
subject { described_class.new.perform(issue_links.minimum(:id), issue_links.maximum(:id)) }
- it 'deletes issue links where opposite relation already exists' do
- expect { subject }.to change { issue_links.count }.by(-1)
+ it 'deletes any opposite relations' do
+ subject
+
+ expect(issue_links.ids).to match_array([nochange_link.id, blocked_link1.id, blocked_link2.id])
end
it 'ignores issue links other than blocked_by' do
subject
- expect(blocks_link.reload.link_type).to eq(1)
+ expect(nochange_link.reload.link_type).to eq(1)
end
it 'updates blocked_by issue links' do
subject
- link = blocked_link.reload
- expect(link.link_type).to eq(1)
- expect(link.source_id).to eq(issue3.id)
- expect(link.target_id).to eq(issue1.id)
+ expect(blocked_link1.reload.link_type).to eq(1)
+ expect(blocked_link1.source_id).to eq(issue1.id)
+ expect(blocked_link1.target_id).to eq(issue2.id)
+ expect(blocked_link2.reload.link_type).to eq(1)
+ expect(blocked_link2.source_id).to eq(issue3.id)
+ expect(blocked_link2.target_id).to eq(issue1.id)
end
end
diff --git a/spec/lib/gitlab/badge/coverage/report_spec.rb b/spec/lib/gitlab/badge/coverage/report_spec.rb
index 4a9508712a4..3b5ea3291e4 100644
--- a/spec/lib/gitlab/badge/coverage/report_spec.rb
+++ b/spec/lib/gitlab/badge/coverage/report_spec.rb
@@ -3,13 +3,24 @@
require 'spec_helper'
RSpec.describe Gitlab::Badge::Coverage::Report do
- let(:project) { create(:project, :repository) }
- let(:job_name) { nil }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:success_pipeline) { create(:ci_pipeline, :success, project: project) }
+ let_it_be(:running_pipeline) { create(:ci_pipeline, :running, project: project) }
+ let_it_be(:failure_pipeline) { create(:ci_pipeline, :failed, project: project) }
+
+ let_it_be(:builds) do
+ [
+ create(:ci_build, :success, pipeline: success_pipeline, coverage: 40, created_at: 9.seconds.ago, name: 'coverage'),
+ create(:ci_build, :success, pipeline: success_pipeline, coverage: 60, created_at: 8.seconds.ago)
+ ]
+ end
let(:badge) do
described_class.new(project, 'master', opts: { job: job_name })
end
+ let(:job_name) { nil }
+
describe '#entity' do
it 'describes a coverage' do
expect(badge.entity).to eq 'coverage'
@@ -28,81 +39,61 @@ RSpec.describe Gitlab::Badge::Coverage::Report do
end
end
- shared_examples 'unknown coverage report' do
- context 'particular job specified' do
- let(:job_name) { '' }
-
- it 'returns nil' do
- expect(badge.status).to be_nil
+ describe '#status' do
+ context 'with no job specified' do
+ it 'returns the most recent successful pipeline coverage value' do
+ expect(badge.status).to eq(50.00)
end
- end
- context 'particular job not specified' do
- let(:job_name) { nil }
+ context 'and no successful pipelines' do
+ before do
+ allow(badge).to receive(:successful_pipeline).and_return(nil)
+ end
- it 'returns nil' do
- expect(badge.status).to be_nil
+ it 'returns nil' do
+ expect(badge.status).to eq(nil)
+ end
end
end
- end
- context 'when latest successful pipeline exists' do
- before do
- create_pipeline do |pipeline|
- create(:ci_build, :success, pipeline: pipeline, name: 'first', coverage: 40)
- create(:ci_build, :success, pipeline: pipeline, coverage: 60)
- end
+ context 'with a blank job name' do
+ let(:job_name) { ' ' }
- create_pipeline do |pipeline|
- create(:ci_build, :failed, pipeline: pipeline, coverage: 10)
+ it 'returns the latest successful pipeline coverage value' do
+ expect(badge.status).to eq(50.00)
end
end
- context 'when particular job specified' do
- let(:job_name) { 'first' }
+ context 'with an unmatching job name specified' do
+ let(:job_name) { 'incorrect name' }
- it 'returns coverage for the particular job' do
- expect(badge.status).to eq 40
+ it 'returns nil' do
+ expect(badge.status).to be_nil
end
end
- context 'when particular job not specified' do
- let(:job_name) { '' }
+ context 'with a matching job name specified' do
+ let(:job_name) { 'coverage' }
- it 'returns arithemetic mean for the pipeline' do
- expect(badge.status).to eq 50
+ it 'returns the pipeline coverage value' do
+ expect(badge.status).to eq(40.00)
end
- end
- end
-
- context 'when only failed pipeline exists' do
- before do
- create_pipeline do |pipeline|
- create(:ci_build, :failed, pipeline: pipeline, coverage: 10)
- end
- end
-
- it_behaves_like 'unknown coverage report'
- context 'particular job specified' do
- let(:job_name) { 'nonexistent' }
+ context 'with a more recent running pipeline' do
+ let!(:another_build) { create(:ci_build, :success, pipeline: running_pipeline, coverage: 20, created_at: 7.seconds.ago, name: 'coverage') }
- it 'retruns nil' do
- expect(badge.status).to be_nil
+ it 'returns the running pipeline coverage value' do
+ expect(badge.status).to eq(20.00)
+ end
end
- end
- end
- context 'pipeline does not exist' do
- it_behaves_like 'unknown coverage report'
- end
-
- def create_pipeline
- opts = { project: project, sha: project.commit.id, ref: 'master' }
+ context 'with a more recent failed pipeline' do
+ let!(:another_build) { create(:ci_build, :success, pipeline: failure_pipeline, coverage: 10, created_at: 6.seconds.ago, name: 'coverage') }
- create(:ci_pipeline, opts).tap do |pipeline|
- yield pipeline
- ::Ci::ProcessPipelineService.new(pipeline).execute
+ it 'returns the failed pipeline coverage value' do
+ expect(badge.status).to eq(10.00)
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb
index 80ec5ec1fc7..c9ad78ec760 100644
--- a/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb
@@ -112,7 +112,13 @@ RSpec.describe Gitlab::BitbucketServerImport::Importer do
allow(subject).to receive(:delete_temp_branches)
allow(subject).to receive(:restore_branches)
- allow(subject.client).to receive(:pull_requests).and_return([pull_request])
+ allow(subject.client).to receive(:pull_requests).and_return([pull_request], [])
+ end
+
+ # As we are using Caching with redis, it is best to clean the cache after each test run, else we need to wait for
+ # the expiration by the importer
+ after do
+ Gitlab::Cache::Import::Caching.expire(subject.already_imported_cache_key, 0)
end
it 'imports merge event' do
@@ -463,6 +469,47 @@ RSpec.describe Gitlab::BitbucketServerImport::Importer do
subject.execute
end
+
+ describe 'import pull requests with caching' do
+ let(:pull_request_already_imported) do
+ instance_double(
+ BitbucketServer::Representation::PullRequest,
+ iid: 11)
+ end
+
+ let(:pull_request_to_be_imported) do
+ instance_double(
+ BitbucketServer::Representation::PullRequest,
+ iid: 12,
+ source_branch_sha: sample.commits.last,
+ source_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.source_branch,
+ target_branch_sha: sample.commits.first,
+ target_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.target_branch,
+ title: 'This is a title',
+ description: 'This is a test pull request',
+ state: 'merged',
+ author: 'Test Author',
+ author_email: pull_request_author.email,
+ author_username: pull_request_author.username,
+ created_at: Time.now,
+ updated_at: Time.now,
+ raw: {},
+ merged?: true)
+ end
+
+ before do
+ Gitlab::Cache::Import::Caching.set_add(subject.already_imported_cache_key, pull_request_already_imported.iid)
+ allow(subject.client).to receive(:pull_requests).and_return([pull_request_to_be_imported, pull_request_already_imported], [])
+ end
+
+ it 'only imports one Merge Request, as the other one is in the cache' do
+ expect(subject.client).to receive(:activities).and_return([merge_event])
+ expect { subject.execute }.to change { MergeRequest.count }.by(1)
+
+ expect(Gitlab::Cache::Import::Caching.set_includes?(subject.already_imported_cache_key, pull_request_already_imported.iid)).to eq(true)
+ expect(Gitlab::Cache::Import::Caching.set_includes?(subject.already_imported_cache_key, pull_request_to_be_imported.iid)).to eq(true)
+ end
+ end
end
describe 'inaccessible branches' do
@@ -488,7 +535,7 @@ RSpec.describe Gitlab::BitbucketServerImport::Importer do
updated_at: Time.now,
merged?: true)
- expect(subject.client).to receive(:pull_requests).and_return([pull_request])
+ expect(subject.client).to receive(:pull_requests).and_return([pull_request], [])
expect(subject.client).to receive(:activities).and_return([])
expect(subject).to receive(:import_repository).twice
end
@@ -525,4 +572,36 @@ RSpec.describe Gitlab::BitbucketServerImport::Importer do
expect { subject.execute }.to change { MergeRequest.count }.by(1)
end
end
+
+ context "lfs files" do
+ before do
+ allow(project).to receive(:lfs_enabled?).and_return(true)
+ allow(subject).to receive(:import_repository)
+ allow(subject).to receive(:import_pull_requests)
+ end
+
+ it "downloads lfs objects if lfs_enabled is enabled for project" do
+ expect_next_instance_of(Projects::LfsPointers::LfsImportService) do |lfs_import_service|
+ expect(lfs_import_service).to receive(:execute).and_return(status: :success)
+ end
+
+ subject.execute
+ end
+
+ it "adds the error message when the lfs download fails" do
+ allow_next_instance_of(Projects::LfsPointers::LfsImportService) do |lfs_import_service|
+ expect(lfs_import_service).to receive(:execute).and_return(status: :error, message: "LFS server not reachable")
+ end
+
+ subject.execute
+
+ expect(project.import_state.reload.last_error).to eq(Gitlab::Json.dump({
+ message: "The remote data could not be fully imported.",
+ errors: [{
+ type: "lfs_objects",
+ errors: "The Lfs import process failed. LFS server not reachable"
+ }]
+ }))
+ end
+ end
end
diff --git a/spec/lib/gitlab/bulk_import/client_spec.rb b/spec/lib/gitlab/bulk_import/client_spec.rb
deleted file mode 100644
index a6f8dd6d194..00000000000
--- a/spec/lib/gitlab/bulk_import/client_spec.rb
+++ /dev/null
@@ -1,95 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BulkImport::Client do
- include ImportSpecHelper
-
- let(:uri) { 'http://gitlab.example' }
- let(:token) { 'token' }
- let(:resource) { 'resource' }
-
- subject { described_class.new(uri: uri, token: token) }
-
- describe '#get' do
- let(:response_double) { double(code: 200, success?: true, parsed_response: {}) }
-
- shared_examples 'performs network request' do
- it 'performs network request' do
- expect(Gitlab::HTTP).to receive(:get).with(*expected_args).and_return(response_double)
-
- subject.get(resource)
- end
- end
-
- describe 'parsed response' do
- it 'returns parsed response' do
- response_double = double(code: 200, success?: true, parsed_response: [{ id: 1 }, { id: 2 }])
-
- allow(Gitlab::HTTP).to receive(:get).and_return(response_double)
-
- expect(subject.get(resource)).to eq(response_double.parsed_response)
- end
- end
-
- describe 'request query' do
- include_examples 'performs network request' do
- let(:expected_args) do
- [
- anything,
- hash_including(
- query: {
- page: described_class::DEFAULT_PAGE,
- per_page: described_class::DEFAULT_PER_PAGE
- }
- )
- ]
- end
- end
- end
-
- describe 'request headers' do
- include_examples 'performs network request' do
- let(:expected_args) do
- [
- anything,
- hash_including(
- headers: {
- 'Content-Type' => 'application/json',
- 'Authorization' => "Bearer #{token}"
- }
- )
- ]
- end
- end
- end
-
- describe 'request uri' do
- include_examples 'performs network request' do
- let(:expected_args) do
- ['http://gitlab.example:80/api/v4/resource', anything]
- end
- end
- end
-
- context 'error handling' do
- context 'when error occurred' do
- it 'raises ConnectionError' do
- allow(Gitlab::HTTP).to receive(:get).and_raise(Errno::ECONNREFUSED)
-
- expect { subject.get(resource) }.to raise_exception(described_class::ConnectionError)
- end
- end
-
- context 'when response is not success' do
- it 'raises ConnectionError' do
- response_double = double(code: 503, success?: false)
-
- allow(Gitlab::HTTP).to receive(:get).and_return(response_double)
-
- expect { subject.get(resource) }.to raise_exception(described_class::ConnectionError)
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/chat/output_spec.rb b/spec/lib/gitlab/chat/output_spec.rb
index 38e17c39fad..34f6bc0904c 100644
--- a/spec/lib/gitlab/chat/output_spec.rb
+++ b/spec/lib/gitlab/chat/output_spec.rb
@@ -8,62 +8,67 @@ RSpec.describe Gitlab::Chat::Output do
end
let(:output) { described_class.new(build) }
+ let(:trace) { Gitlab::Ci::Trace.new(build) }
+
+ before do
+ trace.set("\e[0KRunning with gitlab-runner 13.4.0~beta.108.g2ed41114 (2ed41114)
+\e[0;m\e[0K on GDK local runner g_XWCUS4
+\e[0;msection_start:1604068171:resolve_secrets\r\e[0K\e[0K\e[36;1mResolving secrets\e[0;m
+\e[0;msection_end:1604068171:resolve_secrets\r\e[0Ksection_start:1604068171:prepare_executor\r\e[0K\e[0K\e[36;1mPreparing the \"docker\" executor\e[0;m
+\e[0;m\e[0KUsing Docker executor with image ubuntu:20.04 ...
+\e[0;m\e[0KUsing locally found image version due to if-not-present pull policy
+\e[0;m\e[0KUsing docker image sha256:d70eaf7277eada08fca944de400e7e4dd97b1262c06ed2b1011500caa4decaf1 for ubuntu:20.04 with digest ubuntu@sha256:fff16eea1a8ae92867721d90c59a75652ea66d29c05294e6e2f898704bdb8cf1 ...
+\e[0;msection_end:1604068172:prepare_executor\r\e[0Ksection_start:1604068172:prepare_script\r\e[0K\e[0K\e[36;1mPreparing environment\e[0;m
+\e[0;mRunning on runner-gxwcus4-project-21-concurrent-0 via MacBook-Pro.local...
+section_end:1604068173:prepare_script\r\e[0Ksection_start:1604068173:get_sources\r\e[0K\e[0K\e[36;1mGetting source from Git repository\e[0;m
+\e[0;m\e[32;1mFetching changes with git depth set to 50...\e[0;m
+Initialized empty Git repository in /builds/267388-group-1/playground/.git/
+\e[32;1mCreated fresh repository.\e[0;m
+\e[32;1mChecking out 6c8eb7f4 as master...\e[0;m
+
+\e[32;1mSkipping Git submodules setup\e[0;m
+section_end:1604068175:get_sources\r\e[0Ksection_start:1604068175:step_script\r\e[0K\e[0K\e[36;1mExecuting \"step_script\" stage of the job script\e[0;m
+\e[0;m\e[32;1m$ echo \"success!\"\e[0;m
+success!
+section_end:1604068175:step_script\r\e[0Ksection_start:1604068175:chat_reply\r\033[0K
+Chat Reply
+section_end:1604068176:chat_reply\r\033[0K\e[32;1mJob succeeded
+\e[0;m")
+ end
describe '#to_s' do
- it 'returns the build output as a String' do
- trace = Gitlab::Ci::Trace.new(build)
-
- trace.set("echo hello\nhello")
-
- allow(build)
- .to receive(:trace)
- .and_return(trace)
-
- allow(output)
- .to receive(:read_offset_and_length)
- .and_return([0, 13])
-
- expect(output.to_s).to eq('he')
+ it 'returns the chat reply as a String' do
+ expect(output.to_s).to eq("Chat Reply")
end
- end
- describe '#read_offset_and_length' do
context 'without the chat_reply trace section' do
- it 'falls back to using the build_script trace section' do
- expect(output)
- .to receive(:find_build_trace_section)
- .with('chat_reply')
- .and_return(nil)
-
- expect(output)
- .to receive(:find_build_trace_section)
- .with('build_script')
- .and_return({ name: 'build_script', byte_start: 1, byte_end: 4 })
-
- expect(output.read_offset_and_length).to eq([1, 3])
+ before do
+ trace.set(trace.raw.gsub('chat_reply', 'not_found'))
end
- end
- context 'without the build_script trace section' do
- it 'raises MissingBuildSectionError' do
- expect { output.read_offset_and_length }
- .to raise_error(described_class::MissingBuildSectionError)
+ it 'falls back to using the step_script trace section' do
+ expect(output.to_s).to eq("\e[0;m\e[32;1m$ echo \"success!\"\e[0;m\nsuccess!")
end
- end
-
- context 'with the chat_reply trace section' do
- it 'returns the read offset and length as an Array' do
- trace = Gitlab::Ci::Trace.new(build)
-
- allow(build)
- .to receive(:trace)
- .and_return(trace)
-
- allow(trace)
- .to receive(:extract_sections)
- .and_return([{ name: 'chat_reply', byte_start: 1, byte_end: 4 }])
- expect(output.read_offset_and_length).to eq([1, 3])
+ context 'without the step_script trace section' do
+ before do
+ trace.set(trace.raw.gsub('step_script', 'build_script'))
+ end
+
+ it 'falls back to using the build_script trace section' do
+ expect(output.to_s).to eq("\e[0;m\e[32;1m$ echo \"success!\"\e[0;m\nsuccess!")
+ end
+
+ context 'without the build_script trace section' do
+ before do
+ trace.set(trace.raw.gsub('build_script', 'not_found'))
+ end
+
+ it 'raises MissingBuildSectionError' do
+ expect { output.to_s }
+ .to raise_error(described_class::MissingBuildSectionError)
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/build/rules/rule/clause/changes_spec.rb b/spec/lib/gitlab/ci/build/rules/rule/clause/changes_spec.rb
index cf52f601006..d20ea6c9202 100644
--- a/spec/lib/gitlab/ci/build/rules/rule/clause/changes_spec.rb
+++ b/spec/lib/gitlab/ci/build/rules/rule/clause/changes_spec.rb
@@ -13,5 +13,47 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::Changes do
subject { described_class.new(globs).satisfied_by?(pipeline, nil) }
end
+
+ context 'when using variable expansion' do
+ let(:pipeline) { build(:ci_pipeline) }
+ let(:modified_paths) { ['helm/test.txt'] }
+ let(:globs) { ['$HELM_DIR/**/*'] }
+ let(:context) { double('context') }
+
+ subject { described_class.new(globs).satisfied_by?(pipeline, context) }
+
+ before do
+ allow(pipeline).to receive(:modified_paths).and_return(modified_paths)
+ end
+
+ context 'when context is nil' do
+ let(:context) {}
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when context has the specified variables' do
+ let(:variables) do
+ [{ key: "HELM_DIR", value: "helm", public: true }]
+ end
+
+ before do
+ allow(context).to receive(:variables).and_return(variables)
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when variable expansion does not match' do
+ let(:globs) { ['path/with/$in/it/*'] }
+ let(:modified_paths) { ['path/with/$in/it/file.txt'] }
+
+ before do
+ allow(context).to receive(:variables).and_return([])
+ end
+
+ it { is_expected.to be_truthy }
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/charts_spec.rb b/spec/lib/gitlab/ci/charts_spec.rb
index e00e5ed3920..cfc2019a89b 100644
--- a/spec/lib/gitlab/ci/charts_spec.rb
+++ b/spec/lib/gitlab/ci/charts_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Charts do
- context "yearchart" do
+ context 'yearchart' do
let(:project) { create(:project) }
let(:chart) { Gitlab::Ci::Charts::YearChart.new(project) }
@@ -16,9 +16,13 @@ RSpec.describe Gitlab::Ci::Charts do
it 'starts at the beginning of the current year' do
expect(chart.from).to eq(chart.to.years_ago(1).beginning_of_month.beginning_of_day)
end
+
+ it 'uses %B %Y as labels format' do
+ expect(chart.labels).to include(chart.from.strftime('%B %Y'))
+ end
end
- context "monthchart" do
+ context 'monthchart' do
let(:project) { create(:project) }
let(:chart) { Gitlab::Ci::Charts::MonthChart.new(project) }
@@ -31,9 +35,13 @@ RSpec.describe Gitlab::Ci::Charts do
it 'starts one month ago' do
expect(chart.from).to eq(1.month.ago.beginning_of_day)
end
+
+ it 'uses %d %B as labels format' do
+ expect(chart.labels).to include(chart.from.strftime('%d %B'))
+ end
end
- context "weekchart" do
+ context 'weekchart' do
let(:project) { create(:project) }
let(:chart) { Gitlab::Ci::Charts::WeekChart.new(project) }
@@ -46,9 +54,13 @@ RSpec.describe Gitlab::Ci::Charts do
it 'starts one week ago' do
expect(chart.from).to eq(1.week.ago.beginning_of_day)
end
+
+ it 'uses %d %B as labels format' do
+ expect(chart.labels).to include(chart.from.strftime('%d %B'))
+ end
end
- context "pipeline_times" do
+ context 'pipeline_times' do
let(:project) { create(:project) }
let(:chart) { Gitlab::Ci::Charts::PipelineTime.new(project) }
diff --git a/spec/lib/gitlab/ci/config/entry/product/matrix_spec.rb b/spec/lib/gitlab/ci/config/entry/product/matrix_spec.rb
index 3388ae0af2f..ff44a235ea5 100644
--- a/spec/lib/gitlab/ci/config/entry/product/matrix_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/product/matrix_spec.rb
@@ -46,98 +46,53 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Product::Matrix do
end
end
- context 'with one_dimensional_matrix feature flag enabled' do
- before do
- stub_feature_flags(one_dimensional_matrix: true)
- matrix.compose!
+ context 'when entry config has only one variable with multiple values' do
+ let(:config) do
+ [
+ {
+ 'VAR_1' => %w[build test]
+ }
+ ]
end
- context 'when entry config has only one variable with multiple values' do
- let(:config) do
- [
- {
- 'VAR_1' => %w[build test]
- }
- ]
- end
-
- describe '#valid?' do
- it { is_expected.to be_valid }
- end
+ describe '#valid?' do
+ it { is_expected.to be_valid }
+ end
- describe '#errors' do
- it 'returns no errors' do
- expect(matrix.errors)
- .to be_empty
- end
+ describe '#errors' do
+ it 'returns no errors' do
+ expect(matrix.errors)
+ .to be_empty
end
+ end
- describe '#value' do
- before do
- matrix.compose!
- end
-
- it 'returns the value without raising an error' do
- expect(matrix.value).to eq([{ 'VAR_1' => %w[build test] }])
- end
+ describe '#value' do
+ before do
+ matrix.compose!
end
- context 'when entry config has only one variable with one value' do
- let(:config) do
- [
- {
- 'VAR_1' => %w[test]
- }
- ]
- end
-
- describe '#valid?' do
- it { is_expected.to be_valid }
- end
-
- describe '#errors' do
- it 'returns no errors' do
- expect(matrix.errors)
- .to be_empty
- end
- end
-
- describe '#value' do
- before do
- matrix.compose!
- end
-
- it 'returns the value without raising an error' do
- expect(matrix.value).to eq([{ 'VAR_1' => %w[test] }])
- end
- end
+ it 'returns the value without raising an error' do
+ expect(matrix.value).to eq([{ 'VAR_1' => %w[build test] }])
end
end
- end
- context 'with one_dimensional_matrix feature flag disabled' do
- before do
- stub_feature_flags(one_dimensional_matrix: false)
- matrix.compose!
- end
-
- context 'when entry config has only one variable with multiple values' do
+ context 'when entry config has only one variable with one value' do
let(:config) do
[
{
- 'VAR_1' => %w[build test]
+ 'VAR_1' => %w[test]
}
]
end
describe '#valid?' do
- it { is_expected.not_to be_valid }
+ it { is_expected.to be_valid }
end
describe '#errors' do
- it 'returns error about too many jobs' do
+ it 'returns no errors' do
expect(matrix.errors)
- .to include('variables config requires at least 2 items')
+ .to be_empty
end
end
@@ -147,38 +102,7 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Product::Matrix do
end
it 'returns the value without raising an error' do
- expect(matrix.value).to eq([{ 'VAR_1' => %w[build test] }])
- end
- end
-
- context 'when entry config has only one variable with one value' do
- let(:config) do
- [
- {
- 'VAR_1' => %w[test]
- }
- ]
- end
-
- describe '#valid?' do
- it { is_expected.not_to be_valid }
- end
-
- describe '#errors' do
- it 'returns no errors' do
- expect(matrix.errors)
- .to include('variables config requires at least 2 items')
- end
- end
-
- describe '#value' do
- before do
- matrix.compose!
- end
-
- it 'returns the value without raising an error' do
- expect(matrix.value).to eq([{ 'VAR_1' => %w[test] }])
- end
+ expect(matrix.value).to eq([{ 'VAR_1' => %w[test] }])
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/product/variables_spec.rb b/spec/lib/gitlab/ci/config/entry/product/variables_spec.rb
index 407efb438b5..5e920ce34e0 100644
--- a/spec/lib/gitlab/ci/config/entry/product/variables_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/product/variables_spec.rb
@@ -1,7 +1,6 @@
# frozen_string_literal: true
-# After Feature one_dimensional_matrix is removed, this can be changed back to fast_spec_helper
-require 'spec_helper'
+require 'fast_spec_helper'
require_dependency 'active_model'
RSpec.describe Gitlab::Ci::Config::Entry::Product::Variables do
@@ -46,70 +45,18 @@ RSpec.describe Gitlab::Ci::Config::Entry::Product::Variables do
end
end
- context 'with one_dimensional_matrix feature flag enabled' do
- context 'with only one variable' do
- before do
- stub_feature_flags(one_dimensional_matrix: true)
- end
- let(:config) { { VAR: 'test' } }
-
- describe '#valid?' do
- it 'is valid' do
- expect(entry).to be_valid
- end
- end
+ context 'with only one variable' do
+ let(:config) { { VAR: 'test' } }
- describe '#errors' do
- it 'does not append errors' do
- expect(entry.errors).to be_empty
- end
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
end
end
- end
-
- context 'with one_dimensional_matrix feature flag disabled' do
- context 'when entry value is not correct' do
- before do
- stub_feature_flags(one_dimensional_matrix: false)
- end
- shared_examples 'invalid variables' do |message|
- describe '#errors' do
- it 'saves errors' do
- expect(entry.errors).to include(message)
- end
- end
-
- describe '#valid?' do
- it 'is not valid' do
- expect(entry).not_to be_valid
- end
- end
- end
-
- context 'with array' do
- let(:config) { [:VAR, 'test'] }
- it_behaves_like 'invalid variables', /should be a hash of key value pairs/
- end
-
- context 'with empty array' do
- let(:config) { { VAR: 'test', VAR2: [] } }
-
- it_behaves_like 'invalid variables', /should be a hash of key value pairs/
- end
-
- context 'with nested array' do
- let(:config) { { VAR: 'test', VAR2: [1, [2]] } }
-
- it_behaves_like 'invalid variables', /should be a hash of key value pairs/
- end
-
- context 'with one_dimensional_matrix feature flag disabled' do
- context 'with only one variable' do
- let(:config) { { VAR: 'test' } }
-
- it_behaves_like 'invalid variables', /variables config requires at least 2 items/
- end
+ describe '#errors' do
+ it 'does not append errors' do
+ expect(entry.errors).to be_empty
end
end
end
diff --git a/spec/lib/gitlab/ci/config/external/mapper_spec.rb b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
index bf14d8d6b34..7ad57827e30 100644
--- a/spec/lib/gitlab/ci/config/external/mapper_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
@@ -100,6 +100,42 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
expect { subject }.to raise_error(described_class::AmbigiousSpecificationError)
end
end
+
+ context "when the key is a project's file" do
+ let(:values) do
+ { include: { project: project.full_path, file: local_file },
+ image: 'ruby:2.7' }
+ end
+
+ it 'returns File instances' do
+ expect(subject).to contain_exactly(
+ an_instance_of(Gitlab::Ci::Config::External::File::Project))
+ end
+ end
+
+ context "when the key is project's files" do
+ let(:values) do
+ { include: { project: project.full_path, file: [local_file, 'another_file_path.yml'] },
+ image: 'ruby:2.7' }
+ end
+
+ it 'returns two File instances' do
+ expect(subject).to contain_exactly(
+ an_instance_of(Gitlab::Ci::Config::External::File::Project),
+ an_instance_of(Gitlab::Ci::Config::External::File::Project))
+ end
+
+ context 'when FF ci_include_multiple_files_from_project is disabled' do
+ before do
+ stub_feature_flags(ci_include_multiple_files_from_project: false)
+ end
+
+ it 'returns a File instance' do
+ expect(subject).to contain_exactly(
+ an_instance_of(Gitlab::Ci::Config::External::File::Project))
+ end
+ end
+ end
end
context "when 'include' is defined as an array" do
@@ -161,6 +197,16 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
it 'raises an exception' do
expect { subject }.to raise_error(described_class::DuplicateIncludesError)
end
+
+ context 'when including multiple files from a project' do
+ let(:values) do
+ { include: { project: project.full_path, file: [local_file, local_file] } }
+ end
+
+ it 'raises an exception' do
+ expect { subject }.to raise_error(described_class::DuplicateIncludesError)
+ end
+ end
end
context "when too many 'includes' are defined" do
@@ -179,6 +225,16 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
it 'raises an exception' do
expect { subject }.to raise_error(described_class::TooManyIncludesError)
end
+
+ context 'when including multiple files from a project' do
+ let(:values) do
+ { include: { project: project.full_path, file: [local_file, 'another_file_path.yml'] } }
+ end
+
+ it 'raises an exception' do
+ expect { subject }.to raise_error(described_class::TooManyIncludesError)
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/config/external/processor_spec.rb b/spec/lib/gitlab/ci/config/external/processor_spec.rb
index 9786e050399..150a2ec2929 100644
--- a/spec/lib/gitlab/ci/config/external/processor_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/processor_spec.rb
@@ -302,5 +302,82 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do
end
end
end
+
+ context 'when a valid project file is defined' do
+ let(:values) do
+ {
+ include: { project: another_project.full_path, file: '/templates/my-build.yml' },
+ image: 'ruby:2.7'
+ }
+ end
+
+ before do
+ another_project.add_developer(user)
+
+ allow_next_instance_of(Repository) do |repository|
+ allow(repository).to receive(:blob_data_at).with(another_project.commit.id, '/templates/my-build.yml') do
+ <<~HEREDOC
+ my_build:
+ script: echo Hello World
+ HEREDOC
+ end
+ end
+ end
+
+ it 'appends the file to the values' do
+ output = processor.perform
+ expect(output.keys).to match_array([:image, :my_build])
+ end
+ end
+
+ context 'when valid project files are defined in a single include' do
+ let(:values) do
+ {
+ include: {
+ project: another_project.full_path,
+ file: ['/templates/my-build.yml', '/templates/my-test.yml']
+ },
+ image: 'ruby:2.7'
+ }
+ end
+
+ before do
+ another_project.add_developer(user)
+
+ allow_next_instance_of(Repository) do |repository|
+ allow(repository).to receive(:blob_data_at).with(another_project.commit.id, '/templates/my-build.yml') do
+ <<~HEREDOC
+ my_build:
+ script: echo Hello World
+ HEREDOC
+ end
+
+ allow(repository).to receive(:blob_data_at).with(another_project.commit.id, '/templates/my-test.yml') do
+ <<~HEREDOC
+ my_test:
+ script: echo Hello World
+ HEREDOC
+ end
+ end
+ end
+
+ it 'appends the files to the values' do
+ output = processor.perform
+ expect(output.keys).to match_array([:image, :my_build, :my_test])
+ end
+
+ context 'when FF ci_include_multiple_files_from_project is disabled' do
+ before do
+ stub_feature_flags(ci_include_multiple_files_from_project: false)
+ end
+
+ it 'raises an error' do
+ expect { processor.perform }.to raise_error(
+ described_class::IncludeError,
+ 'Included file `["/templates/my-build.yml", "/templates/my-test.yml"]` needs to be a string'
+ )
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/config_spec.rb b/spec/lib/gitlab/ci/config_spec.rb
index 41a45fe4ab7..b5a0f0e3fd7 100644
--- a/spec/lib/gitlab/ci/config_spec.rb
+++ b/spec/lib/gitlab/ci/config_spec.rb
@@ -246,6 +246,14 @@ RSpec.describe Gitlab::Ci::Config do
let(:remote_location) { 'https://gitlab.com/gitlab-org/gitlab-foss/blob/1234/.gitlab-ci-1.yml' }
let(:local_location) { 'spec/fixtures/gitlab/ci/external_files/.gitlab-ci-template-1.yml' }
+ let(:local_file_content) do
+ File.read(Rails.root.join(local_location))
+ end
+
+ let(:local_location_hash) do
+ YAML.safe_load(local_file_content).deep_symbolize_keys
+ end
+
let(:remote_file_content) do
<<~HEREDOC
variables:
@@ -256,8 +264,8 @@ RSpec.describe Gitlab::Ci::Config do
HEREDOC
end
- let(:local_file_content) do
- File.read(Rails.root.join(local_location))
+ let(:remote_file_hash) do
+ YAML.safe_load(remote_file_content).deep_symbolize_keys
end
let(:gitlab_ci_yml) do
@@ -283,22 +291,11 @@ RSpec.describe Gitlab::Ci::Config do
context "when gitlab_ci_yml has valid 'include' defined" do
it 'returns a composed hash' do
- before_script_values = [
- "apt-get update -qq && apt-get install -y -qq sqlite3 libsqlite3-dev nodejs", "ruby -v",
- "which ruby",
- "bundle install --jobs $(nproc) \"${FLAGS[@]}\""
- ]
- variables = {
- POSTGRES_USER: "user",
- POSTGRES_PASSWORD: "testing-password",
- POSTGRES_ENABLED: "true",
- POSTGRES_DB: "$CI_ENVIRONMENT_SLUG"
- }
composed_hash = {
- before_script: before_script_values,
+ before_script: local_location_hash[:before_script],
image: "ruby:2.7",
rspec: { script: ["bundle exec rspec"] },
- variables: variables
+ variables: remote_file_hash[:variables]
}
expect(config.to_hash).to eq(composed_hash)
@@ -575,5 +572,56 @@ RSpec.describe Gitlab::Ci::Config do
)
end
end
+
+ context "when including multiple files from a project" do
+ let(:other_file_location) { 'my_builds.yml' }
+
+ let(:other_file_content) do
+ <<~HEREDOC
+ build:
+ stage: build
+ script: echo hello
+
+ rspec:
+ stage: test
+ script: bundle exec rspec
+ HEREDOC
+ end
+
+ let(:gitlab_ci_yml) do
+ <<~HEREDOC
+ include:
+ - project: #{project.full_path}
+ file:
+ - #{local_location}
+ - #{other_file_location}
+
+ image: ruby:2.7
+ HEREDOC
+ end
+
+ before do
+ project.add_developer(user)
+
+ allow_next_instance_of(Repository) do |repository|
+ allow(repository).to receive(:blob_data_at).with(an_instance_of(String), local_location)
+ .and_return(local_file_content)
+
+ allow(repository).to receive(:blob_data_at).with(an_instance_of(String), other_file_location)
+ .and_return(other_file_content)
+ end
+ end
+
+ it 'returns a composed hash' do
+ composed_hash = {
+ before_script: local_location_hash[:before_script],
+ image: "ruby:2.7",
+ build: { stage: "build", script: "echo hello" },
+ rspec: { stage: "test", script: "bundle exec rspec" }
+ }
+
+ expect(config.to_hash).to eq(composed_hash)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/jwt_spec.rb b/spec/lib/gitlab/ci/jwt_spec.rb
index 9b133efad9c..3130c0c0c41 100644
--- a/spec/lib/gitlab/ci/jwt_spec.rb
+++ b/spec/lib/gitlab/ci/jwt_spec.rb
@@ -93,32 +93,65 @@ RSpec.describe Gitlab::Ci::Jwt do
end
describe '.for_build' do
- let(:rsa_key) { OpenSSL::PKey::RSA.new(Rails.application.secrets.openid_connect_signing_key) }
+ shared_examples 'generating JWT for build' do
+ context 'when signing key is present' do
+ let(:rsa_key) { OpenSSL::PKey::RSA.generate(1024) }
+ let(:rsa_key_data) { rsa_key.to_s }
- subject(:jwt) { described_class.for_build(build) }
+ it 'generates JWT with key id' do
+ _payload, headers = JWT.decode(jwt, rsa_key.public_key, true, { algorithm: 'RS256' })
+
+ expect(headers['kid']).to eq(rsa_key.public_key.to_jwk['kid'])
+ end
+
+ it 'generates JWT for the given job with ttl equal to build timeout' do
+ expect(build).to receive(:metadata_timeout).and_return(3_600)
+
+ payload, _headers = JWT.decode(jwt, rsa_key.public_key, true, { algorithm: 'RS256' })
+ ttl = payload["exp"] - payload["iat"]
+
+ expect(ttl).to eq(3_600)
+ end
+
+ it 'generates JWT for the given job with default ttl if build timeout is not set' do
+ expect(build).to receive(:metadata_timeout).and_return(nil)
+
+ payload, _headers = JWT.decode(jwt, rsa_key.public_key, true, { algorithm: 'RS256' })
+ ttl = payload["exp"] - payload["iat"]
- it 'generates JWT with key id' do
- _payload, headers = JWT.decode(jwt, rsa_key.public_key, true, { algorithm: 'RS256' })
+ expect(ttl).to eq(5.minutes.to_i)
+ end
+ end
+
+ context 'when signing key is missing' do
+ let(:rsa_key_data) { nil }
- expect(headers['kid']).to eq(rsa_key.public_key.to_jwk['kid'])
+ it 'raises NoSigningKeyError' do
+ expect { jwt }.to raise_error described_class::NoSigningKeyError
+ end
+ end
end
- it 'generates JWT for the given job with ttl equal to build timeout' do
- expect(build).to receive(:metadata_timeout).and_return(3_600)
+ subject(:jwt) { described_class.for_build(build) }
+
+ context 'when ci_jwt_signing_key feature flag is disabled' do
+ before do
+ stub_feature_flags(ci_jwt_signing_key: false)
- payload, _headers = JWT.decode(jwt, rsa_key.public_key, true, { algorithm: 'RS256' })
- ttl = payload["exp"] - payload["iat"]
+ allow(Rails.application.secrets).to receive(:openid_connect_signing_key).and_return(rsa_key_data)
+ end
- expect(ttl).to eq(3_600)
+ it_behaves_like 'generating JWT for build'
end
- it 'generates JWT for the given job with default ttl if build timeout is not set' do
- expect(build).to receive(:metadata_timeout).and_return(nil)
+ context 'when ci_jwt_signing_key feature flag is enabled' do
+ before do
+ stub_feature_flags(ci_jwt_signing_key: true)
- payload, _headers = JWT.decode(jwt, rsa_key.public_key, true, { algorithm: 'RS256' })
- ttl = payload["exp"] - payload["iat"]
+ stub_application_setting(ci_jwt_signing_key: rsa_key_data)
+ end
- expect(ttl).to eq(5.minutes.to_i)
+ it_behaves_like 'generating JWT for build'
end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb
new file mode 100644
index 00000000000..3eaecb11ae0
--- /dev/null
+++ b/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb
@@ -0,0 +1,123 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Pipeline::Chain::CancelPendingPipelines do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let(:prev_pipeline) { create(:ci_pipeline, project: project) }
+ let(:new_commit) { create(:commit, project: project) }
+ let(:pipeline) { create(:ci_pipeline, project: project, sha: new_commit.sha) }
+
+ let(:command) do
+ Gitlab::Ci::Pipeline::Chain::Command.new(project: project, current_user: user)
+ end
+
+ let(:step) { described_class.new(pipeline, command) }
+
+ before do
+ create(:ci_build, :interruptible, :running, pipeline: prev_pipeline)
+ create(:ci_build, :interruptible, :success, pipeline: prev_pipeline)
+ create(:ci_build, :created, pipeline: prev_pipeline)
+
+ create(:ci_build, :interruptible, pipeline: pipeline)
+ end
+
+ describe '#perform!' do
+ subject(:perform) { step.perform! }
+
+ before do
+ expect(build_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created')
+ expect(build_statuses(pipeline)).to contain_exactly('pending')
+ end
+
+ context 'when auto-cancel is enabled' do
+ before do
+ project.update!(auto_cancel_pending_pipelines: 'enabled')
+ end
+
+ it 'cancels only previous interruptible builds' do
+ perform
+
+ expect(build_statuses(prev_pipeline)).to contain_exactly('canceled', 'success', 'canceled')
+ expect(build_statuses(pipeline)).to contain_exactly('pending')
+ end
+
+ context 'when the previous pipeline has a child pipeline' do
+ let(:child_pipeline) { create(:ci_pipeline, child_of: prev_pipeline) }
+
+ context 'when the child pipeline has an interruptible job' do
+ before do
+ create(:ci_build, :interruptible, :running, pipeline: child_pipeline)
+ end
+
+ it 'cancels interruptible builds of child pipeline' do
+ expect(build_statuses(child_pipeline)).to contain_exactly('running')
+
+ perform
+
+ expect(build_statuses(child_pipeline)).to contain_exactly('canceled')
+ end
+
+ context 'when FF ci_auto_cancel_all_pipelines is disabled' do
+ before do
+ stub_feature_flags(ci_auto_cancel_all_pipelines: false)
+ end
+
+ it 'does not cancel interruptible builds of child pipeline' do
+ expect(build_statuses(child_pipeline)).to contain_exactly('running')
+
+ perform
+
+ expect(build_statuses(child_pipeline)).to contain_exactly('running')
+ end
+ end
+ end
+
+ context 'when the child pipeline does not have an interruptible job' do
+ before do
+ create(:ci_build, :running, pipeline: child_pipeline)
+ end
+
+ it 'does not cancel the build of child pipeline' do
+ expect(build_statuses(child_pipeline)).to contain_exactly('running')
+
+ perform
+
+ expect(build_statuses(child_pipeline)).to contain_exactly('running')
+ end
+ end
+ end
+
+ context 'when the prev pipeline source is webide' do
+ let(:prev_pipeline) { create(:ci_pipeline, :webide, project: project) }
+
+ it 'does not cancel builds of the previous pipeline' do
+ perform
+
+ expect(build_statuses(prev_pipeline)).to contain_exactly('created', 'running', 'success')
+ expect(build_statuses(pipeline)).to contain_exactly('pending')
+ end
+ end
+ end
+
+ context 'when auto-cancel is disabled' do
+ before do
+ project.update!(auto_cancel_pending_pipelines: 'disabled')
+ end
+
+ it 'does not cancel any build' do
+ subject
+
+ expect(build_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created')
+ expect(build_statuses(pipeline)).to contain_exactly('pending')
+ end
+ end
+ end
+
+ private
+
+ def build_statuses(pipeline)
+ pipeline.builds.pluck(:status)
+ end
+end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
index 8c02121857a..5506b079d0f 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
@@ -22,6 +22,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Populate do
[
Gitlab::Ci::Pipeline::Chain::Config::Content.new(pipeline, command),
Gitlab::Ci::Pipeline::Chain::Config::Process.new(pipeline, command),
+ Gitlab::Ci::Pipeline::Chain::SeedBlock.new(pipeline, command),
Gitlab::Ci::Pipeline::Chain::Seed.new(pipeline, command)
]
end
@@ -180,23 +181,21 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Populate do
->(pipeline) { pipeline.variables.create!(key: 'VAR', value: '123') }
end
- it 'wastes pipeline iid' do
- expect { run_chain }.to raise_error(ActiveRecord::RecordNotSaved)
-
- last_iid = InternalId.ci_pipelines
- .where(project_id: project.id)
- .last.last_value
-
- expect(last_iid).to be > 0
+      it 'raises an error' do
+ expect { run_chain }.to raise_error(ActiveRecord::RecordNotSaved,
+ 'You cannot call create unless the parent is saved')
end
end
end
context 'when pipeline gets persisted during the process' do
- let(:pipeline) { create(:ci_pipeline, project: project) }
+ before do
+ dependencies.each(&:perform!)
+ pipeline.save!
+ end
it 'raises error' do
- expect { run_chain }.to raise_error(described_class::PopulateError)
+ expect { step.perform! }.to raise_error(described_class::PopulateError)
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/seed_block_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/seed_block_spec.rb
new file mode 100644
index 00000000000..85c8e20767f
--- /dev/null
+++ b/spec/lib/gitlab/ci/pipeline/chain/seed_block_spec.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Pipeline::Chain::SeedBlock do
+ let(:project) { create(:project, :repository) }
+ let(:user) { create(:user, developer_projects: [project]) }
+ let(:seeds_block) { }
+
+ let(:command) do
+ Gitlab::Ci::Pipeline::Chain::Command.new(
+ project: project,
+ current_user: user,
+ origin_ref: 'master',
+ seeds_block: seeds_block)
+ end
+
+ let(:pipeline) { build(:ci_pipeline, project: project) }
+
+ describe '#perform!' do
+ before do
+ stub_ci_pipeline_yaml_file(YAML.dump(config))
+ end
+
+ subject(:run_chain) do
+ [
+ Gitlab::Ci::Pipeline::Chain::Config::Content.new(pipeline, command),
+ Gitlab::Ci::Pipeline::Chain::Config::Process.new(pipeline, command)
+ ].map(&:perform!)
+
+ described_class.new(pipeline, command).perform!
+ end
+
+ let(:config) do
+ { rspec: { script: 'rake' } }
+ end
+
+ context 'when there is no seeds_block' do
+ it 'does nothing' do
+ expect { run_chain }.not_to raise_error
+ end
+ end
+
+ context 'when there is a seeds_block' do
+ let(:seeds_block) do
+ ->(pipeline) { pipeline.variables.build(key: 'VAR', value: '123') }
+ end
+
+ it 'executes the block' do
+ run_chain
+
+ expect(pipeline.variables.size).to eq(1)
+ end
+
+ context 'when FF ci_seed_block_run_before_workflow_rules is disabled' do
+ before do
+ stub_feature_flags(ci_seed_block_run_before_workflow_rules: false)
+ end
+
+ it 'does not execute the block' do
+ run_chain
+
+ expect(pipeline.variables.size).to eq(0)
+ end
+ end
+ end
+
+ context 'when the seeds_block tries to save the pipeline' do
+ let(:seeds_block) do
+ ->(pipeline) { pipeline.save! }
+ end
+
+ it 'raises an error' do
+ expect { run_chain }.to raise_error('Pipeline cannot be persisted by `seeds_block`')
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb
index f83cd49d780..d849c768a3c 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb
@@ -5,22 +5,14 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
let(:project) { create(:project, :repository) }
let(:user) { create(:user, developer_projects: [project]) }
+ let(:seeds_block) { }
let(:command) do
Gitlab::Ci::Pipeline::Chain::Command.new(
project: project,
current_user: user,
origin_ref: 'master',
- seeds_block: nil)
- end
-
- def run_chain(pipeline, command)
- [
- Gitlab::Ci::Pipeline::Chain::Config::Content.new(pipeline, command),
- Gitlab::Ci::Pipeline::Chain::Config::Process.new(pipeline, command)
- ].map(&:perform!)
-
- described_class.new(pipeline, command).perform!
+ seeds_block: seeds_block)
end
let(:pipeline) { build(:ci_pipeline, project: project) }
@@ -28,22 +20,36 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
describe '#perform!' do
before do
stub_ci_pipeline_yaml_file(YAML.dump(config))
- run_chain(pipeline, command)
end
let(:config) do
{ rspec: { script: 'rake' } }
end
+ subject(:run_chain) do
+ [
+ Gitlab::Ci::Pipeline::Chain::Config::Content.new(pipeline, command),
+ Gitlab::Ci::Pipeline::Chain::Config::Process.new(pipeline, command)
+ ].map(&:perform!)
+
+ described_class.new(pipeline, command).perform!
+ end
+
it 'allocates next IID' do
+ run_chain
+
expect(pipeline.iid).to be_present
end
it 'ensures ci_ref' do
+ run_chain
+
expect(pipeline.ci_ref).to be_present
end
it 'sets the seeds in the command object' do
+ run_chain
+
expect(command.stage_seeds).to all(be_a Gitlab::Ci::Pipeline::Seed::Base)
expect(command.stage_seeds.count).to eq 1
end
@@ -58,6 +64,8 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
end
it 'correctly fabricates a stage seeds object' do
+ run_chain
+
seeds = command.stage_seeds
expect(seeds.size).to eq 2
expect(seeds.first.attributes[:name]).to eq 'test'
@@ -81,6 +89,8 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
end
it 'returns stage seeds only assigned to master' do
+ run_chain
+
seeds = command.stage_seeds
expect(seeds.size).to eq 1
@@ -100,6 +110,8 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
end
it 'returns stage seeds only assigned to schedules' do
+ run_chain
+
seeds = command.stage_seeds
expect(seeds.size).to eq 1
@@ -127,6 +139,8 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
let(:pipeline) { build(:ci_pipeline, project: project) }
it 'returns seeds for kubernetes dependent job' do
+ run_chain
+
seeds = command.stage_seeds
expect(seeds.size).to eq 2
@@ -138,6 +152,8 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
context 'when kubernetes is not active' do
it 'does not return seeds for kubernetes dependent job' do
+ run_chain
+
seeds = command.stage_seeds
expect(seeds.size).to eq 1
@@ -155,11 +171,39 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
end
it 'returns stage seeds only when variables expression is truthy' do
+ run_chain
+
seeds = command.stage_seeds
expect(seeds.size).to eq 1
expect(seeds.dig(0, 0, :name)).to eq 'unit'
end
end
+
+ context 'when there is a seeds_block' do
+ let(:seeds_block) do
+ ->(pipeline) { pipeline.variables.build(key: 'VAR', value: '123') }
+ end
+
+ context 'when FF ci_seed_block_run_before_workflow_rules is enabled' do
+ it 'does not execute the block' do
+ run_chain
+
+ expect(pipeline.variables.size).to eq(0)
+ end
+ end
+
+ context 'when FF ci_seed_block_run_before_workflow_rules is disabled' do
+ before do
+ stub_feature_flags(ci_seed_block_run_before_workflow_rules: false)
+ end
+
+ it 'executes the block' do
+ run_chain
+
+ expect(pipeline.variables.size).to eq(1)
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb
index 0c8a0de2f34..e62bf042fba 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb
@@ -16,20 +16,37 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Environment do
subject { seed.to_resource }
shared_examples_for 'returning a correct environment' do
+ let(:expected_auto_stop_in_seconds) do
+ if expected_auto_stop_in
+ ChronicDuration.parse(expected_auto_stop_in).seconds
+ end
+ end
+
it 'returns a persisted environment object' do
- expect { subject }.to change { Environment.count }.by(1)
+ freeze_time do
+ expect { subject }.to change { Environment.count }.by(1)
- expect(subject).to be_a(Environment)
- expect(subject).to be_persisted
- expect(subject.project).to eq(project)
- expect(subject.name).to eq(expected_environment_name)
+ expect(subject).to be_a(Environment)
+ expect(subject).to be_persisted
+ expect(subject.project).to eq(project)
+ expect(subject.name).to eq(expected_environment_name)
+ expect(subject.auto_stop_in).to eq(expected_auto_stop_in_seconds)
+ end
end
context 'when environment has already existed' do
- let!(:environment) { create(:environment, project: project, name: expected_environment_name) }
+ let!(:environment) do
+ create(:environment,
+ project: project,
+ name: expected_environment_name
+ ).tap do |env|
+ env.auto_stop_in = expected_auto_stop_in
+ end
+ end
it 'returns the existing environment object' do
expect { subject }.not_to change { Environment.count }
+ expect { subject }.not_to change { environment.auto_stop_at }
expect(subject).to be_persisted
expect(subject).to eq(environment)
@@ -37,9 +54,10 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Environment do
end
end
- context 'when job has environment attribute' do
+ context 'when job has environment name attribute' do
let(:environment_name) { 'production' }
let(:expected_environment_name) { 'production' }
+ let(:expected_auto_stop_in) { nil }
let(:attributes) do
{
@@ -49,11 +67,41 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Environment do
end
it_behaves_like 'returning a correct environment'
+
+ context 'and job environment also has an auto_stop_in attribute' do
+ let(:environment_auto_stop_in) { '5 minutes' }
+ let(:expected_auto_stop_in) { '5 minutes' }
+
+ let(:attributes) do
+ {
+ environment: environment_name,
+ options: {
+ environment: {
+ name: environment_name,
+ auto_stop_in: environment_auto_stop_in
+ }
+ }
+ }
+ end
+
+ it_behaves_like 'returning a correct environment'
+
+ context 'but the environment_auto_stop_start_on_create feature flag is disabled' do
+ let(:expected_auto_stop_in) { nil }
+
+ before do
+ stub_feature_flags(environment_auto_stop_start_on_create: false)
+ end
+
+ it_behaves_like 'returning a correct environment'
+ end
+ end
end
context 'when job starts a review app' do
let(:environment_name) { 'review/$CI_COMMIT_REF_NAME' }
let(:expected_environment_name) { "review/#{job.ref}" }
+ let(:expected_auto_stop_in) { nil }
let(:attributes) do
{
@@ -68,6 +116,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Environment do
context 'when job stops a review app' do
let(:environment_name) { 'review/$CI_COMMIT_REF_NAME' }
let(:expected_environment_name) { "review/#{job.ref}" }
+ let(:expected_auto_stop_in) { nil }
let(:attributes) do
{
diff --git a/spec/lib/gitlab/ci/reports/test_case_spec.rb b/spec/lib/gitlab/ci/reports/test_case_spec.rb
index a142846fc18..668a475514e 100644
--- a/spec/lib/gitlab/ci/reports/test_case_spec.rb
+++ b/spec/lib/gitlab/ci/reports/test_case_spec.rb
@@ -2,13 +2,13 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Reports::TestCase do
+RSpec.describe Gitlab::Ci::Reports::TestCase, :aggregate_failures do
describe '#initialize' do
let(:test_case) { described_class.new(params) }
context 'when required params are given' do
let(:job) { build(:ci_build) }
- let(:params) { attributes_for(:test_case).merge!(job: job) }
+ let(:params) { attributes_for(:report_test_case).merge!(job: job) }
it 'initializes an instance', :aggregate_failures do
expect { test_case }.not_to raise_error
@@ -31,7 +31,7 @@ RSpec.describe Gitlab::Ci::Reports::TestCase do
shared_examples 'param is missing' do |param|
let(:job) { build(:ci_build) }
- let(:params) { attributes_for(:test_case).merge!(job: job) }
+ let(:params) { attributes_for(:report_test_case).merge!(job: job) }
it 'raises an error' do
params.delete(param)
@@ -55,7 +55,7 @@ RSpec.describe Gitlab::Ci::Reports::TestCase do
context 'when attachment is present' do
let_it_be(:job) { create(:ci_build) }
- let(:attachment_test_case) { build(:test_case, :failed_with_attachment, job: job) }
+ let(:attachment_test_case) { build(:report_test_case, :failed_with_attachment, job: job) }
it "initializes the attachment if present" do
expect(attachment_test_case.attachment).to eq("some/path.png")
@@ -71,7 +71,7 @@ RSpec.describe Gitlab::Ci::Reports::TestCase do
end
context 'when attachment is missing' do
- let(:test_case) { build(:test_case) }
+ let(:test_case) { build(:report_test_case) }
it '#has_attachment?' do
expect(test_case.has_attachment?).to be_falsy
@@ -82,4 +82,17 @@ RSpec.describe Gitlab::Ci::Reports::TestCase do
end
end
end
+
+ describe '#set_recent_failures' do
+ it 'sets the recent_failures information' do
+ test_case = build(:report_test_case)
+
+ test_case.set_recent_failures(1, 'master')
+
+ expect(test_case.recent_failures).to eq(
+ count: 1,
+ base_branch: 'master'
+ )
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/reports/test_failure_history_spec.rb b/spec/lib/gitlab/ci/reports/test_failure_history_spec.rb
new file mode 100644
index 00000000000..8df34eddffd
--- /dev/null
+++ b/spec/lib/gitlab/ci/reports/test_failure_history_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Reports::TestFailureHistory, :aggregate_failures do
+ include TestReportsHelper
+
+ describe '#load!' do
+ let_it_be(:project) { create(:project) }
+ let(:failed_rspec) { create_test_case_rspec_failed }
+ let(:failed_java) { create_test_case_java_failed }
+
+ subject(:load_history) { described_class.new([failed_rspec, failed_java], project).load! }
+
+ before do
+ allow(Ci::TestCaseFailure)
+ .to receive(:recent_failures_count)
+ .with(project: project, test_case_keys: [failed_rspec.key, failed_java.key])
+ .and_return(
+ failed_rspec.key => 2,
+ failed_java.key => 1
+ )
+ end
+
+ it 'sets the recent failures for each matching failed test case in all test suites' do
+ load_history
+
+ expect(failed_rspec.recent_failures).to eq(count: 2, base_branch: 'master')
+ expect(failed_java.recent_failures).to eq(count: 1, base_branch: 'master')
+ end
+
+ context 'when the test_failure_history feature flag is disabled' do
+ before do
+ stub_feature_flags(test_failure_history: false)
+ end
+
+ it 'does not set recent failures' do
+ load_history
+
+ expect(failed_rspec.recent_failures).to be_nil
+ expect(failed_java.recent_failures).to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/reports/test_reports_spec.rb b/spec/lib/gitlab/ci/reports/test_reports_spec.rb
index 502859852f2..24c00de3731 100644
--- a/spec/lib/gitlab/ci/reports/test_reports_spec.rb
+++ b/spec/lib/gitlab/ci/reports/test_reports_spec.rb
@@ -110,7 +110,7 @@ RSpec.describe Gitlab::Ci::Reports::TestReports do
end
describe '#with_attachment' do
- let(:test_case) { build(:test_case, :failed) }
+ let(:test_case) { build(:report_test_case, :failed) }
subject { test_reports.with_attachment! }
@@ -126,8 +126,8 @@ RSpec.describe Gitlab::Ci::Reports::TestReports do
end
context 'when test suites contain an attachment' do
- let(:test_case_succes) { build(:test_case) }
- let(:test_case_with_attachment) { build(:test_case, :failed_with_attachment) }
+ let(:test_case_succes) { build(:report_test_case) }
+ let(:test_case_with_attachment) { build(:report_test_case, :failed_with_attachment) }
before do
test_reports.get_suite('rspec').add_test_case(test_case_succes)
diff --git a/spec/lib/gitlab/ci/reports/test_suite_comparer_spec.rb b/spec/lib/gitlab/ci/reports/test_suite_comparer_spec.rb
index 6bb6771678a..c44d32ddb7d 100644
--- a/spec/lib/gitlab/ci/reports/test_suite_comparer_spec.rb
+++ b/spec/lib/gitlab/ci/reports/test_suite_comparer_spec.rb
@@ -2,11 +2,11 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Reports::TestSuiteComparer do
+RSpec.describe Gitlab::Ci::Reports::TestSuiteComparer, :aggregate_failures do
include TestReportsHelper
let(:comparer) { described_class.new(name, base_suite, head_suite) }
- let(:name) { 'rpsec' }
+ let(:name) { 'rspec' }
let(:base_suite) { Gitlab::Ci::Reports::TestSuite.new(name) }
let(:head_suite) { Gitlab::Ci::Reports::TestSuite.new(name) }
let(:test_case_success) { create_test_case_java_success }
@@ -16,7 +16,7 @@ RSpec.describe Gitlab::Ci::Reports::TestSuiteComparer do
describe '#new_failures' do
subject { comparer.new_failures }
- context 'when head sutie has a newly failed test case which does not exist in base' do
+ context 'when head suite has a newly failed test case which does not exist in base' do
before do
base_suite.add_test_case(test_case_success)
head_suite.add_test_case(test_case_failed)
@@ -27,7 +27,7 @@ RSpec.describe Gitlab::Ci::Reports::TestSuiteComparer do
end
end
- context 'when head sutie still has a failed test case which failed in base' do
+ context 'when head suite still has a failed test case which failed in base' do
before do
base_suite.add_test_case(test_case_failed)
head_suite.add_test_case(test_case_failed)
@@ -38,7 +38,7 @@ RSpec.describe Gitlab::Ci::Reports::TestSuiteComparer do
end
end
- context 'when head sutie has a success test case which failed in base' do
+ context 'when head suite has a success test case which failed in base' do
before do
base_suite.add_test_case(test_case_failed)
head_suite.add_test_case(test_case_success)
@@ -53,7 +53,7 @@ RSpec.describe Gitlab::Ci::Reports::TestSuiteComparer do
describe '#existing_failures' do
subject { comparer.existing_failures }
- context 'when head sutie has a newly failed test case which does not exist in base' do
+ context 'when head suite has a newly failed test case which does not exist in base' do
before do
base_suite.add_test_case(test_case_success)
head_suite.add_test_case(test_case_failed)
@@ -64,7 +64,7 @@ RSpec.describe Gitlab::Ci::Reports::TestSuiteComparer do
end
end
- context 'when head sutie still has a failed test case which failed in base' do
+ context 'when head suite still has a failed test case which failed in base' do
before do
base_suite.add_test_case(test_case_failed)
head_suite.add_test_case(test_case_failed)
@@ -75,7 +75,7 @@ RSpec.describe Gitlab::Ci::Reports::TestSuiteComparer do
end
end
- context 'when head sutie has a success test case which failed in base' do
+ context 'when head suite has a success test case which failed in base' do
before do
base_suite.add_test_case(test_case_failed)
head_suite.add_test_case(test_case_success)
@@ -90,7 +90,7 @@ RSpec.describe Gitlab::Ci::Reports::TestSuiteComparer do
describe '#resolved_failures' do
subject { comparer.resolved_failures }
- context 'when head sutie has a newly failed test case which does not exist in base' do
+ context 'when head suite has a newly failed test case which does not exist in base' do
before do
base_suite.add_test_case(test_case_success)
head_suite.add_test_case(test_case_failed)
@@ -105,7 +105,7 @@ RSpec.describe Gitlab::Ci::Reports::TestSuiteComparer do
end
end
- context 'when head sutie still has a failed test case which failed in base' do
+ context 'when head suite still has a failed test case which failed in base' do
before do
base_suite.add_test_case(test_case_failed)
head_suite.add_test_case(test_case_failed)
@@ -120,7 +120,7 @@ RSpec.describe Gitlab::Ci::Reports::TestSuiteComparer do
end
end
- context 'when head sutie has a success test case which failed in base' do
+ context 'when head suite has a success test case which failed in base' do
before do
base_suite.add_test_case(test_case_failed)
head_suite.add_test_case(test_case_success)
@@ -347,4 +347,128 @@ RSpec.describe Gitlab::Ci::Reports::TestSuiteComparer do
end
end
end
+
+ describe '#limited_tests' do
+ subject(:limited_tests) { comparer.limited_tests }
+
+ context 'limits the number of tests returned' do
+ before do
+ stub_const("#{described_class}::DEFAULT_MAX_TESTS", 2)
+ stub_const("#{described_class}::DEFAULT_MIN_TESTS", 1)
+ end
+
+ context 'prefers new over existing and resolved' do
+ before do
+ 3.times { add_new_failure }
+ 3.times { add_new_error }
+ 3.times { add_existing_failure }
+ 3.times { add_existing_error }
+ 3.times { add_resolved_failure }
+ 3.times { add_resolved_error }
+ end
+
+ it 'returns 2 of each new category, and 1 of each resolved and existing' do
+ expect(limited_tests.new_failures.count).to eq(2)
+ expect(limited_tests.new_errors.count).to eq(2)
+ expect(limited_tests.existing_failures.count).to eq(1)
+ expect(limited_tests.existing_errors.count).to eq(1)
+ expect(limited_tests.resolved_failures.count).to eq(1)
+ expect(limited_tests.resolved_errors.count).to eq(1)
+ end
+
+ it 'does not affect the overall count' do
+ expect(summary).to include(total: 18, resolved: 6, failed: 6, errored: 6)
+ end
+ end
+
+ context 'prefers existing over resolved' do
+ before do
+ 3.times { add_existing_failure }
+ 3.times { add_existing_error }
+ 3.times { add_resolved_failure }
+ 3.times { add_resolved_error }
+ end
+
+ it 'returns 2 of each existing category, and 1 of each resolved' do
+ expect(limited_tests.new_failures.count).to eq(0)
+ expect(limited_tests.new_errors.count).to eq(0)
+ expect(limited_tests.existing_failures.count).to eq(2)
+ expect(limited_tests.existing_errors.count).to eq(2)
+ expect(limited_tests.resolved_failures.count).to eq(1)
+ expect(limited_tests.resolved_errors.count).to eq(1)
+ end
+
+ it 'does not affect the overall count' do
+ expect(summary).to include(total: 12, resolved: 6, failed: 3, errored: 3)
+ end
+ end
+
+ context 'limits the number of resolved tests' do
+ before do
+ 3.times { add_resolved_failure }
+ 3.times { add_resolved_error }
+ end
+
+ it 'returns 2 of each resolved category' do
+ expect(limited_tests.new_failures.count).to eq(0)
+ expect(limited_tests.new_errors.count).to eq(0)
+ expect(limited_tests.existing_failures.count).to eq(0)
+ expect(limited_tests.existing_errors.count).to eq(0)
+ expect(limited_tests.resolved_failures.count).to eq(2)
+ expect(limited_tests.resolved_errors.count).to eq(2)
+ end
+
+ it 'does not affect the overall count' do
+ expect(summary).to include(total: 6, resolved: 6, failed: 0, errored: 0)
+ end
+ end
+ end
+
+ def summary
+ {
+ total: comparer.total_count,
+ resolved: comparer.resolved_count,
+ failed: comparer.failed_count,
+ errored: comparer.error_count
+ }
+ end
+
+ def add_new_failure
+ failed_case = create_test_case_rspec_failed(SecureRandom.hex)
+ head_suite.add_test_case(failed_case)
+ end
+
+ def add_new_error
+ error_case = create_test_case_rspec_error(SecureRandom.hex)
+ head_suite.add_test_case(error_case)
+ end
+
+ def add_existing_failure
+ failed_case = create_test_case_rspec_failed(SecureRandom.hex)
+ base_suite.add_test_case(failed_case)
+ head_suite.add_test_case(failed_case)
+ end
+
+ def add_existing_error
+ error_case = create_test_case_rspec_error(SecureRandom.hex)
+ base_suite.add_test_case(error_case)
+ head_suite.add_test_case(error_case)
+ end
+
+ def add_resolved_failure
+ case_name = SecureRandom.hex
+ failed_case = create_test_case_java_failed(case_name)
+ success_case = create_test_case_java_success(case_name)
+ base_suite.add_test_case(failed_case)
+ head_suite.add_test_case(success_case)
+ end
+
+ def add_resolved_error
+ case_name = SecureRandom.hex
+ error_case = create_test_case_java_error(case_name)
+ success_case = create_test_case_java_success(case_name)
+ base_suite.add_test_case(error_case)
+ head_suite.add_test_case(success_case)
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/reports/test_suite_spec.rb b/spec/lib/gitlab/ci/reports/test_suite_spec.rb
index 50d1595da73..1d6b39a7831 100644
--- a/spec/lib/gitlab/ci/reports/test_suite_spec.rb
+++ b/spec/lib/gitlab/ci/reports/test_suite_spec.rb
@@ -91,7 +91,7 @@ RSpec.describe Gitlab::Ci::Reports::TestSuite do
subject { test_suite.with_attachment! }
context 'when test cases do not contain an attachment' do
- let(:test_case) { build(:test_case, :failed)}
+ let(:test_case) { build(:report_test_case, :failed)}
before do
test_suite.add_test_case(test_case)
@@ -103,7 +103,7 @@ RSpec.describe Gitlab::Ci::Reports::TestSuite do
end
context 'when test cases contain an attachment' do
- let(:test_case_with_attachment) { build(:test_case, :failed_with_attachment)}
+ let(:test_case_with_attachment) { build(:report_test_case, :failed_with_attachment)}
before do
test_suite.add_test_case(test_case_with_attachment)
diff --git a/spec/lib/gitlab/ci/runner_instructions_spec.rb b/spec/lib/gitlab/ci/runner_instructions_spec.rb
index 32ee2ceb040..d1020026fe6 100644
--- a/spec/lib/gitlab/ci/runner_instructions_spec.rb
+++ b/spec/lib/gitlab/ci/runner_instructions_spec.rb
@@ -75,6 +75,13 @@ RSpec.describe Gitlab::Ci::RunnerInstructions do
with_them do
let(:params) { { os: os, arch: arch } }
+ around do |example|
+ # puma in production does not run from Rails.root; ensure file loading does not assume it does
+ Dir.chdir(Rails.root.join('tmp').to_s) do
+ example.run
+ end
+ end
+
it 'returns string containing correct params' do
result = subject.install_script
diff --git a/spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb
new file mode 100644
index 00000000000..4be92e8608e
--- /dev/null
+++ b/spec/lib/gitlab/ci/templates/AWS/deploy_ecs_gitlab_ci_yaml_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Deploy-ECS.gitlab-ci.yml' do
+ subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('AWS/Deploy-ECS') }
+
+ describe 'the created pipeline' do
+ let_it_be(:user) { create(:admin) }
+ let(:default_branch) { 'master' }
+ let(:pipeline_branch) { default_branch }
+ let(:project) { create(:project, :auto_devops, :custom_repo, files: { 'README.md' => '' }) }
+ let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch) }
+ let(:pipeline) { service.execute!(:push) }
+ let(:build_names) { pipeline.builds.pluck(:name) }
+ let(:platform_target) { 'ECS' }
+
+ before do
+ create(:ci_variable, project: project, key: 'AUTO_DEVOPS_PLATFORM_TARGET', value: platform_target)
+ stub_ci_pipeline_yaml_file(template.content)
+ allow_any_instance_of(Ci::BuildScheduleWorker).to receive(:perform).and_return(true)
+ allow(project).to receive(:default_branch).and_return(default_branch)
+ end
+
+ shared_examples 'no pipeline yaml error' do
+ it 'does not have any error' do
+ expect(pipeline.has_yaml_errors?).to be_falsey
+ end
+ end
+
+ it_behaves_like 'no pipeline yaml error'
+
+ it 'creates the expected jobs' do
+ expect(build_names).to include('production_ecs')
+ end
+
+ context 'when running a pipeline for a branch' do
+ let(:pipeline_branch) { 'test_branch' }
+
+ before do
+ project.repository.create_branch(pipeline_branch)
+ end
+
+ it_behaves_like 'no pipeline yaml error'
+
+ it 'creates the expected jobs' do
+ expect(build_names).to include('review_ecs', 'stop_review_ecs')
+ end
+
+ context 'when deploying to ECS Fargate' do
+ let(:platform_target) { 'FARGATE' }
+
+ it 'creates the expected jobs' do
+ expect(build_names).to include('review_fargate', 'stop_review_fargate')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb
index 4d90e7ca9e6..793df55f45d 100644
--- a/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb
@@ -94,14 +94,14 @@ RSpec.describe 'Auto-DevOps.gitlab-ci.yml' do
end
it 'creates an ECS deployment job for review only' do
- expect(review_prod_build_names).to contain_exactly('review_ecs')
+ expect(review_prod_build_names).to contain_exactly('review_ecs', 'stop_review_ecs')
end
context 'with FARGATE as a launch type' do
let(:platform_value) { 'FARGATE' }
it 'creates an FARGATE deployment job for review only' do
- expect(review_prod_build_names).to contain_exactly('review_fargate')
+ expect(review_prod_build_names).to contain_exactly('review_fargate', 'stop_review_fargate')
end
end
end
@@ -122,6 +122,15 @@ RSpec.describe 'Auto-DevOps.gitlab-ci.yml' do
end
end
end
+
+ context 'when the platform target is EC2' do
+ let(:platform_value) { 'EC2' }
+
+ it 'contains the build_artifact job, not the build job' do
+ expect(build_names).to include('build_artifact')
+ expect(build_names).not_to include('build')
+ end
+ end
end
context 'when the project has no active cluster' do
diff --git a/spec/lib/gitlab/ci/variables/collection/item_spec.rb b/spec/lib/gitlab/ci/variables/collection/item_spec.rb
index eba2f29836d..2e43f22830a 100644
--- a/spec/lib/gitlab/ci/variables/collection/item_spec.rb
+++ b/spec/lib/gitlab/ci/variables/collection/item_spec.rb
@@ -15,14 +15,14 @@ RSpec.describe Gitlab::Ci::Variables::Collection::Item do
context 'when unknown keyword is specified' do
it 'raises error' do
expect { described_class.new(key: variable_key, value: 'abc', files: true) }
- .to raise_error ArgumentError, 'unknown keyword: files'
+ .to raise_error ArgumentError, 'unknown keyword: :files'
end
end
context 'when required keywords are not specified' do
it 'raises error' do
expect { described_class.new(key: variable_key) }
- .to raise_error ArgumentError, 'missing keyword: value'
+ .to raise_error ArgumentError, 'missing keyword: :value'
end
end
diff --git a/spec/lib/gitlab/config/entry/simplifiable_spec.rb b/spec/lib/gitlab/config/entry/simplifiable_spec.rb
index 2011587a342..f9088130037 100644
--- a/spec/lib/gitlab/config/entry/simplifiable_spec.rb
+++ b/spec/lib/gitlab/config/entry/simplifiable_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe Gitlab::Config::Entry::Simplifiable do
end
it 'attemps to load a first strategy' do
- expect(first).to receive(:new).with('something', anything)
+ expect(first).to receive(:new).with('something')
entry.new('something')
end
@@ -53,7 +53,7 @@ RSpec.describe Gitlab::Config::Entry::Simplifiable do
end
it 'attempts to load a second strategy' do
- expect(second).to receive(:new).with('test', anything)
+ expect(second).to receive(:new).with('test')
entry.new('test')
end
@@ -68,7 +68,7 @@ RSpec.describe Gitlab::Config::Entry::Simplifiable do
end
it 'instantiates an unknown strategy' do
- expect(unknown).to receive(:new).with('test', anything)
+ expect(unknown).to receive(:new).with('test')
entry.new('test')
end
diff --git a/spec/lib/gitlab/conflict/file_spec.rb b/spec/lib/gitlab/conflict/file_spec.rb
index 80bd517ec92..0de944d3f8a 100644
--- a/spec/lib/gitlab/conflict/file_spec.rb
+++ b/spec/lib/gitlab/conflict/file_spec.rb
@@ -93,6 +93,51 @@ RSpec.describe Gitlab::Conflict::File do
end
end
+ describe '#diff_lines_for_serializer' do
+ let(:diff_line_types) { conflict_file.diff_lines_for_serializer.map(&:type) }
+
+ it 'assigns conflict types to the diff lines' do
+ expect(diff_line_types[4]).to eq('conflict_marker')
+ expect(diff_line_types[5..10]).to eq(['conflict_marker_our'] * 6)
+ expect(diff_line_types[11]).to eq('conflict_marker')
+ expect(diff_line_types[12..17]).to eq(['conflict_marker_their'] * 6)
+ expect(diff_line_types[18]).to eq('conflict_marker')
+
+ expect(diff_line_types[19..24]).to eq([nil] * 6)
+
+ expect(diff_line_types[25]).to eq('conflict_marker')
+ expect(diff_line_types[26..27]).to eq(['conflict_marker_our'] * 2)
+ expect(diff_line_types[28]).to eq('conflict_marker')
+ expect(diff_line_types[29..30]).to eq(['conflict_marker_their'] * 2)
+ expect(diff_line_types[31]).to eq('conflict_marker')
+ end
+
+ it 'does not add a match line to the end of the section' do
+ expect(diff_line_types.last).to eq(nil)
+ end
+
+ context 'when there are unchanged trailing lines' do
+ let(:rugged_conflict) { index.conflicts.first }
+ let(:raw_conflict_content) { index.merge_file('files/ruby/popen.rb')[:data] }
+
+ it 'assigns conflict types and adds a match line to the end of the section' do
+ expect(diff_line_types).to eq([
+ 'match',
+ nil, nil, nil,
+ "conflict_marker",
+ "conflict_marker_our",
+ "conflict_marker",
+ "conflict_marker_their",
+ "conflict_marker_their",
+ "conflict_marker_their",
+ "conflict_marker",
+ nil, nil, nil,
+ "match"
+ ])
+ end
+ end
+ end
+
describe '#sections' do
it 'only inserts match lines when there is a gap between sections' do
conflict_file.sections.each_with_index do |section, i|
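For context on the conflict line types asserted above: the serializer walks an ordinary git-style conflicted hunk, so a hedged, hand-written illustration (not taken from the fixture repository the spec actually uses) of how marker lines map to types might be:

```ruby
# Illustrative only; the spec above exercises fixtures from the test repository, not this string.
raw_conflict = <<~CONFLICT
  module Popen
  <<<<<<< files/ruby/popen.rb
    def self.popen(cmd)
  =======
    def self.popen(cmd, path)
  >>>>>>> files/ruby/popen.rb
  end
CONFLICT

# By analogy with the assertions above, the serializer types these lines as:
#   nil                     -> "module Popen" and "end" (unchanged lines)
#   'conflict_marker'       -> the <<<<<<<, ======= and >>>>>>> lines
#   'conflict_marker_our'   -> "def self.popen(cmd)"
#   'conflict_marker_their' -> "def self.popen(cmd, path)"
```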
diff --git a/spec/lib/gitlab/cycle_analytics/events_spec.rb b/spec/lib/gitlab/cycle_analytics/events_spec.rb
index a31f34d82d7..2c5988f06b2 100644
--- a/spec/lib/gitlab/cycle_analytics/events_spec.rb
+++ b/spec/lib/gitlab/cycle_analytics/events_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'cycle analytics events', :aggregate_failures do
+RSpec.describe 'value stream analytics events', :aggregate_failures do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user, :admin) }
let(:from_date) { 10.days.ago }
diff --git a/spec/lib/gitlab/danger/commit_linter_spec.rb b/spec/lib/gitlab/danger/commit_linter_spec.rb
index 882cede759b..ebfeedba700 100644
--- a/spec/lib/gitlab/danger/commit_linter_spec.rb
+++ b/spec/lib/gitlab/danger/commit_linter_spec.rb
@@ -190,7 +190,9 @@ RSpec.describe Gitlab::Danger::CommitLinter do
[
'[ci skip] A commit message',
'[Ci skip] A commit message',
- '[API] A commit message'
+ '[API] A commit message',
+ 'api: A commit message',
+ 'API: A commit message'
].each do |message|
context "when subject is '#{message}'" do
let(:commit_message) { message }
@@ -207,6 +209,9 @@ RSpec.describe Gitlab::Danger::CommitLinter do
'[ci skip]A commit message',
'[Ci skip] A commit message',
'[ci skip] a commit message',
+ 'API: a commit message',
+ 'api: a commit message',
'! A commit message'
].each do |message|
context "when subject is '#{message}'" do
diff --git a/spec/lib/gitlab/danger/helper_spec.rb b/spec/lib/gitlab/danger/helper_spec.rb
index 509649f08c6..f400641706d 100644
--- a/spec/lib/gitlab/danger/helper_spec.rb
+++ b/spec/lib/gitlab/danger/helper_spec.rb
@@ -236,13 +236,16 @@ RSpec.describe Gitlab::Danger::Helper do
'.gitlab/ci/frontend.gitlab-ci.yml' | %i[frontend engineering_productivity]
- 'app/models/foo' | [:backend]
- 'bin/foo' | [:backend]
- 'config/foo' | [:backend]
- 'lib/foo' | [:backend]
- 'rubocop/foo' | [:backend]
- 'spec/foo' | [:backend]
- 'spec/foo/bar' | [:backend]
+ 'app/models/foo' | [:backend]
+ 'bin/foo' | [:backend]
+ 'config/foo' | [:backend]
+ 'lib/foo' | [:backend]
+ 'rubocop/foo' | [:backend]
+ '.rubocop.yml' | [:backend]
+ '.rubocop_todo.yml' | [:backend]
+ '.rubocop_manual_todo.yml' | [:backend]
+ 'spec/foo' | [:backend]
+ 'spec/foo/bar' | [:backend]
'ee/app/foo' | [:backend]
'ee/bin/foo' | [:backend]
@@ -278,9 +281,9 @@ RSpec.describe Gitlab::Danger::Helper do
'scripts/foo' | [:engineering_productivity]
'lib/gitlab/danger/foo' | [:engineering_productivity]
'ee/lib/gitlab/danger/foo' | [:engineering_productivity]
- '.overcommit.yml.example' | [:engineering_productivity]
+ 'lefthook.yml' | [:engineering_productivity]
'.editorconfig' | [:engineering_productivity]
- 'tooling/overcommit/foo' | [:engineering_productivity]
+ 'tooling/bin/find_foss_tests' | [:engineering_productivity]
'.codeclimate.yml' | [:engineering_productivity]
'.gitlab/CODEOWNERS' | [:engineering_productivity]
@@ -312,6 +315,8 @@ RSpec.describe Gitlab::Danger::Helper do
'db/fixtures/foo.rb' | [:backend]
'ee/db/fixtures/foo.rb' | [:backend]
+ 'doc/api/graphql/reference/gitlab_schema.graphql' | [:backend]
+ 'doc/api/graphql/reference/gitlab_schema.json' | [:backend]
'qa/foo' | [:qa]
'ee/qa/foo' | [:qa]
diff --git a/spec/lib/gitlab/data_builder/feature_flag_spec.rb b/spec/lib/gitlab/data_builder/feature_flag_spec.rb
new file mode 100644
index 00000000000..75511fcf9f5
--- /dev/null
+++ b/spec/lib/gitlab/data_builder/feature_flag_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::DataBuilder::FeatureFlag do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+ let(:feature_flag) { create(:operations_feature_flag, project: project) }
+
+ describe '.build' do
+ let(:data) { described_class.build(feature_flag, user) }
+
+ it { expect(data).to be_a(Hash) }
+ it { expect(data[:object_kind]).to eq('feature_flag') }
+
+ it 'contains the correct object attributes' do
+ object_attributes = data[:object_attributes]
+
+ expect(object_attributes[:id]).to eq(feature_flag.id)
+ expect(object_attributes[:name]).to eq(feature_flag.name)
+ expect(object_attributes[:description]).to eq(feature_flag.description)
+ expect(object_attributes[:active]).to eq(feature_flag.active)
+ end
+ end
+end
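The assertions above only pin down part of the webhook payload; a hand-built hash that would satisfy them (values are illustrative, and the real builder may include additional keys) looks like this:

```ruby
# Illustrative shape only; not the builder's actual output.
payload = {
  object_kind: 'feature_flag',
  object_attributes: {
    id: 6,
    name: 'my_feature_flag',
    description: 'Toggles the new navigation',
    active: true
  }
}

payload[:object_attributes][:active] # => true
```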
diff --git a/spec/lib/gitlab/database/batch_count_spec.rb b/spec/lib/gitlab/database/batch_count_spec.rb
index 31a8b4afa03..a1cc759e011 100644
--- a/spec/lib/gitlab/database/batch_count_spec.rb
+++ b/spec/lib/gitlab/database/batch_count_spec.rb
@@ -141,6 +141,29 @@ RSpec.describe Gitlab::Database::BatchCount do
described_class.batch_count(model)
end
+ it 'does not use BETWEEN to define the range' do
+ batch_size = Gitlab::Database::BatchCounter::MIN_REQUIRED_BATCH_SIZE + 1
+ issue = nil
+
+ travel_to(Date.tomorrow) do
+ issue = create(:issue) # created_at: 00:00:00
+ create(:issue, created_at: issue.created_at + batch_size - 0.5) # created_at: 00:20:50.5
+ create(:issue, created_at: issue.created_at + batch_size) # created_at: 00:20:51
+ end
+
+ # When using BETWEEN, the range condition looks like:
+ # Batch 1: WHERE "issues"."created_at" BETWEEN "2020-10-09 00:00:00" AND "2020-10-09 00:20:50"
+ # Batch 2: WHERE "issues"."created_at" BETWEEN "2020-10-09 00:20:51" AND "2020-10-09 00:41:41"
+ # We miss the issue created at 00:20:50.5 because we prevent the batches from overlapping (start..(finish - 1))
+ # See https://wiki.postgresql.org/wiki/Don't_Do_This#Don.27t_use_BETWEEN_.28especially_with_timestamps.29
+
+ # When using >= AND <, we eliminate any gaps between batches (start...finish)
+ # This is useful when iterating over a timestamp column
+ # Batch 1: WHERE "issues"."created_at" >= "2020-10-09 00:00:00" AND "issues"."created_at" < "2020-10-09 00:20:51"
+ # Batch 2: WHERE "issues"."created_at" >= "2020-10-09 00:20:51" AND "issues"."created_at" < "2020-10-09 00:41:42"
+ expect(described_class.batch_count(model, :created_at, batch_size: batch_size, start: issue.created_at)).to eq(3)
+ end
+
it_behaves_like 'when a transaction is open' do
subject { described_class.batch_count(model) }
end
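The comment block in the new example above is the heart of the change. As a standalone sketch (not the BatchCounter implementation; the 1251-second step mirrors MIN_REQUIRED_BATCH_SIZE + 1 from the example, everything else is illustrative), the two range styles behave like this:

```ruby
# Closed (BETWEEN-style) versus half-open batches over a timestamp column.
start = Time.utc(2020, 10, 9)
step  = 1251
missed_record = start + step - 0.5 # the issue created at 00:20:50.5

# Closed (BETWEEN) batches leave a one-second gap between them:
between_batch1 = start..(start + step - 1)              # 00:00:00 .. 00:20:50
between_batch2 = (start + step)..(start + 2 * step - 1) # 00:20:51 .. 00:41:41
between_batch1.cover?(missed_record) || between_batch2.cover?(missed_record) # => false

# Half-open (>= AND <) batches tile the time axis with no gap:
half_open_batch1 = start...(start + step)               # 00:00:00 ... 00:20:51
half_open_batch2 = (start + step)...(start + 2 * step)  # 00:20:51 ... 00:41:42
half_open_batch1.cover?(missed_record) || half_open_batch2.cover?(missed_record) # => true
```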
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index a8edcc5f7e5..ff6e5437559 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -1680,7 +1680,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
has_internal_id :iid,
scope: :project,
- init: ->(s) { s&.project&.issues&.maximum(:iid) },
+ init: ->(s, _scope) { s&.project&.issues&.maximum(:iid) },
backfill: true,
presence: false
end
diff --git a/spec/lib/gitlab/database/partitioning/replace_table_spec.rb b/spec/lib/gitlab/database/partitioning/replace_table_spec.rb
new file mode 100644
index 00000000000..d47666eeffd
--- /dev/null
+++ b/spec/lib/gitlab/database/partitioning/replace_table_spec.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Partitioning::ReplaceTable, '#perform' do
+ include TableSchemaHelpers
+
+ subject(:replace_table) { described_class.new(original_table, replacement_table, archived_table, 'id').perform }
+
+ let(:original_table) { '_test_original_table' }
+ let(:replacement_table) { '_test_replacement_table' }
+ let(:archived_table) { '_test_archived_table' }
+
+ let(:original_sequence) { "#{original_table}_id_seq" }
+
+ let(:original_primary_key) { "#{original_table}_pkey" }
+ let(:replacement_primary_key) { "#{replacement_table}_pkey" }
+ let(:archived_primary_key) { "#{archived_table}_pkey" }
+
+ before do
+ connection.execute(<<~SQL)
+ CREATE TABLE #{original_table} (
+ id serial NOT NULL PRIMARY KEY,
+ original_column text NOT NULL,
+ created_at timestamptz NOT NULL);
+
+ CREATE TABLE #{replacement_table} (
+ id int NOT NULL,
+ replacement_column text NOT NULL,
+ created_at timestamptz NOT NULL,
+ PRIMARY KEY (id, created_at))
+ PARTITION BY RANGE (created_at);
+ SQL
+ end
+
+ it 'replaces the current table, archiving the old' do
+ expect_table_to_be_replaced { replace_table }
+ end
+
+ it 'transfers the primary key sequence to the replacement table' do
+ expect(sequence_owned_by(original_table, 'id')).to eq(original_sequence)
+ expect(default_expression_for(original_table, 'id')).to eq("nextval('#{original_sequence}'::regclass)")
+
+ expect(sequence_owned_by(replacement_table, 'id')).to be_nil
+ expect(default_expression_for(replacement_table, 'id')).to be_nil
+
+ expect_table_to_be_replaced { replace_table }
+
+ expect(sequence_owned_by(original_table, 'id')).to eq(original_sequence)
+ expect(default_expression_for(original_table, 'id')).to eq("nextval('#{original_sequence}'::regclass)")
+ expect(sequence_owned_by(archived_table, 'id')).to be_nil
+ expect(default_expression_for(archived_table, 'id')).to be_nil
+ end
+
+ it 'renames the primary key constraints to match the new table names' do
+ expect_primary_keys_after_tables([original_table, replacement_table])
+
+ expect_table_to_be_replaced { replace_table }
+
+ expect_primary_keys_after_tables([original_table, archived_table])
+ end
+
+ context 'when the table has partitions' do
+ before do
+ connection.execute(<<~SQL)
+ CREATE TABLE gitlab_partitions_dynamic.#{replacement_table}_202001 PARTITION OF #{replacement_table}
+ FOR VALUES FROM ('2020-01-01') TO ('2020-02-01');
+
+ CREATE TABLE gitlab_partitions_dynamic.#{replacement_table}_202002 PARTITION OF #{replacement_table}
+ FOR VALUES FROM ('2020-02-01') TO ('2020-03-01');
+ SQL
+ end
+
+ it 'renames the partitions to match the new table name' do
+ expect(partitions_for_parent_table(original_table).count).to eq(0)
+ expect(partitions_for_parent_table(replacement_table).count).to eq(2)
+
+ expect_table_to_be_replaced { replace_table }
+
+ expect(partitions_for_parent_table(archived_table).count).to eq(0)
+
+ partitions = partitions_for_parent_table(original_table).all
+
+ expect(partitions.size).to eq(2)
+
+ expect(partitions[0]).to have_attributes(
+ identifier: "gitlab_partitions_dynamic.#{original_table}_202001",
+ condition: "FOR VALUES FROM ('2020-01-01 00:00:00+00') TO ('2020-02-01 00:00:00+00')")
+
+ expect(partitions[1]).to have_attributes(
+ identifier: "gitlab_partitions_dynamic.#{original_table}_202002",
+ condition: "FOR VALUES FROM ('2020-02-01 00:00:00+00') TO ('2020-03-01 00:00:00+00')")
+ end
+
+ it 'renames the primary key constraints to match the new partition names' do
+ original_partitions = ["#{replacement_table}_202001", "#{replacement_table}_202002"]
+ expect_primary_keys_after_tables(original_partitions, schema: 'gitlab_partitions_dynamic')
+
+ expect_table_to_be_replaced { replace_table }
+
+ renamed_partitions = ["#{original_table}_202001", "#{original_table}_202002"]
+ expect_primary_keys_after_tables(renamed_partitions, schema: 'gitlab_partitions_dynamic')
+ end
+ end
+
+ def partitions_for_parent_table(table)
+ Gitlab::Database::PostgresPartition.for_parent_table(table)
+ end
+
+ def expect_table_to_be_replaced(&block)
+ super(original_table: original_table, replacement_table: replacement_table, archived_table: archived_table, &block)
+ end
+end
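The expectations above (table swap, sequence ownership, primary key renames, partition renames) imply the replacement is a short sequence of DDL. A hedged sketch of what such a sequence could look like, reusing the spec's table names and the same connection.execute style, is below; this is not ReplaceTable's actual implementation, and the partition and partition primary key renames would be handled analogously.

```ruby
# Hedged sketch only: one DDL sequence consistent with the expectations above.
connection.execute(<<~SQL)
  -- swap the tables
  ALTER TABLE _test_original_table RENAME TO _test_archived_table;
  ALTER TABLE _test_replacement_table RENAME TO _test_original_table;

  -- rename the primary key constraints to follow their new table names
  ALTER INDEX _test_original_table_pkey RENAME TO _test_archived_table_pkey;
  ALTER INDEX _test_replacement_table_pkey RENAME TO _test_original_table_pkey;

  -- keep the id sequence (and the default drawing from it) attached to the
  -- table that now carries the original name
  ALTER SEQUENCE _test_original_table_id_seq OWNED BY _test_original_table.id;
  ALTER TABLE _test_archived_table ALTER COLUMN id DROP DEFAULT;
  ALTER TABLE _test_original_table
    ALTER COLUMN id SET DEFAULT nextval('_test_original_table_id_seq'::regclass);
SQL
```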
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/index_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/index_helpers_spec.rb
new file mode 100644
index 00000000000..7f61ff759fc
--- /dev/null
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/index_helpers_spec.rb
@@ -0,0 +1,186 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::IndexHelpers do
+ include TableSchemaHelpers
+
+ let(:migration) do
+ ActiveRecord::Migration.new.extend(described_class)
+ end
+
+ let(:table_name) { '_test_partitioned_table' }
+ let(:column_name) { 'created_at' }
+ let(:index_name) { '_test_partitioning_index_name' }
+ let(:partition_schema) { 'gitlab_partitions_dynamic' }
+ let(:partition1_identifier) { "#{partition_schema}.#{table_name}_202001" }
+ let(:partition2_identifier) { "#{partition_schema}.#{table_name}_202002" }
+ let(:partition1_index) { "index_#{table_name}_202001_#{column_name}" }
+ let(:partition2_index) { "index_#{table_name}_202002_#{column_name}" }
+
+ before do
+ allow(migration).to receive(:puts)
+
+ connection.execute(<<~SQL)
+ CREATE TABLE #{table_name} (
+ id serial NOT NULL,
+ created_at timestamptz NOT NULL,
+ PRIMARY KEY (id, created_at)
+ ) PARTITION BY RANGE (created_at);
+
+ CREATE TABLE #{partition1_identifier} PARTITION OF #{table_name}
+ FOR VALUES FROM ('2020-01-01') TO ('2020-02-01');
+
+ CREATE TABLE #{partition2_identifier} PARTITION OF #{table_name}
+ FOR VALUES FROM ('2020-02-01') TO ('2020-03-01');
+ SQL
+ end
+
+ describe '#add_concurrent_partitioned_index' do
+ before do
+ allow(migration).to receive(:index_name_exists?).with(table_name, index_name).and_return(false)
+
+ allow(migration).to receive(:generated_index_name).and_return(partition1_index, partition2_index)
+
+ allow(migration).to receive(:with_lock_retries).and_yield
+ end
+
+ context 'when the index does not exist on the parent table' do
+ it 'creates the index on each partition, and the parent table', :aggregate_failures do
+ expect(migration).to receive(:index_name_exists?).with(table_name, index_name).and_return(false)
+
+ expect_add_concurrent_index_and_call_original(partition1_identifier, column_name, partition1_index)
+ expect_add_concurrent_index_and_call_original(partition2_identifier, column_name, partition2_index)
+
+ expect(migration).to receive(:with_lock_retries).ordered.and_yield
+ expect(migration).to receive(:add_index).with(table_name, column_name, name: index_name).ordered.and_call_original
+
+ migration.add_concurrent_partitioned_index(table_name, column_name, name: index_name)
+
+ expect_index_to_exist(partition1_index, schema: partition_schema)
+ expect_index_to_exist(partition2_index, schema: partition_schema)
+ expect_index_to_exist(index_name)
+ end
+
+ def expect_add_concurrent_index_and_call_original(table, column, index)
+ expect(migration).to receive(:add_concurrent_index).ordered.with(table, column, name: index)
+ .and_wrap_original { |_, table, column, options| connection.add_index(table, column, options) }
+ end
+ end
+
+ context 'when the index exists on the parent table' do
+ it 'does not attempt to create any indexes', :aggregate_failures do
+ expect(migration).to receive(:index_name_exists?).with(table_name, index_name).and_return(true)
+
+ expect(migration).not_to receive(:add_concurrent_index)
+ expect(migration).not_to receive(:with_lock_retries)
+ expect(migration).not_to receive(:add_index)
+
+ migration.add_concurrent_partitioned_index(table_name, column_name, name: index_name)
+ end
+ end
+
+ context 'when additional index options are given' do
+ before do
+ connection.execute(<<~SQL)
+ DROP TABLE #{partition2_identifier}
+ SQL
+ end
+
+ it 'forwards them to the index helper methods', :aggregate_failures do
+ expect(migration).to receive(:add_concurrent_index)
+ .with(partition1_identifier, column_name, name: partition1_index, where: 'x > 0', unique: true)
+
+ expect(migration).to receive(:add_index)
+ .with(table_name, column_name, name: index_name, where: 'x > 0', unique: true)
+
+ migration.add_concurrent_partitioned_index(table_name, column_name,
+ name: index_name, where: 'x > 0', unique: true)
+ end
+ end
+
+ context 'when a name argument for the index is not given' do
+ it 'raises an error', :aggregate_failures do
+ expect(migration).not_to receive(:add_concurrent_index)
+ expect(migration).not_to receive(:with_lock_retries)
+ expect(migration).not_to receive(:add_index)
+
+ expect do
+ migration.add_concurrent_partitioned_index(table_name, column_name)
+ end.to raise_error(ArgumentError, /A name is required for indexes added to partitioned tables/)
+ end
+ end
+
+ context 'when the given table is not a partitioned table' do
+ before do
+ allow(Gitlab::Database::PostgresPartitionedTable).to receive(:find_by_name_in_current_schema)
+ .with(table_name).and_return(nil)
+ end
+
+ it 'raises an error', :aggregate_failures do
+ expect(migration).not_to receive(:add_concurrent_index)
+ expect(migration).not_to receive(:with_lock_retries)
+ expect(migration).not_to receive(:add_index)
+
+ expect do
+ migration.add_concurrent_partitioned_index(table_name, column_name, name: index_name)
+ end.to raise_error(ArgumentError, /#{table_name} is not a partitioned table/)
+ end
+ end
+ end
+
+ describe '#remove_concurrent_partitioned_index_by_name' do
+ context 'when the index exists' do
+ before do
+ connection.execute(<<~SQL)
+ CREATE INDEX #{partition1_index} ON #{partition1_identifier} (#{column_name});
+ CREATE INDEX #{partition2_index} ON #{partition2_identifier} (#{column_name});
+
+ CREATE INDEX #{index_name} ON #{table_name} (#{column_name});
+ SQL
+ end
+
+ it 'drops the index on the parent table, cascading to all partitions', :aggregate_failures do
+ expect_index_to_exist(partition1_index, schema: partition_schema)
+ expect_index_to_exist(partition2_index, schema: partition_schema)
+ expect_index_to_exist(index_name)
+
+ expect(migration).to receive(:with_lock_retries).ordered.and_yield
+ expect(migration).to receive(:remove_index).with(table_name, name: index_name).ordered.and_call_original
+
+ migration.remove_concurrent_partitioned_index_by_name(table_name, index_name)
+
+ expect_index_not_to_exist(partition1_index, schema: partition_schema)
+ expect_index_not_to_exist(partition2_index, schema: partition_schema)
+ expect_index_not_to_exist(index_name)
+ end
+ end
+
+ context 'when the index does not exist' do
+ it 'does not attempt to drop the index', :aggregate_failures do
+ expect(migration).to receive(:index_name_exists?).with(table_name, index_name).and_return(false)
+
+ expect(migration).not_to receive(:with_lock_retries)
+ expect(migration).not_to receive(:remove_index)
+
+ migration.remove_concurrent_partitioned_index_by_name(table_name, index_name)
+ end
+ end
+
+ context 'when the given table is not a partitioned table' do
+ before do
+ allow(Gitlab::Database::PostgresPartitionedTable).to receive(:find_by_name_in_current_schema)
+ .with(table_name).and_return(nil)
+ end
+
+ it 'raises an error', :aggregate_failures do
+ expect(migration).not_to receive(:with_lock_retries)
+ expect(migration).not_to receive(:remove_index)
+
+ expect do
+ migration.remove_concurrent_partitioned_index_by_name(table_name, index_name)
+ end.to raise_error(ArgumentError, /#{table_name} is not a partitioned table/)
+ end
+ end
+ end
+end
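The ordered expectations in the first context above spell out the helper's flow: skip if the parent index exists, build each partition index concurrently, then add the parent index under lock retries. A hedged pseudocode-style sketch of that flow follows; `each_partition_of` and the `generated_index_name` arguments are hypothetical names for illustration, and the real helper also validates that the table is partitioned and that a name was given.

```ruby
# Hedged sketch of the flow described by the ordered expectations above.
def add_concurrent_partitioned_index(table_name, column_name, **options)
  return if index_name_exists?(table_name, options[:name])

  each_partition_of(table_name) do |partition_identifier| # hypothetical helper
    partition_index_name = generated_index_name(partition_identifier, options[:name])
    add_concurrent_index(partition_identifier, column_name, **options.merge(name: partition_index_name))
  end

  with_lock_retries do
    add_index(table_name, column_name, **options)
  end
end
```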
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
index 147637cf471..f10ff704c17 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHelpers do
include PartitioningHelpers
include TriggerHelpers
+ include TableSchemaHelpers
let(:migration) do
ActiveRecord::Migration.new.extend(described_class)
@@ -629,6 +630,76 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
end
end
+ describe '#replace_with_partitioned_table' do
+ let(:archived_table) { "#{source_table}_archived" }
+
+ before do
+ migration.partition_table_by_date source_table, partition_column, min_date: min_date, max_date: max_date
+ end
+
+ it 'replaces the original table with the partitioned table' do
+ expect(table_type(source_table)).to eq('normal')
+ expect(table_type(partitioned_table)).to eq('partitioned')
+ expect(table_type(archived_table)).to be_nil
+
+ expect_table_to_be_replaced { migration.replace_with_partitioned_table(source_table) }
+
+ expect(table_type(source_table)).to eq('partitioned')
+ expect(table_type(archived_table)).to eq('normal')
+ expect(table_type(partitioned_table)).to be_nil
+ end
+
+ it 'moves the trigger from the original table to the new table' do
+ expect_function_to_exist(function_name)
+ expect_valid_function_trigger(source_table, trigger_name, function_name, after: %w[delete insert update])
+
+ expect_table_to_be_replaced { migration.replace_with_partitioned_table(source_table) }
+
+ expect_function_to_exist(function_name)
+ expect_valid_function_trigger(source_table, trigger_name, function_name, after: %w[delete insert update])
+ end
+
+ def expect_table_to_be_replaced(&block)
+ super(original_table: source_table, replacement_table: partitioned_table, archived_table: archived_table, &block)
+ end
+ end
+
+ describe '#rollback_replace_with_partitioned_table' do
+ let(:archived_table) { "#{source_table}_archived" }
+
+ before do
+ migration.partition_table_by_date source_table, partition_column, min_date: min_date, max_date: max_date
+
+ migration.replace_with_partitioned_table source_table
+ end
+
+ it 'replaces the partitioned table with the non-partitioned table' do
+ expect(table_type(source_table)).to eq('partitioned')
+ expect(table_type(archived_table)).to eq('normal')
+ expect(table_type(partitioned_table)).to be_nil
+
+ expect_table_to_be_replaced { migration.rollback_replace_with_partitioned_table(source_table) }
+
+ expect(table_type(source_table)).to eq('normal')
+ expect(table_type(partitioned_table)).to eq('partitioned')
+ expect(table_type(archived_table)).to be_nil
+ end
+
+ it 'moves the trigger from the partitioned table to the non-partitioned table' do
+ expect_function_to_exist(function_name)
+ expect_valid_function_trigger(source_table, trigger_name, function_name, after: %w[delete insert update])
+
+ expect_table_to_be_replaced { migration.rollback_replace_with_partitioned_table(source_table) }
+
+ expect_function_to_exist(function_name)
+ expect_valid_function_trigger(source_table, trigger_name, function_name, after: %w[delete insert update])
+ end
+
+ def expect_table_to_be_replaced(&block)
+ super(original_table: source_table, replacement_table: archived_table, archived_table: partitioned_table, &block)
+ end
+ end
+
def filter_columns_by_name(columns, names)
columns.reject { |c| names.include?(c.name) }
end
diff --git a/spec/lib/gitlab/database/postgres_index_spec.rb b/spec/lib/gitlab/database/postgres_index_spec.rb
index 1da67a5a6c0..d65b638f7bc 100644
--- a/spec/lib/gitlab/database/postgres_index_spec.rb
+++ b/spec/lib/gitlab/database/postgres_index_spec.rb
@@ -3,9 +3,13 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::PostgresIndex do
+ let(:schema) { 'public' }
+ let(:name) { 'foo_idx' }
+ let(:identifier) { "#{schema}.#{name}" }
+
before do
ActiveRecord::Base.connection.execute(<<~SQL)
- CREATE INDEX foo_idx ON public.users (name);
+ CREATE INDEX #{name} ON public.users (name);
CREATE UNIQUE INDEX bar_key ON public.users (id);
CREATE TABLE example_table (id serial primary key);
@@ -16,19 +20,7 @@ RSpec.describe Gitlab::Database::PostgresIndex do
described_class.by_identifier(name)
end
- describe '.by_identifier' do
- it 'finds the index' do
- expect(find('public.foo_idx')).to be_a(Gitlab::Database::PostgresIndex)
- end
-
- it 'raises an error if not found' do
- expect { find('public.idontexist') }.to raise_error(ActiveRecord::RecordNotFound)
- end
-
- it 'raises ArgumentError if given a non-fully qualified index name' do
- expect { find('foo') }.to raise_error(ArgumentError, /not fully qualified/)
- end
- end
+ it_behaves_like 'a postgres model'
describe '.regular' do
it 'only non-unique indexes' do
@@ -76,7 +68,7 @@ RSpec.describe Gitlab::Database::PostgresIndex do
describe '#valid_index?' do
it 'returns true if the index is invalid' do
- expect(find('public.foo_idx')).to be_valid_index
+ expect(find(identifier)).to be_valid_index
end
it 'returns false if the index is marked as invalid' do
@@ -86,31 +78,13 @@ RSpec.describe Gitlab::Database::PostgresIndex do
WHERE pg_class.relname = 'foo_idx' AND pg_index.indexrelid = pg_class.oid
SQL
- expect(find('public.foo_idx')).not_to be_valid_index
- end
- end
-
- describe '#to_s' do
- it 'returns the index name' do
- expect(find('public.foo_idx').to_s).to eq('foo_idx')
- end
- end
-
- describe '#name' do
- it 'returns the name' do
- expect(find('public.foo_idx').name).to eq('foo_idx')
- end
- end
-
- describe '#schema' do
- it 'returns the index schema' do
- expect(find('public.foo_idx').schema).to eq('public')
+ expect(find(identifier)).not_to be_valid_index
end
end
describe '#definition' do
it 'returns the index definition' do
- expect(find('public.foo_idx').definition).to eq('CREATE INDEX foo_idx ON public.users USING btree (name)')
+ expect(find(identifier).definition).to eq('CREATE INDEX foo_idx ON public.users USING btree (name)')
end
end
end
diff --git a/spec/lib/gitlab/database/postgres_partition_spec.rb b/spec/lib/gitlab/database/postgres_partition_spec.rb
new file mode 100644
index 00000000000..5a44090d5ae
--- /dev/null
+++ b/spec/lib/gitlab/database/postgres_partition_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::PostgresPartition, type: :model do
+ let(:schema) { 'gitlab_partitions_dynamic' }
+ let(:name) { '_test_partition_01' }
+ let(:identifier) { "#{schema}.#{name}" }
+
+ before do
+ ActiveRecord::Base.connection.execute(<<~SQL)
+ CREATE TABLE public._test_partitioned_table (
+ id serial NOT NULL,
+ created_at timestamptz NOT NULL,
+ PRIMARY KEY (id, created_at)
+ ) PARTITION BY RANGE(created_at);
+
+ CREATE TABLE #{identifier} PARTITION OF public._test_partitioned_table
+ FOR VALUES FROM ('2020-01-01') to ('2020-02-01');
+ SQL
+ end
+
+ def find(identifier)
+ described_class.by_identifier(identifier)
+ end
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:postgres_partitioned_table).with_primary_key('identifier').with_foreign_key('parent_identifier') }
+ end
+
+ it_behaves_like 'a postgres model'
+
+ describe '.for_parent_table' do
+ let(:second_name) { '_test_partition_02' }
+
+ before do
+ ActiveRecord::Base.connection.execute(<<~SQL)
+ CREATE TABLE #{schema}.#{second_name} PARTITION OF public._test_partitioned_table
+ FOR VALUES FROM ('2020-02-01') to ('2020-03-01');
+
+ CREATE TABLE #{schema}._test_other_table (
+ id serial NOT NULL,
+ created_at timestamptz NOT NULL,
+ PRIMARY KEY (id, created_at)
+ ) PARTITION BY RANGE(created_at);
+
+ CREATE TABLE #{schema}._test_other_partition_01 PARTITION OF #{schema}._test_other_table
+ FOR VALUES FROM ('2020-01-01') to ('2020-02-01');
+ SQL
+ end
+
+ it 'returns partitions for the parent table in the current schema' do
+ partitions = described_class.for_parent_table('_test_partitioned_table')
+
+ expect(partitions.count).to eq(2)
+ expect(partitions.pluck(:name)).to eq([name, second_name])
+ end
+
+ it 'does not return partitions for tables not in the current schema' do
+ expect(described_class.for_parent_table('_test_other_table').count).to eq(0)
+ end
+ end
+
+ describe '#parent_identifier' do
+ it 'returns the parent table identifier' do
+ expect(find(identifier).parent_identifier).to eq('public._test_partitioned_table')
+ end
+ end
+
+ describe '#condition' do
+ it 'returns the condition for the partitioned values' do
+ expect(find(identifier).condition).to eq("FOR VALUES FROM ('2020-01-01 00:00:00+00') TO ('2020-02-01 00:00:00+00')")
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/postgres_partitioned_table_spec.rb b/spec/lib/gitlab/database/postgres_partitioned_table_spec.rb
new file mode 100644
index 00000000000..21a46f1a0a6
--- /dev/null
+++ b/spec/lib/gitlab/database/postgres_partitioned_table_spec.rb
@@ -0,0 +1,98 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::PostgresPartitionedTable, type: :model do
+ let(:schema) { 'public' }
+ let(:name) { 'foo_range' }
+ let(:identifier) { "#{schema}.#{name}" }
+
+ before do
+ ActiveRecord::Base.connection.execute(<<~SQL)
+ CREATE TABLE #{identifier} (
+ id serial NOT NULL,
+ created_at timestamptz NOT NULL,
+ PRIMARY KEY (id, created_at)
+ ) PARTITION BY RANGE(created_at);
+
+ CREATE TABLE public.foo_list (
+ id serial NOT NULL,
+ row_type text NOT NULL,
+ PRIMARY KEY (id, row_type)
+ ) PARTITION BY LIST(row_type);
+
+ CREATE TABLE public.foo_hash (
+ id serial NOT NULL,
+ row_value int NOT NULL,
+ PRIMARY KEY (id, row_value)
+ ) PARTITION BY HASH (row_value);
+ SQL
+ end
+
+ def find(identifier)
+ described_class.by_identifier(identifier)
+ end
+
+ describe 'associations' do
+ it { is_expected.to have_many(:postgres_partitions).with_primary_key('identifier').with_foreign_key('parent_identifier') }
+ end
+
+ it_behaves_like 'a postgres model'
+
+ describe '.find_by_name_in_current_schema' do
+ it 'finds the partitioned tables in the current schema by name', :aggregate_failures do
+ partitioned_table = described_class.find_by_name_in_current_schema(name)
+
+ expect(partitioned_table).not_to be_nil
+ expect(partitioned_table.identifier).to eq(identifier)
+ end
+
+ it 'does not find partitioned tables in a different schema' do
+ ActiveRecord::Base.connection.execute(<<~SQL)
+ ALTER TABLE #{identifier} SET SCHEMA gitlab_partitions_dynamic
+ SQL
+
+ expect(described_class.find_by_name_in_current_schema(name)).to be_nil
+ end
+ end
+
+ describe '#dynamic?' do
+ it 'returns true for tables partitioned by range' do
+ expect(find('public.foo_range')).to be_dynamic
+ end
+
+ it 'returns true for tables partitioned by list' do
+ expect(find('public.foo_list')).to be_dynamic
+ end
+
+ it 'returns false for tables partitioned by hash' do
+ expect(find('public.foo_hash')).not_to be_dynamic
+ end
+ end
+
+ describe '#static?' do
+ it 'returns false for tables partitioned by range' do
+ expect(find('public.foo_range')).not_to be_static
+ end
+
+ it 'returns false for tables partitioned by list' do
+ expect(find('public.foo_list')).not_to be_static
+ end
+
+ it 'returns true for tables partitioned by hash' do
+ expect(find('public.foo_hash')).to be_static
+ end
+ end
+
+ describe '#strategy' do
+ it 'returns the partitioning strategy' do
+ expect(find(identifier).strategy).to eq('range')
+ end
+ end
+
+ describe '#key_columns' do
+ it 'returns the partitioning key columns' do
+ expect(find(identifier).key_columns).to match_array(['created_at'])
+ end
+ end
+end
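The #dynamic? and #static? examples above fully determine the mapping from partitioning strategy to the two predicates. A minimal sketch consistent with them (not the model's real implementation) is:

```ruby
# Hedged sketch: range- and list-partitioned tables are treated as dynamic,
# hash-partitioned tables as static, matching the expectations above.
DYNAMIC_PARTITIONING_STRATEGIES = %w[range list].freeze

def dynamic?
  DYNAMIC_PARTITIONING_STRATEGIES.include?(strategy)
end

def static?
  !dynamic?
end
```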
diff --git a/spec/lib/gitlab/database/reindexing_spec.rb b/spec/lib/gitlab/database/reindexing_spec.rb
index 86b3c029944..359e0597f4e 100644
--- a/spec/lib/gitlab/database/reindexing_spec.rb
+++ b/spec/lib/gitlab/database/reindexing_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe Gitlab::Database::Reindexing do
it 'retrieves regular indexes that are not left-overs from previous runs' do
result = double
- expect(Gitlab::Database::PostgresIndex).to receive_message_chain('regular.not_match.not_match').with(no_args).with('^tmp_reindex_').with('^old_reindex_').and_return(result)
+ expect(Gitlab::Database::PostgresIndex).to receive_message_chain('regular.where.not_match.not_match').with(no_args).with('NOT expression').with('^tmp_reindex_').with('^old_reindex_').and_return(result)
expect(subject).to eq(result)
end
diff --git a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
index a38fe2c51ca..2ebfb054a96 100644
--- a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
@@ -11,13 +11,13 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
end
let(:email_raw) { email_fixture('emails/service_desk.eml') }
- let_it_be(:namespace) { create(:namespace, name: "email") }
+ let_it_be(:group) { create(:group, :private, name: "email") }
let(:expected_description) do
"Service desk stuff!\n\n```\na = b\n```\n\n`/label ~label1`\n`/assign @user1`\n`/close`\n![image](uploads/image.png)"
end
context 'service desk is enabled for the project' do
- let_it_be(:project) { create(:project, :repository, :public, namespace: namespace, path: 'test', service_desk_enabled: true) }
+ let_it_be(:project) { create(:project, :repository, :private, group: group, path: 'test', service_desk_enabled: true) }
before do
allow(Gitlab::ServiceDesk).to receive(:supported?).and_return(true)
@@ -101,6 +101,18 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
expect(issue.milestone).to eq(milestone)
end
+ it 'applies group labels using quick actions' do
+ group_label = create(:group_label, group: project.group, title: 'label2')
+ file_content = %(Text from template \n/label ~#{group_label.title}"")
+ set_template_file('with_group_labels', file_content)
+
+ receiver.execute
+
+ issue = Issue.last
+ expect(issue.description).to include('Text from template')
+ expect(issue.label_ids).to include(group_label.id)
+ end
+
it 'redacts quick actions present on user email body' do
set_template_file('service_desk1', 'text from template')
@@ -289,7 +301,8 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
end
context 'service desk is disabled for the project' do
- let(:project) { create(:project, :public, namespace: namespace, path: 'test', service_desk_enabled: false) }
+ let(:group) { create(:group) }
+ let(:project) { create(:project, :public, group: group, path: 'test', service_desk_enabled: false) }
it 'bounces the email' do
expect { receiver.execute }.to raise_error(Gitlab::Email::ProcessingError)
diff --git a/spec/lib/gitlab/error_tracking_spec.rb b/spec/lib/gitlab/error_tracking_spec.rb
index 2cc9ff36c99..68a46b11487 100644
--- a/spec/lib/gitlab/error_tracking_spec.rb
+++ b/spec/lib/gitlab/error_tracking_spec.rb
@@ -198,47 +198,39 @@ RSpec.describe Gitlab::ErrorTracking do
end
describe '.track_exception' do
- it 'calls Raven.capture_exception' do
- expected_extras = {
- some_other_info: 'info',
- issue_url: issue_url
- }
+ let(:extra) { { issue_url: issue_url, some_other_info: 'info' } }
- expected_tags = {
- correlation_id: 'cid'
- }
+ subject(:track_exception) { described_class.track_exception(exception, extra) }
- expect(Raven).to receive(:capture_exception)
- .with(exception,
- tags: a_hash_including(expected_tags),
- extra: a_hash_including(expected_extras))
-
- described_class.track_exception(
- exception,
- issue_url: issue_url,
- some_other_info: 'info'
- )
+ before do
+ allow(Raven).to receive(:capture_exception).and_call_original
+ allow(Gitlab::ErrorTracking::Logger).to receive(:error)
+ end
+
+ it 'calls Raven.capture_exception' do
+ track_exception
+
+ expect(Raven).to have_received(:capture_exception)
+ .with(exception,
+ tags: a_hash_including(correlation_id: 'cid'),
+ extra: a_hash_including(some_other_info: 'info', issue_url: issue_url))
end
it 'calls Gitlab::ErrorTracking::Logger.error with formatted payload' do
- expect(Gitlab::ErrorTracking::Logger).to receive(:error)
- .with(a_hash_including(*expected_payload_includes))
+ track_exception
- described_class.track_exception(
- exception,
- issue_url: issue_url,
- some_other_info: 'info'
- )
+ expect(Gitlab::ErrorTracking::Logger).to have_received(:error)
+ .with(a_hash_including(*expected_payload_includes))
end
context 'with filterable parameters' do
let(:extra) { { test: 1, my_token: 'test' } }
it 'filters parameters' do
- expect(Gitlab::ErrorTracking::Logger).to receive(:error).with(
- hash_including({ 'extra.test' => 1, 'extra.my_token' => '[FILTERED]' }))
+ track_exception
- described_class.track_exception(exception, extra)
+ expect(Gitlab::ErrorTracking::Logger).to have_received(:error)
+ .with(hash_including({ 'extra.test' => 1, 'extra.my_token' => '[FILTERED]' }))
end
end
@@ -247,44 +239,58 @@ RSpec.describe Gitlab::ErrorTracking do
let(:exception) { double(message: 'bang!', sentry_extra_data: extra_info, backtrace: caller) }
it 'includes the extra data from the exception in the tracking information' do
- expect(Raven).to receive(:capture_exception)
- .with(exception, a_hash_including(extra: a_hash_including(extra_info)))
+ track_exception
- described_class.track_exception(exception)
+ expect(Raven).to have_received(:capture_exception)
+ .with(exception, a_hash_including(extra: a_hash_including(extra_info)))
end
end
context 'the exception implements :sentry_extra_data, which returns nil' do
let(:exception) { double(message: 'bang!', sentry_extra_data: nil, backtrace: caller) }
+ let(:extra) { { issue_url: issue_url } }
it 'just includes the other extra info' do
- extra_info = { issue_url: issue_url }
- expect(Raven).to receive(:capture_exception)
- .with(exception, a_hash_including(extra: a_hash_including(extra_info)))
+ track_exception
- described_class.track_exception(exception, extra_info)
+ expect(Raven).to have_received(:capture_exception)
+ .with(exception, a_hash_including(extra: a_hash_including(extra)))
end
end
context 'with sidekiq args' do
- it 'ensures extra.sidekiq.args is a string' do
- extra = { sidekiq: { 'class' => 'PostReceive', 'args' => [1, { 'id' => 2, 'name' => 'hello' }, 'some-value', 'another-value'] } }
+ context 'when the args do not have anything sensitive' do
+ let(:extra) { { sidekiq: { 'class' => 'PostReceive', 'args' => [1, { 'id' => 2, 'name' => 'hello' }, 'some-value', 'another-value'] } } }
- expect(Gitlab::ErrorTracking::Logger).to receive(:error).with(
- hash_including({ 'extra.sidekiq' => { 'class' => 'PostReceive', 'args' => ['1', '{"id"=>2, "name"=>"hello"}', 'some-value', 'another-value'] } }))
+ it 'ensures extra.sidekiq.args is a string' do
+ track_exception
- described_class.track_exception(exception, extra)
+ expect(Gitlab::ErrorTracking::Logger).to have_received(:error).with(
+ hash_including({ 'extra.sidekiq' => { 'class' => 'PostReceive', 'args' => ['1', '{"id"=>2, "name"=>"hello"}', 'some-value', 'another-value'] } }))
+ end
end
- it 'filters sensitive arguments before sending' do
- extra = { sidekiq: { 'class' => 'UnknownWorker', 'args' => ['sensitive string', 1, 2] } }
+ context 'when the args have sensitive information' do
+ let(:extra) { { sidekiq: { 'class' => 'UnknownWorker', 'args' => ['sensitive string', 1, 2] } } }
+
+ it 'filters sensitive arguments before sending' do
+ track_exception
+
+ expect(sentry_event.dig('extra', 'sidekiq', 'args')).to eq(['[FILTERED]', 1, 2])
+ expect(Gitlab::ErrorTracking::Logger).to have_received(:error).with(
+ hash_including('extra.sidekiq' => { 'class' => 'UnknownWorker', 'args' => ['[FILTERED]', '1', '2'] }))
+ end
+ end
+ end
- expect(Gitlab::ErrorTracking::Logger).to receive(:error).with(
- hash_including('extra.sidekiq' => { 'class' => 'UnknownWorker', 'args' => ['[FILTERED]', '1', '2'] }))
+ context 'when the error is kind of an `ActiveRecord::StatementInvalid`' do
+ let(:exception) { ActiveRecord::StatementInvalid.new(sql: 'SELECT "users".* FROM "users" WHERE "users"."id" = 1 AND "users"."foo" = $1') }
- described_class.track_exception(exception, extra)
+ it 'injects the normalized sql query into extra' do
+ track_exception
- expect(sentry_event.dig('extra', 'sidekiq', 'args')).to eq(['[FILTERED]', 1, 2])
+ expect(Raven).to have_received(:capture_exception)
+ .with(exception, a_hash_including(extra: a_hash_including(sql: 'SELECT "users".* FROM "users" WHERE "users"."id" = $2 AND "users"."foo" = $1')))
end
end
end
diff --git a/spec/lib/gitlab/etag_caching/middleware_spec.rb b/spec/lib/gitlab/etag_caching/middleware_spec.rb
index 361b2329e15..3122a3b1c07 100644
--- a/spec/lib/gitlab/etag_caching/middleware_spec.rb
+++ b/spec/lib/gitlab/etag_caching/middleware_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::EtagCaching::Middleware do
+RSpec.describe Gitlab::EtagCaching::Middleware, :clean_gitlab_redis_shared_state do
let(:app) { double(:app) }
let(:middleware) { described_class.new(app) }
let(:app_status_code) { 200 }
@@ -10,6 +10,17 @@ RSpec.describe Gitlab::EtagCaching::Middleware do
let(:enabled_path) { '/gitlab-org/gitlab-foss/noteable/issue/1/notes' }
let(:endpoint) { 'issue_notes' }
+ describe '.skip!' do
+ it 'sets the skip header on the response' do
+ rsp = ActionDispatch::Response.new
+ rsp.set_header('Anything', 'Else')
+
+ described_class.skip!(rsp)
+
+ expect(rsp.headers.to_h).to eq(described_class::SKIP_HEADER_KEY => '1', 'Anything' => 'Else')
+ end
+ end
+
context 'when ETag caching is not enabled for current route' do
let(:path) { '/gitlab-org/gitlab-foss/tree/master/noteable/issue/1/notes' }
@@ -17,10 +28,12 @@ RSpec.describe Gitlab::EtagCaching::Middleware do
mock_app_response
end
- it 'does not add ETag header' do
+ it 'does not add ETag headers' do
_, headers, _ = middleware.call(build_request(path, if_none_match))
expect(headers['ETag']).to be_nil
+ expect(headers['X-Gitlab-From-Cache']).to be_nil
+ expect(headers[::Gitlab::Metrics::RequestsRackMiddleware::FEATURE_CATEGORY_HEADER]).to be_nil
end
it 'passes status code from app' do
@@ -68,13 +81,35 @@ RSpec.describe Gitlab::EtagCaching::Middleware do
mock_value_in_store('123')
end
- it 'returns this value as header' do
+ it 'returns the correct headers' do
_, headers, _ = middleware.call(build_request(path, if_none_match))
expect(headers['ETag']).to eq 'W/"123"'
end
end
+ context 'when the matching route requests that the ETag is skipped' do
+ let(:path) { enabled_path }
+ let(:app) do
+ proc do |_env|
+ response = ActionDispatch::Response.new
+
+ described_class.skip!(response)
+
+ [200, response.headers.to_h, '']
+ end
+ end
+
+ it 'returns the correct headers' do
+ expect(app).to receive(:call).and_call_original
+
+ _, headers, _ = middleware.call(build_request(path, if_none_match))
+
+ expect(headers).not_to have_key('ETag')
+ expect(headers).not_to have_key(described_class::SKIP_HEADER_KEY)
+ end
+ end
+
shared_examples 'sends a process_action.action_controller notification' do |status_code|
let(:expected_items) do
{
@@ -126,6 +161,13 @@ RSpec.describe Gitlab::EtagCaching::Middleware do
expect(status).to eq 304
end
+ it 'sets correct headers' do
+ _, headers, _ = middleware.call(build_request(path, if_none_match))
+
+ expect(headers).to include('X-Gitlab-From-Cache' => 'true',
+ ::Gitlab::Metrics::RequestsRackMiddleware::FEATURE_CATEGORY_HEADER => 'issue_tracking')
+ end
+
it_behaves_like 'sends a process_action.action_controller notification', 304
it 'returns empty body' do
diff --git a/spec/lib/gitlab/etag_caching/router_spec.rb b/spec/lib/gitlab/etag_caching/router_spec.rb
index 3e939e588ad..dbd9cc230f1 100644
--- a/spec/lib/gitlab/etag_caching/router_spec.rb
+++ b/spec/lib/gitlab/etag_caching/router_spec.rb
@@ -127,4 +127,12 @@ RSpec.describe Gitlab::EtagCaching::Router do
expect(result).to be_present
expect(result.name).to eq 'project_pipeline'
end
+
+ it 'has a valid feature category for every route', :aggregate_failures do
+ feature_categories = YAML.load_file(Rails.root.join('config', 'feature_categories.yml')).to_set
+
+ described_class::ROUTES.each do |route|
+ expect(feature_categories).to include(route.feature_category), "#{route.name} has a category of #{route.feature_category}, which is not valid"
+ end
+ end
end
diff --git a/spec/lib/gitlab/exclusive_lease_helpers_spec.rb b/spec/lib/gitlab/exclusive_lease_helpers_spec.rb
index 40669f06371..8bf06bcebe2 100644
--- a/spec/lib/gitlab/exclusive_lease_helpers_spec.rb
+++ b/spec/lib/gitlab/exclusive_lease_helpers_spec.rb
@@ -88,7 +88,7 @@ RSpec.describe Gitlab::ExclusiveLeaseHelpers, :clean_gitlab_redis_shared_state d
let(:options) { { retries: 0 } }
it 'never sleeps' do
- expect(class_instance).not_to receive(:sleep)
+ expect_any_instance_of(Gitlab::ExclusiveLeaseHelpers::SleepingLock).not_to receive(:sleep)
expect { subject }.to raise_error('Failed to obtain a lock')
end
@@ -98,7 +98,7 @@ RSpec.describe Gitlab::ExclusiveLeaseHelpers, :clean_gitlab_redis_shared_state d
let(:options) { { retries: 1, sleep_sec: 0.05.seconds } }
it 'receives the specified argument' do
- expect_any_instance_of(Object).to receive(:sleep).with(0.05.seconds).once
+ expect_any_instance_of(Gitlab::ExclusiveLeaseHelpers::SleepingLock).to receive(:sleep).with(0.05.seconds).once
expect { subject }.to raise_error('Failed to obtain a lock')
end
@@ -108,8 +108,8 @@ RSpec.describe Gitlab::ExclusiveLeaseHelpers, :clean_gitlab_redis_shared_state d
let(:options) { { retries: 2, sleep_sec: ->(num) { 0.1 + num } } }
it 'receives the specified argument' do
- expect_any_instance_of(Object).to receive(:sleep).with(1.1.seconds).once
- expect_any_instance_of(Object).to receive(:sleep).with(2.1.seconds).once
+ expect_any_instance_of(Gitlab::ExclusiveLeaseHelpers::SleepingLock).to receive(:sleep).with(1.1.seconds).once
+ expect_any_instance_of(Gitlab::ExclusiveLeaseHelpers::SleepingLock).to receive(:sleep).with(2.1.seconds).once
expect { subject }.to raise_error('Failed to obtain a lock')
end
diff --git a/spec/lib/gitlab/experimentation/controller_concern_spec.rb b/spec/lib/gitlab/experimentation/controller_concern_spec.rb
new file mode 100644
index 00000000000..2fe3d36daf7
--- /dev/null
+++ b/spec/lib/gitlab/experimentation/controller_concern_spec.rb
@@ -0,0 +1,438 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do
+ before do
+ stub_const('Gitlab::Experimentation::EXPERIMENTS', {
+ backwards_compatible_test_experiment: {
+ environment: environment,
+ tracking_category: 'Team',
+ use_backwards_compatible_subject_index: true
+ },
+ test_experiment: {
+ environment: environment,
+ tracking_category: 'Team'
+ }
+ }
+ )
+
+ Feature.enable_percentage_of_time(:backwards_compatible_test_experiment_experiment_percentage, enabled_percentage)
+ Feature.enable_percentage_of_time(:test_experiment_experiment_percentage, enabled_percentage)
+ end
+
+ let(:environment) { Rails.env.test? }
+ let(:enabled_percentage) { 10 }
+
+ controller(ApplicationController) do
+ include Gitlab::Experimentation::ControllerConcern
+
+ def index
+ head :ok
+ end
+ end
+
+ describe '#set_experimentation_subject_id_cookie' do
+ let(:do_not_track) { nil }
+ let(:cookie) { cookies.permanent.signed[:experimentation_subject_id] }
+
+ before do
+ request.headers['DNT'] = do_not_track if do_not_track.present?
+
+ get :index
+ end
+
+ context 'cookie is present' do
+ before do
+ cookies[:experimentation_subject_id] = 'test'
+ end
+
+ it 'does not change the cookie' do
+ expect(cookies[:experimentation_subject_id]).to eq 'test'
+ end
+ end
+
+ context 'cookie is not present' do
+ it 'sets a permanent signed cookie' do
+ expect(cookie).to be_present
+ end
+
+ context 'DNT: 0' do
+ let(:do_not_track) { '0' }
+
+ it 'sets a permanent signed cookie' do
+ expect(cookie).to be_present
+ end
+ end
+
+ context 'DNT: 1' do
+ let(:do_not_track) { '1' }
+
+ it 'does nothing' do
+ expect(cookie).not_to be_present
+ end
+ end
+ end
+ end
+
+ describe '#push_frontend_experiment' do
+ it 'pushes an experiment to the frontend' do
+ gon = instance_double('gon')
+ experiments = { experiments: { 'myExperiment' => true } }
+
+ stub_experiment_for_user(my_experiment: true)
+ allow(controller).to receive(:gon).and_return(gon)
+
+ expect(gon).to receive(:push).with(experiments, true)
+
+ controller.push_frontend_experiment(:my_experiment)
+ end
+ end
+
+ describe '#experiment_enabled?' do
+ def check_experiment(exp_key = :test_experiment)
+ controller.experiment_enabled?(exp_key)
+ end
+
+ subject { check_experiment }
+
+ context 'cookie is not present' do
+ it 'calls Gitlab::Experimentation.enabled_for_value? with the name of the experiment and an experimentation_subject_index of nil' do
+ expect(Gitlab::Experimentation).to receive(:enabled_for_value?).with(:test_experiment, nil)
+ check_experiment
+ end
+ end
+
+ context 'cookie is present' do
+ using RSpec::Parameterized::TableSyntax
+
+ before do
+ cookies.permanent.signed[:experimentation_subject_id] = 'abcd-1234'
+ get :index
+ end
+
+ where(:experiment_key, :index_value) do
+ :test_experiment | 40 # Zlib.crc32('test_experimentabcd-1234') % 100 = 40
+ :backwards_compatible_test_experiment | 76 # 'abcd1234'.hex % 100 = 76
+ end
+
+ with_them do
+ it 'calls Gitlab::Experimentation.enabled_for_value? with the name of the experiment and the calculated experimentation_subject_index based on the uuid' do
+ expect(Gitlab::Experimentation).to receive(:enabled_for_value?).with(experiment_key, index_value)
+ check_experiment(experiment_key)
+ end
+ end
+ end
+
+ it 'returns true when DNT: 0 is set in the request' do
+ allow(Gitlab::Experimentation).to receive(:enabled_for_value?) { true }
+ controller.request.headers['DNT'] = '0'
+
+ is_expected.to be_truthy
+ end
+
+ it 'returns false when DNT: 1 is set in the request' do
+ allow(Gitlab::Experimentation).to receive(:enabled_for_value?) { true }
+ controller.request.headers['DNT'] = '1'
+
+ is_expected.to be_falsy
+ end
+
+ describe 'URL parameter to force enable experiment' do
+ it 'returns true unconditionally' do
+ get :index, params: { force_experiment: :test_experiment }
+
+ is_expected.to be_truthy
+ end
+ end
+ end
+
+ describe '#track_experiment_event', :snowplow do
+ context 'when the experiment is enabled' do
+ before do
+ stub_experiment(test_experiment: true)
+ end
+
+ context 'the user is part of the experimental group' do
+ before do
+ stub_experiment_for_user(test_experiment: true)
+ end
+
+ it 'tracks the event with the right parameters' do
+ controller.track_experiment_event(:test_experiment, 'start', 1)
+
+ expect_snowplow_event(
+ category: 'Team',
+ action: 'start',
+ property: 'experimental_group',
+ value: 1
+ )
+ end
+ end
+
+ context 'the user is part of the control group' do
+ before do
+ stub_experiment_for_user(test_experiment: false)
+ end
+
+ it 'tracks the event with the right parameters' do
+ controller.track_experiment_event(:test_experiment, 'start', 1)
+
+ expect_snowplow_event(
+ category: 'Team',
+ action: 'start',
+ property: 'control_group',
+ value: 1
+ )
+ end
+ end
+
+ context 'do not track is disabled' do
+ before do
+ request.headers['DNT'] = '0'
+ end
+
+ it 'does track the event' do
+ controller.track_experiment_event(:test_experiment, 'start', 1)
+
+ expect_snowplow_event(
+ category: 'Team',
+ action: 'start',
+ property: 'control_group',
+ value: 1
+ )
+ end
+ end
+
+ context 'do not track enabled' do
+ before do
+ request.headers['DNT'] = '1'
+ end
+
+ it 'does not track the event' do
+ controller.track_experiment_event(:test_experiment, 'start', 1)
+
+ expect_no_snowplow_event
+ end
+ end
+ end
+
+ context 'when the experiment is disabled' do
+ before do
+ stub_experiment(test_experiment: false)
+ end
+
+ it 'does not track the event' do
+ controller.track_experiment_event(:test_experiment, 'start')
+
+ expect_no_snowplow_event
+ end
+ end
+ end
+
+ describe '#frontend_experimentation_tracking_data' do
+ context 'when the experiment is enabled' do
+ before do
+ stub_experiment(test_experiment: true)
+ end
+
+ context 'the user is part of the experimental group' do
+ before do
+ stub_experiment_for_user(test_experiment: true)
+ end
+
+ it 'pushes the right parameters to gon' do
+ controller.frontend_experimentation_tracking_data(:test_experiment, 'start', 'team_id')
+ expect(Gon.tracking_data).to eq(
+ {
+ category: 'Team',
+ action: 'start',
+ property: 'experimental_group',
+ value: 'team_id'
+ }
+ )
+ end
+ end
+
+ context 'the user is part of the control group' do
+ before do
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:experiment_enabled?).with(:test_experiment).and_return(false)
+ end
+ end
+
+ it 'pushes the right parameters to gon' do
+ controller.frontend_experimentation_tracking_data(:test_experiment, 'start', 'team_id')
+ expect(Gon.tracking_data).to eq(
+ {
+ category: 'Team',
+ action: 'start',
+ property: 'control_group',
+ value: 'team_id'
+ }
+ )
+ end
+
+ it 'does not send nil value to gon' do
+ controller.frontend_experimentation_tracking_data(:test_experiment, 'start')
+ expect(Gon.tracking_data).to eq(
+ {
+ category: 'Team',
+ action: 'start',
+ property: 'control_group'
+ }
+ )
+ end
+ end
+
+ context 'do not track disabled' do
+ before do
+ request.headers['DNT'] = '0'
+ end
+
+ it 'pushes the right parameters to gon' do
+ controller.frontend_experimentation_tracking_data(:test_experiment, 'start')
+
+ expect(Gon.tracking_data).to eq(
+ {
+ category: 'Team',
+ action: 'start',
+ property: 'control_group'
+ }
+ )
+ end
+ end
+
+ context 'do not track enabled' do
+ before do
+ request.headers['DNT'] = '1'
+ end
+
+ it 'does not push data to gon' do
+ controller.frontend_experimentation_tracking_data(:test_experiment, 'start')
+
+ expect(Gon.method_defined?(:tracking_data)).to be_falsey
+ end
+ end
+ end
+
+ context 'when the experiment is disabled' do
+ before do
+ stub_experiment(test_experiment: false)
+ end
+
+ it 'does not push data to gon' do
+ expect(Gon.method_defined?(:tracking_data)).to be_falsey
+ controller.track_experiment_event(:test_experiment, 'start')
+ end
+ end
+ end
+
+ describe '#record_experiment_user' do
+ let(:user) { build(:user) }
+
+ context 'when the experiment is enabled' do
+ before do
+ stub_experiment(test_experiment: true)
+ allow(controller).to receive(:current_user).and_return(user)
+ end
+
+ context 'the user is part of the experimental group' do
+ before do
+ stub_experiment_for_user(test_experiment: true)
+ end
+
+ it 'calls add_user on the Experiment model' do
+ expect(::Experiment).to receive(:add_user).with(:test_experiment, :experimental, user)
+
+ controller.record_experiment_user(:test_experiment)
+ end
+ end
+
+ context 'the user is part of the control group' do
+ before do
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:experiment_enabled?).with(:test_experiment).and_return(false)
+ end
+ end
+
+ it 'calls add_user on the Experiment model' do
+ expect(::Experiment).to receive(:add_user).with(:test_experiment, :control, user)
+
+ controller.record_experiment_user(:test_experiment)
+ end
+ end
+ end
+
+ context 'when the experiment is disabled' do
+ before do
+ stub_experiment(test_experiment: false)
+ allow(controller).to receive(:current_user).and_return(user)
+ end
+
+ it 'does not call add_user on the Experiment model' do
+ expect(::Experiment).not_to receive(:add_user)
+
+ controller.record_experiment_user(:test_experiment)
+ end
+ end
+
+ context 'when there is no current_user' do
+ before do
+ stub_experiment(test_experiment: true)
+ end
+
+ it 'does not call add_user on the Experiment model' do
+ expect(::Experiment).not_to receive(:add_user)
+
+ controller.record_experiment_user(:test_experiment)
+ end
+ end
+
+ context 'do not track' do
+ before do
+ allow(controller).to receive(:current_user).and_return(user)
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:experiment_enabled?).with(:test_experiment).and_return(false)
+ end
+ end
+
+ context 'is disabled' do
+ before do
+ request.headers['DNT'] = '0'
+ end
+
+ it 'calls add_user on the Experiment model' do
+ expect(::Experiment).to receive(:add_user).with(:test_experiment, :control, user)
+
+ controller.record_experiment_user(:test_experiment)
+ end
+ end
+
+ context 'is enabled' do
+ before do
+ request.headers['DNT'] = '1'
+ end
+
+ it 'does not call add_user on the Experiment model' do
+ expect(::Experiment).not_to receive(:add_user)
+
+ controller.record_experiment_user(:test_experiment)
+ end
+ end
+ end
+ end
+
+ describe '#experiment_tracking_category_and_group' do
+ let_it_be(:experiment_key) { :test_something }
+
+ subject { controller.experiment_tracking_category_and_group(experiment_key) }
+
+ it 'returns a string with the experiment tracking category & group joined with a ":"' do
+ expect(controller).to receive(:tracking_category).with(experiment_key).and_return('Experiment::Category')
+ expect(controller).to receive(:tracking_group).with(experiment_key, '_group').and_return('experimental_group')
+
+ expect(subject).to eq('Experiment::Category:experimental_group')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/experimentation/group_types_spec.rb b/spec/lib/gitlab/experimentation/group_types_spec.rb
new file mode 100644
index 00000000000..599ad08f706
--- /dev/null
+++ b/spec/lib/gitlab/experimentation/group_types_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Experimentation::GroupTypes do
+ it 'defines a GROUP_CONTROL constant' do
+ expect(described_class.const_defined?(:GROUP_CONTROL)).to be_truthy
+ end
+
+ it 'defines a GROUP_EXPERIMENTAL constant' do
+ expect(described_class.const_defined?(:GROUP_EXPERIMENTAL)).to be_truthy
+ end
+end
diff --git a/spec/lib/gitlab/experimentation_spec.rb b/spec/lib/gitlab/experimentation_spec.rb
index e93593d348f..ebf98a0151f 100644
--- a/spec/lib/gitlab/experimentation_spec.rb
+++ b/spec/lib/gitlab/experimentation_spec.rb
@@ -2,423 +2,54 @@
require 'spec_helper'
+# As each associated, backwards-compatible experiment gets cleaned up and removed from the EXPERIMENTS list, its key will also get removed from this list. Once the list here is empty, we can remove the backwards compatibility code altogether.
+# Originally created as part of https://gitlab.com/gitlab-org/gitlab/-/merge_requests/45733 for https://gitlab.com/gitlab-org/gitlab/-/issues/270858.
+RSpec.describe Gitlab::Experimentation::EXPERIMENTS do
+ it 'temporarily ensures we know what experiments exist for backwards compatibility' do
+ expected_experiment_keys = [
+ :onboarding_issues,
+ :ci_notification_dot,
+ :upgrade_link_in_user_menu_a,
+ :invite_members_version_a,
+ :invite_members_version_b,
+ :invite_members_empty_group_version_a,
+ :new_create_project_ui,
+ :contact_sales_btn_in_app,
+ :customize_homepage,
+ :invite_email,
+ :invitation_reminders,
+ :group_only_trials,
+ :default_to_issues_board
+ ]
+
+ backwards_compatible_experiment_keys = described_class.filter { |_, v| v[:use_backwards_compatible_subject_index] }.keys
+
+ expect(backwards_compatible_experiment_keys).not_to be_empty, "Oh, hey! Let's clean up that :use_backwards_compatible_subject_index stuff now :D"
+ expect(backwards_compatible_experiment_keys).to match(expected_experiment_keys)
+ end
+end
+
RSpec.describe Gitlab::Experimentation, :snowplow do
before do
stub_const('Gitlab::Experimentation::EXPERIMENTS', {
+ backwards_compatible_test_experiment: {
+ environment: environment,
+ tracking_category: 'Team',
+ use_backwards_compatible_subject_index: true
+ },
test_experiment: {
environment: environment,
tracking_category: 'Team'
}
})
+ Feature.enable_percentage_of_time(:backwards_compatible_test_experiment_experiment_percentage, enabled_percentage)
Feature.enable_percentage_of_time(:test_experiment_experiment_percentage, enabled_percentage)
end
let(:environment) { Rails.env.test? }
let(:enabled_percentage) { 10 }
- describe Gitlab::Experimentation::ControllerConcern, type: :controller do
- controller(ApplicationController) do
- include Gitlab::Experimentation::ControllerConcern
-
- def index
- head :ok
- end
- end
-
- describe '#set_experimentation_subject_id_cookie' do
- let(:do_not_track) { nil }
- let(:cookie) { cookies.permanent.signed[:experimentation_subject_id] }
-
- before do
- request.headers['DNT'] = do_not_track if do_not_track.present?
-
- get :index
- end
-
- context 'cookie is present' do
- before do
- cookies[:experimentation_subject_id] = 'test'
- end
-
- it 'does not change the cookie' do
- expect(cookies[:experimentation_subject_id]).to eq 'test'
- end
- end
-
- context 'cookie is not present' do
- it 'sets a permanent signed cookie' do
- expect(cookie).to be_present
- end
-
- context 'DNT: 0' do
- let(:do_not_Track) { '0' }
-
- it 'sets a permanent signed cookie' do
- expect(cookie).to be_present
- end
- end
-
- context 'DNT: 1' do
- let(:do_not_track) { '1' }
-
- it 'does nothing' do
- expect(cookie).not_to be_present
- end
- end
- end
- end
-
- describe '#push_frontend_experiment' do
- it 'pushes an experiment to the frontend' do
- gon = instance_double('gon')
- experiments = { experiments: { 'myExperiment' => true } }
-
- stub_experiment_for_user(my_experiment: true)
- allow(controller).to receive(:gon).and_return(gon)
-
- expect(gon).to receive(:push).with(experiments, true)
-
- controller.push_frontend_experiment(:my_experiment)
- end
- end
-
- describe '#experiment_enabled?' do
- subject { controller.experiment_enabled?(:test_experiment) }
-
- context 'cookie is not present' do
- it 'calls Gitlab::Experimentation.enabled_for_value? with the name of the experiment and an experimentation_subject_index of nil' do
- expect(Gitlab::Experimentation).to receive(:enabled_for_value?).with(:test_experiment, nil)
- controller.experiment_enabled?(:test_experiment)
- end
- end
-
- context 'cookie is present' do
- before do
- cookies.permanent.signed[:experimentation_subject_id] = 'abcd-1234'
- get :index
- end
-
- it 'calls Gitlab::Experimentation.enabled_for_value? with the name of the experiment and an experimentation_subject_index of the modulo 100 of the hex value of the uuid' do
- # 'abcd1234'.hex % 100 = 76
- expect(Gitlab::Experimentation).to receive(:enabled_for_value?).with(:test_experiment, 76)
- controller.experiment_enabled?(:test_experiment)
- end
- end
-
- it 'returns true when DNT: 0 is set in the request' do
- allow(Gitlab::Experimentation).to receive(:enabled_for_value?) { true }
- controller.request.headers['DNT'] = '0'
-
- is_expected.to be_truthy
- end
-
- it 'returns false when DNT: 1 is set in the request' do
- allow(Gitlab::Experimentation).to receive(:enabled_for_value?) { true }
- controller.request.headers['DNT'] = '1'
-
- is_expected.to be_falsy
- end
-
- describe 'URL parameter to force enable experiment' do
- it 'returns true unconditionally' do
- get :index, params: { force_experiment: :test_experiment }
-
- is_expected.to be_truthy
- end
- end
- end
-
- describe '#track_experiment_event' do
- context 'when the experiment is enabled' do
- before do
- stub_experiment(test_experiment: true)
- end
-
- context 'the user is part of the experimental group' do
- before do
- stub_experiment_for_user(test_experiment: true)
- end
-
- it 'tracks the event with the right parameters' do
- controller.track_experiment_event(:test_experiment, 'start', 1)
-
- expect_snowplow_event(
- category: 'Team',
- action: 'start',
- property: 'experimental_group',
- value: 1
- )
- end
- end
-
- context 'the user is part of the control group' do
- before do
- stub_experiment_for_user(test_experiment: false)
- end
-
- it 'tracks the event with the right parameters' do
- controller.track_experiment_event(:test_experiment, 'start', 1)
-
- expect_snowplow_event(
- category: 'Team',
- action: 'start',
- property: 'control_group',
- value: 1
- )
- end
- end
-
- context 'do not track is disabled' do
- before do
- request.headers['DNT'] = '0'
- end
-
- it 'does track the event' do
- controller.track_experiment_event(:test_experiment, 'start', 1)
-
- expect_snowplow_event(
- category: 'Team',
- action: 'start',
- property: 'control_group',
- value: 1
- )
- end
- end
-
- context 'do not track enabled' do
- before do
- request.headers['DNT'] = '1'
- end
-
- it 'does not track the event' do
- controller.track_experiment_event(:test_experiment, 'start', 1)
-
- expect_no_snowplow_event
- end
- end
- end
-
- context 'when the experiment is disabled' do
- before do
- stub_experiment(test_experiment: false)
- end
-
- it 'does not track the event' do
- controller.track_experiment_event(:test_experiment, 'start')
-
- expect_no_snowplow_event
- end
- end
- end
-
- describe '#frontend_experimentation_tracking_data' do
- context 'when the experiment is enabled' do
- before do
- stub_experiment(test_experiment: true)
- end
-
- context 'the user is part of the experimental group' do
- before do
- stub_experiment_for_user(test_experiment: true)
- end
-
- it 'pushes the right parameters to gon' do
- controller.frontend_experimentation_tracking_data(:test_experiment, 'start', 'team_id')
- expect(Gon.tracking_data).to eq(
- {
- category: 'Team',
- action: 'start',
- property: 'experimental_group',
- value: 'team_id'
- }
- )
- end
- end
-
- context 'the user is part of the control group' do
- before do
- allow_next_instance_of(described_class) do |instance|
- allow(instance).to receive(:experiment_enabled?).with(:test_experiment).and_return(false)
- end
- end
-
- it 'pushes the right parameters to gon' do
- controller.frontend_experimentation_tracking_data(:test_experiment, 'start', 'team_id')
- expect(Gon.tracking_data).to eq(
- {
- category: 'Team',
- action: 'start',
- property: 'control_group',
- value: 'team_id'
- }
- )
- end
-
- it 'does not send nil value to gon' do
- controller.frontend_experimentation_tracking_data(:test_experiment, 'start')
- expect(Gon.tracking_data).to eq(
- {
- category: 'Team',
- action: 'start',
- property: 'control_group'
- }
- )
- end
- end
-
- context 'do not track disabled' do
- before do
- request.headers['DNT'] = '0'
- end
-
- it 'pushes the right parameters to gon' do
- controller.frontend_experimentation_tracking_data(:test_experiment, 'start')
-
- expect(Gon.tracking_data).to eq(
- {
- category: 'Team',
- action: 'start',
- property: 'control_group'
- }
- )
- end
- end
-
- context 'do not track enabled' do
- before do
- request.headers['DNT'] = '1'
- end
-
- it 'does not push data to gon' do
- controller.frontend_experimentation_tracking_data(:test_experiment, 'start')
-
- expect(Gon.method_defined?(:tracking_data)).to be_falsey
- end
- end
- end
-
- context 'when the experiment is disabled' do
- before do
- stub_experiment(test_experiment: false)
- end
-
- it 'does not push data to gon' do
- expect(Gon.method_defined?(:tracking_data)).to be_falsey
- controller.track_experiment_event(:test_experiment, 'start')
- end
- end
- end
-
- describe '#record_experiment_user' do
- let(:user) { build(:user) }
-
- context 'when the experiment is enabled' do
- before do
- stub_experiment(test_experiment: true)
- allow(controller).to receive(:current_user).and_return(user)
- end
-
- context 'the user is part of the experimental group' do
- before do
- stub_experiment_for_user(test_experiment: true)
- end
-
- it 'calls add_user on the Experiment model' do
- expect(::Experiment).to receive(:add_user).with(:test_experiment, :experimental, user)
-
- controller.record_experiment_user(:test_experiment)
- end
- end
-
- context 'the user is part of the control group' do
- before do
- allow_next_instance_of(described_class) do |instance|
- allow(instance).to receive(:experiment_enabled?).with(:test_experiment).and_return(false)
- end
- end
-
- it 'calls add_user on the Experiment model' do
- expect(::Experiment).to receive(:add_user).with(:test_experiment, :control, user)
-
- controller.record_experiment_user(:test_experiment)
- end
- end
- end
-
- context 'when the experiment is disabled' do
- before do
- stub_experiment(test_experiment: false)
- allow(controller).to receive(:current_user).and_return(user)
- end
-
- it 'does not call add_user on the Experiment model' do
- expect(::Experiment).not_to receive(:add_user)
-
- controller.record_experiment_user(:test_experiment)
- end
- end
-
- context 'when there is no current_user' do
- before do
- stub_experiment(test_experiment: true)
- end
-
- it 'does not call add_user on the Experiment model' do
- expect(::Experiment).not_to receive(:add_user)
-
- controller.record_experiment_user(:test_experiment)
- end
- end
-
- context 'do not track' do
- before do
- allow(controller).to receive(:current_user).and_return(user)
- allow_next_instance_of(described_class) do |instance|
- allow(instance).to receive(:experiment_enabled?).with(:test_experiment).and_return(false)
- end
- end
-
- context 'is disabled' do
- before do
- request.headers['DNT'] = '0'
- end
-
- it 'calls add_user on the Experiment model' do
- expect(::Experiment).to receive(:add_user).with(:test_experiment, :control, user)
-
- controller.record_experiment_user(:test_experiment)
- end
- end
-
- context 'is enabled' do
- before do
- request.headers['DNT'] = '1'
- end
-
- it 'does not call add_user on the Experiment model' do
- expect(::Experiment).not_to receive(:add_user)
-
- controller.record_experiment_user(:test_experiment)
- end
- end
- end
- end
-
- describe '#experiment_tracking_category_and_group' do
- let_it_be(:experiment_key) { :test_something }
-
- subject { controller.experiment_tracking_category_and_group(experiment_key) }
-
- it 'returns a string with the experiment tracking category & group joined with a ":"' do
- expect(controller).to receive(:tracking_category).with(experiment_key).and_return('Experiment::Category')
- expect(controller).to receive(:tracking_group).with(experiment_key, '_group').and_return('experimental_group')
-
- expect(subject).to eq('Experiment::Category:experimental_group')
- end
- end
- end
-
describe '.enabled?' do
subject { described_class.enabled?(:test_experiment) }
@@ -442,6 +73,14 @@ RSpec.describe Gitlab::Experimentation, :snowplow do
let(:environment) { ::Gitlab.com? }
it { is_expected.to be_falsey }
+
+ it 'ensures the typically less expensive environment is checked before the more expensive database call for Feature' do
+ expect_next_instance_of(described_class::Experiment) do |experiment|
+ expect(experiment).not_to receive(:enabled?)
+ end
+
+ subject
+ end
end
end
diff --git a/spec/lib/gitlab/git/diff_spec.rb b/spec/lib/gitlab/git/diff_spec.rb
index 980a52bb61e..d4174a34433 100644
--- a/spec/lib/gitlab/git/diff_spec.rb
+++ b/spec/lib/gitlab/git/diff_spec.rb
@@ -301,19 +301,19 @@ EOT
describe '#too_large?' do
it 'returns true for a diff that is too large' do
- diff = described_class.new(diff: 'a' * 204800)
+ diff = described_class.new({ diff: 'a' * 204800 })
expect(diff.too_large?).to eq(true)
end
it 'returns false for a diff that is small enough' do
- diff = described_class.new(diff: 'a')
+ diff = described_class.new({ diff: 'a' })
expect(diff.too_large?).to eq(false)
end
it 'returns true for a diff that was explicitly marked as being too large' do
- diff = described_class.new(diff: 'a')
+ diff = described_class.new({ diff: 'a' })
diff.too_large!
@@ -323,19 +323,19 @@ EOT
describe '#collapsed?' do
it 'returns false by default even on quite big diff' do
- diff = described_class.new(diff: 'a' * 20480)
+ diff = described_class.new({ diff: 'a' * 20480 })
expect(diff).not_to be_collapsed
end
it 'returns false by default for a diff that is small enough' do
- diff = described_class.new(diff: 'a')
+ diff = described_class.new({ diff: 'a' })
expect(diff).not_to be_collapsed
end
it 'returns true for a diff that was explicitly marked as being collapsed' do
- diff = described_class.new(diff: 'a')
+ diff = described_class.new({ diff: 'a' })
diff.collapse!
@@ -359,7 +359,7 @@ EOT
describe '#collapse!' do
it 'prunes the diff' do
- diff = described_class.new(diff: "foo\nbar")
+ diff = described_class.new({ diff: "foo\nbar" })
diff.collapse!
diff --git a/spec/lib/gitlab/git_access_snippet_spec.rb b/spec/lib/gitlab/git_access_snippet_spec.rb
index 8c481cdee08..f5d8758a78a 100644
--- a/spec/lib/gitlab/git_access_snippet_spec.rb
+++ b/spec/lib/gitlab/git_access_snippet_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::GitAccessSnippet do
include ProjectHelpers
include TermsHelper
+ include AdminModeHelper
include_context 'ProjectPolicyTable context'
using RSpec::Parameterized::TableSyntax
@@ -207,12 +208,13 @@ RSpec.describe Gitlab::GitAccessSnippet do
let(:snippet) { create(:personal_snippet, snippet_level, :repository) }
let(:user) { membership == :author ? snippet.author : create_user_from_membership(nil, membership) }
- where(:snippet_level, :membership, :_expected_count) do
+ where(:snippet_level, :membership, :admin_mode, :_expected_count) do
permission_table_for_personal_snippet_access
end
with_them do
it "respects accessibility" do
+ enable_admin_mode!(user) if admin_mode
error_class = described_class::ForbiddenError
if Ability.allowed?(user, :update_snippet, snippet)
diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
index f977fe1638f..b09bd9dff1b 100644
--- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe Gitlab::GitalyClient::CommitService do
safe_max_files: 100,
safe_max_lines: 5000,
safe_max_bytes: 512000,
- max_patch_bytes: 102400
+ max_patch_bytes: 204800
)
expect_any_instance_of(Gitaly::DiffService::Stub).to receive(:commit_diff).with(request, kind_of(Hash))
@@ -57,7 +57,7 @@ RSpec.describe Gitlab::GitalyClient::CommitService do
safe_max_files: 100,
safe_max_lines: 5000,
safe_max_bytes: 512000,
- max_patch_bytes: 102400
+ max_patch_bytes: 204800
)
expect_any_instance_of(Gitaly::DiffService::Stub).to receive(:commit_diff).with(request, kind_of(Hash))
diff --git a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
index c7ea0a95596..f810a5c15a5 100644
--- a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe Gitlab::GitalyClient::RepositoryService do
.with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
.and_return(double(:garbage_collect_response))
- client.garbage_collect(true)
+ client.garbage_collect(true, prune: true)
end
end
diff --git a/spec/lib/gitlab/github_import/client_spec.rb b/spec/lib/gitlab/github_import/client_spec.rb
index 5f6ab42d0d2..bc734644d29 100644
--- a/spec/lib/gitlab/github_import/client_spec.rb
+++ b/spec/lib/gitlab/github_import/client_spec.rb
@@ -203,16 +203,40 @@ RSpec.describe Gitlab::GithubImport::Client do
describe '#requests_remaining?' do
let(:client) { described_class.new('foo') }
- it 'returns true if enough requests remain' do
- expect(client).to receive(:remaining_requests).and_return(9000)
+ context 'when default requests limit is set' do
+ before do
+ allow(client).to receive(:requests_limit).and_return(5000)
+ end
+
+ it 'returns true if enough requests remain' do
+ expect(client).to receive(:remaining_requests).and_return(9000)
+
+ expect(client.requests_remaining?).to eq(true)
+ end
+
+ it 'returns false if not enough requests remain' do
+ expect(client).to receive(:remaining_requests).and_return(1)
- expect(client.requests_remaining?).to eq(true)
+ expect(client.requests_remaining?).to eq(false)
+ end
end
- it 'returns false if not enough requests remain' do
- expect(client).to receive(:remaining_requests).and_return(1)
+ context 'when search requests limit is set' do
+ before do
+ allow(client).to receive(:requests_limit).and_return(described_class::SEARCH_MAX_REQUESTS_PER_MINUTE)
+ end
+
+ it 'returns true if enough requests remain' do
+ expect(client).to receive(:remaining_requests).and_return(described_class::SEARCH_RATE_LIMIT_THRESHOLD + 1)
+
+ expect(client.requests_remaining?).to eq(true)
+ end
+
+ it 'returns false if not enough requests remain' do
+ expect(client).to receive(:remaining_requests).and_return(described_class::SEARCH_RATE_LIMIT_THRESHOLD - 1)
- expect(client.requests_remaining?).to eq(false)
+ expect(client.requests_remaining?).to eq(false)
+ end
end
end
@@ -262,6 +286,16 @@ RSpec.describe Gitlab::GithubImport::Client do
end
end
+ describe '#requests_limit' do
+ it 'returns requests limit' do
+ client = described_class.new('foo')
+ rate_limit = double(limit: 1)
+
+ expect(client.octokit).to receive(:rate_limit).and_return(rate_limit)
+ expect(client.requests_limit).to eq(1)
+ end
+ end
+
describe '#rate_limit_resets_in' do
it 'returns the number of seconds after which the rate limit is reset' do
client = described_class.new('foo')
@@ -299,6 +333,32 @@ RSpec.describe Gitlab::GithubImport::Client do
end
end
+ describe '#web_endpoint' do
+ let(:client) { described_class.new('foo') }
+
+ context 'without a custom endpoint configured in Omniauth' do
+ it 'returns the default web endpoint' do
+ expect(client)
+ .to receive(:custom_api_endpoint)
+ .and_return(nil)
+
+ expect(client.web_endpoint).to eq('https://github.com')
+ end
+ end
+
+ context 'with a custom endpoint configured in Omniauth' do
+ it 'returns the custom endpoint' do
+ endpoint = 'https://github.kittens.com'
+
+ expect(client)
+ .to receive(:custom_api_endpoint)
+ .and_return(endpoint)
+
+ expect(client.web_endpoint).to eq(endpoint)
+ end
+ end
+ end
+
describe '#custom_api_endpoint' do
let(:client) { described_class.new('foo') }
@@ -391,4 +451,61 @@ RSpec.describe Gitlab::GithubImport::Client do
expect(client.rate_limiting_enabled?).to eq(false)
end
end
+
+ describe 'search' do
+ let(:client) { described_class.new('foo') }
+ let(:user) { double(:user, login: 'user') }
+ let(:org1) { double(:org, login: 'org1') }
+ let(:org2) { double(:org, login: 'org2') }
+ let(:repo1) { double(:repo, full_name: 'repo1') }
+ let(:repo2) { double(:repo, full_name: 'repo2') }
+
+ before do
+ allow(client)
+ .to receive(:each_object)
+ .with(:repos, nil, { affiliation: 'collaborator' })
+ .and_return([repo1, repo2].to_enum)
+
+ allow(client)
+ .to receive(:each_object)
+ .with(:organizations)
+ .and_return([org1, org2].to_enum)
+
+ allow(client.octokit).to receive(:user).and_return(user)
+ end
+
+ describe '#search_repos_by_name' do
+ it 'searches for repositories based on name' do
+ expected_search_query = 'test in:name is:public,private user:user repo:repo1 repo:repo2 org:org1 org:org2'
+
+ expect(client).to receive(:each_page).with(:search_repositories, expected_search_query)
+
+ client.search_repos_by_name('test')
+ end
+ end
+
+ describe '#search_query' do
+ it 'returns base search query' do
+ result = client.search_query(str: 'test', type: :test, include_collaborations: false, include_orgs: false)
+
+ expect(result).to eq('test in:test is:public,private user:user')
+ end
+
+ context 'when include_collaborations is true' do
+ it 'returns search query including collaborations' do
+ result = client.search_query(str: 'test', type: :test, include_collaborations: true, include_orgs: false)
+
+ expect(result).to eq('test in:test is:public,private user:user repo:repo1 repo:repo2')
+ end
+ end
+
+ context 'when include_orgs is true' do
+ it 'returns search query including orgs' do
+ result = client.search_query(str: 'test', type: :test, include_collaborations: false, include_orgs: true)
+
+ expect(result).to eq('test in:test is:public,private user:user org:org1 org:org2')
+ end
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/github_import/importer/lfs_object_importer_spec.rb b/spec/lib/gitlab/github_import/importer/lfs_object_importer_spec.rb
index add554992f1..188c56ae81f 100644
--- a/spec/lib/gitlab/github_import/importer/lfs_object_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/lfs_object_importer_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Gitlab::GithubImport::Importer::LfsObjectImporter do
}
end
- let(:lfs_download_object) { LfsDownloadObject.new(lfs_attributes) }
+ let(:lfs_download_object) { LfsDownloadObject.new(**lfs_attributes) }
let(:github_lfs_object) { Gitlab::GithubImport::Representation::LfsObject.new(lfs_attributes) }
let(:importer) { described_class.new(github_lfs_object, project, nil) }
diff --git a/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb b/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb
index 1f7b14661c2..6188ba8ec3f 100644
--- a/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe Gitlab::GithubImport::Importer::LfsObjectsImporter do
}
end
- let(:lfs_download_object) { LfsDownloadObject.new(lfs_attributes) }
+ let(:lfs_download_object) { LfsDownloadObject.new(**lfs_attributes) }
describe '#parallel?' do
it 'returns true when running in parallel mode' do
diff --git a/spec/lib/gitlab/github_import/sequential_importer_spec.rb b/spec/lib/gitlab/github_import/sequential_importer_spec.rb
index fe13fcd2568..a5e89049ed9 100644
--- a/spec/lib/gitlab/github_import/sequential_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/sequential_importer_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::GithubImport::SequentialImporter do
describe '#execute' do
it 'imports a project in sequence' do
repository = double(:repository)
- project = double(:project, id: 1, repository: repository)
+ project = double(:project, id: 1, repository: repository, import_url: 'http://t0ken@github.another-domain.com/repo-org/repo.git')
importer = described_class.new(project, token: 'foo')
expect_next_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter) do |instance|
diff --git a/spec/lib/gitlab/github_import_spec.rb b/spec/lib/gitlab/github_import_spec.rb
index 1a690b81d2b..3129da64809 100644
--- a/spec/lib/gitlab/github_import_spec.rb
+++ b/spec/lib/gitlab/github_import_spec.rb
@@ -3,13 +3,13 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport do
- let(:project) { double(:project) }
+ context 'github.com' do
+ let(:project) { double(:project, import_url: 'http://t0ken@github.com/user/repo.git') }
- describe '.new_client_for' do
it 'returns a new Client with a custom token' do
expect(described_class::Client)
.to receive(:new)
- .with('123', parallel: true)
+ .with('123', host: nil, parallel: true)
described_class.new_client_for(project, token: '123')
end
@@ -23,18 +23,57 @@ RSpec.describe Gitlab::GithubImport do
expect(described_class::Client)
.to receive(:new)
- .with('123', parallel: true)
+ .with('123', host: nil, parallel: true)
described_class.new_client_for(project)
end
+
+ it 'returns the ID of the ghost user', :clean_gitlab_redis_cache do
+ expect(described_class.ghost_user_id).to eq(User.ghost.id)
+ end
+
+ it 'caches the ghost user ID', :clean_gitlab_redis_cache do
+ expect(Gitlab::Cache::Import::Caching)
+ .to receive(:write)
+ .once
+ .and_call_original
+
+ 2.times do
+ described_class.ghost_user_id
+ end
+ end
end
- describe '.ghost_user_id', :clean_gitlab_redis_cache do
- it 'returns the ID of the ghost user' do
+ context 'GitHub Enterprise' do
+ let(:project) { double(:project, import_url: 'http://t0ken@github.another-domain.com/repo-org/repo.git') }
+
+ it 'returns a new Client with a custom token' do
+ expect(described_class::Client)
+ .to receive(:new)
+ .with('123', host: 'http://github.another-domain.com/api/v3', parallel: true)
+
+ described_class.new_client_for(project, token: '123')
+ end
+
+ it 'returns a new Client with a token stored in the import data' do
+ import_data = double(:import_data, credentials: { user: '123' })
+
+ expect(project)
+ .to receive(:import_data)
+ .and_return(import_data)
+
+ expect(described_class::Client)
+ .to receive(:new)
+ .with('123', host: 'http://github.another-domain.com/api/v3', parallel: true)
+
+ described_class.new_client_for(project)
+ end
+
+ it 'returns the ID of the ghost user', :clean_gitlab_redis_cache do
expect(described_class.ghost_user_id).to eq(User.ghost.id)
end
- it 'caches the ghost user ID' do
+ it 'caches the ghost user ID', :clean_gitlab_redis_cache do
expect(Gitlab::Cache::Import::Caching)
.to receive(:write)
.once
@@ -44,5 +83,9 @@ RSpec.describe Gitlab::GithubImport do
described_class.ghost_user_id
end
end
+
+ it 'formats the import url' do
+ expect(described_class.formatted_import_url(project)).to eq('http://github.another-domain.com/api/v3')
+ end
end
end
diff --git a/spec/lib/gitlab/grape_logging/formatters/lograge_with_timestamp_spec.rb b/spec/lib/gitlab/grape_logging/formatters/lograge_with_timestamp_spec.rb
index 91299de0751..487b19a98e0 100644
--- a/spec/lib/gitlab/grape_logging/formatters/lograge_with_timestamp_spec.rb
+++ b/spec/lib/gitlab/grape_logging/formatters/lograge_with_timestamp_spec.rb
@@ -15,7 +15,8 @@ RSpec.describe Gitlab::GrapeLogging::Formatters::LogrageWithTimestamp do
path: '/api/v4/projects/1',
params: {
'description': '[FILTERED]',
- 'name': 'gitlab test'
+ 'name': 'gitlab test',
+ 'int': 42
},
host: 'localhost',
remote_ip: '127.0.0.1',
@@ -44,7 +45,8 @@ RSpec.describe Gitlab::GrapeLogging::Formatters::LogrageWithTimestamp do
expect(params).to eq([
{ 'key' => 'description', 'value' => '[FILTERED]' },
- { 'key' => 'name', 'value' => 'gitlab test' }
+ { 'key' => 'name', 'value' => 'gitlab test' },
+ { 'key' => 'int', 'value' => 42 }
])
end
end
diff --git a/spec/lib/gitlab/graphql/authorize/authorize_field_service_spec.rb b/spec/lib/gitlab/graphql/authorize/authorize_field_service_spec.rb
index 7576523ce52..c88506899cd 100644
--- a/spec/lib/gitlab/graphql/authorize/authorize_field_service_spec.rb
+++ b/spec/lib/gitlab/graphql/authorize/authorize_field_service_spec.rb
@@ -27,13 +27,17 @@ RSpec.describe Gitlab::Graphql::Authorize::AuthorizeFieldService do
end
end
+ def resolve
+ service.authorized_resolve[type_instance, {}, context]
+ end
+
subject(:service) { described_class.new(field) }
describe '#authorized_resolve' do
let_it_be(:current_user) { build(:user) }
let_it_be(:presented_object) { 'presented object' }
let_it_be(:query_type) { GraphQL::ObjectType.new }
- let_it_be(:schema) { GraphQL::Schema.define(query: query_type, mutation: nil)}
+ let_it_be(:schema) { GitlabSchema }
let_it_be(:query) { GraphQL::Query.new(schema, document: nil, context: {}, variables: {}) }
let_it_be(:context) { GraphQL::Query::Context.new(query: query, values: { current_user: current_user }, object: nil) }
@@ -41,125 +45,201 @@ RSpec.describe Gitlab::Graphql::Authorize::AuthorizeFieldService do
let(:type_instance) { type_class.authorized_new(presented_object, context) }
let(:field) { type_class.fields['testField'].to_graphql }
- subject(:resolved) { service.authorized_resolve.call(type_instance, {}, context) }
+ subject(:resolved) { ::Gitlab::Graphql::Lazy.force(resolve) }
- context 'scalar types' do
- shared_examples 'checking permissions on the presented object' do
- it 'checks the abilities on the object being presented and returns the value' do
- expected_permissions.each do |permission|
- spy_ability_check_for(permission, presented_object, passed: true)
- end
+ context 'reading the field of a lazy value' do
+ let(:ability) { :read_field }
+ let(:presented_object) { lazy_upcase('a') }
+ let(:type_class) { type_with_field(GraphQL::STRING_TYPE, ability) }
- expect(resolved).to eq('Resolved value')
+ let(:upcaser) do
+ Module.new do
+ def self.upcase(strs)
+ strs.map(&:upcase)
+ end
end
+ end
- it 'returns nil if the value was not authorized' do
- allow(Ability).to receive(:allowed?).and_return false
-
- expect(resolved).to be_nil
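+ # Returns a BatchLoader::GraphQL lazy value that upcases the string only when forced.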
+ def lazy_upcase(str)
+ ::BatchLoader::GraphQL.for(str).batch do |strs, found|
+ strs.zip(upcaser.upcase(strs)).each { |s, us| found[s, us] }
end
end
- context 'when the field is a built-in scalar type' do
- let(:type_class) { type_with_field(GraphQL::STRING_TYPE, :read_field) }
- let(:expected_permissions) { [:read_field] }
+ it 'does not run authorizations until we force the resolved value' do
+ expect(Ability).not_to receive(:allowed?)
- it_behaves_like 'checking permissions on the presented object'
+ expect(resolve).to respond_to(:force)
end
- context 'when the field is a list of scalar types' do
- let(:type_class) { type_with_field([GraphQL::STRING_TYPE], :read_field) }
- let(:expected_permissions) { [:read_field] }
+ it 'runs authorizations when we force the resolved value' do
+ spy_ability_check_for(ability, 'A')
- it_behaves_like 'checking permissions on the presented object'
+ expect(resolved).to eq('Resolved value')
end
- context 'when the field is sub-classed scalar type' do
- let(:type_class) { type_with_field(Types::TimeType, :read_field) }
- let(:expected_permissions) { [:read_field] }
+ it 'redacts values that fail the permissions check' do
+ spy_ability_check_for(ability, 'A', passed: false)
- it_behaves_like 'checking permissions on the presented object'
+ expect(resolved).to be_nil
end
- context 'when the field is a list of sub-classed scalar types' do
- let(:type_class) { type_with_field([Types::TimeType], :read_field) }
- let(:expected_permissions) { [:read_field] }
+ context 'we batch two calls' do
+ def resolve(value)
+ instance = type_class.authorized_new(lazy_upcase(value), context)
+ service.authorized_resolve[instance, {}, context]
+ end
- it_behaves_like 'checking permissions on the presented object'
- end
- end
+ it 'batches resolution, but authorizes each object separately' do
+ expect(upcaser).to receive(:upcase).once.and_call_original
+ spy_ability_check_for(:read_field, 'A', passed: true)
+ spy_ability_check_for(:read_field, 'B', passed: false)
+ spy_ability_check_for(:read_field, 'C', passed: true)
- context 'when the field is a connection' do
- context 'when it resolves to nil' do
- let(:type_class) { type_with_field(Types::QueryType.connection_type, :read_field, nil) }
+ a = resolve('a')
+ b = resolve('b')
+ c = resolve('c')
- it 'does not fail when authorizing' do
- expect(resolved).to be_nil
+ expect(a.force).to be_present
+ expect(b.force).to be_nil
+ expect(c.force).to be_present
end
end
end
- context 'when the field is a specific type' do
- let(:custom_type) { type(:read_type) }
- let(:object_in_field) { double('presented in field') }
+ shared_examples 'authorizing fields' do
+ context 'scalar types' do
+ shared_examples 'checking permissions on the presented object' do
+ it 'checks the abilities on the object being presented and returns the value' do
+ expected_permissions.each do |permission|
+ spy_ability_check_for(permission, presented_object, passed: true)
+ end
- let(:type_class) { type_with_field(custom_type, :read_field, object_in_field) }
- let(:type_instance) { type_class.authorized_new(object_in_field, context) }
+ expect(resolved).to eq('Resolved value')
+ end
- subject(:resolved) { service.authorized_resolve.call(type_instance, {}, context) }
+ it 'returns nil if the value was not authorized' do
+ allow(Ability).to receive(:allowed?).and_return false
- it 'checks both field & type permissions' do
- spy_ability_check_for(:read_field, object_in_field, passed: true)
- spy_ability_check_for(:read_type, object_in_field, passed: true)
+ expect(resolved).to be_nil
+ end
+ end
- expect(resolved).to eq(object_in_field)
- end
+ context 'when the field is a built-in scalar type' do
+ let(:type_class) { type_with_field(GraphQL::STRING_TYPE, :read_field) }
+ let(:expected_permissions) { [:read_field] }
- it 'returns nil if viewing was not allowed' do
- spy_ability_check_for(:read_field, object_in_field, passed: false)
- spy_ability_check_for(:read_type, object_in_field, passed: true)
+ it_behaves_like 'checking permissions on the presented object'
+ end
- expect(resolved).to be_nil
+ context 'when the field is a list of scalar types' do
+ let(:type_class) { type_with_field([GraphQL::STRING_TYPE], :read_field) }
+ let(:expected_permissions) { [:read_field] }
+
+ it_behaves_like 'checking permissions on the presented object'
+ end
+
+ context 'when the field is sub-classed scalar type' do
+ let(:type_class) { type_with_field(Types::TimeType, :read_field) }
+ let(:expected_permissions) { [:read_field] }
+
+ it_behaves_like 'checking permissions on the presented object'
+ end
+
+ context 'when the field is a list of sub-classed scalar types' do
+ let(:type_class) { type_with_field([Types::TimeType], :read_field) }
+ let(:expected_permissions) { [:read_field] }
+
+ it_behaves_like 'checking permissions on the presented object'
+ end
end
- context 'when the field is not nullable' do
- let(:type_class) { type_with_field(custom_type, :read_field, object_in_field, null: false) }
+ context 'when the field is a connection' do
+ context 'when it resolves to nil' do
+ let(:type_class) { type_with_field(Types::QueryType.connection_type, :read_field, nil) }
+
+ it 'does not fail when authorizing' do
+ expect(resolved).to be_nil
+ end
+ end
- it 'returns nil when viewing is not allowed' do
- spy_ability_check_for(:read_type, object_in_field, passed: false)
+ context 'when it returns values' do
+ let(:objects) { [1, 2, 3] }
+ let(:field_type) { type([:read_object]).connection_type }
+ let(:type_class) { type_with_field(field_type, [], objects) }
- expect(resolved).to be_nil
+ it 'filters out unauthorized values' do
+ spy_ability_check_for(:read_object, 1, passed: true)
+ spy_ability_check_for(:read_object, 2, passed: false)
+ spy_ability_check_for(:read_object, 3, passed: true)
+
+ expect(resolved.nodes).to eq [1, 3]
+ end
end
end
- context 'when the field is a list' do
- let(:object_1) { double('presented in field 1') }
- let(:object_2) { double('presented in field 2') }
- let(:presented_types) { [double(object: object_1), double(object: object_2)] }
+ context 'when the field is a specific type' do
+ let(:custom_type) { type(:read_type) }
+ let(:object_in_field) { double('presented in field') }
+
+ let(:type_class) { type_with_field(custom_type, :read_field, object_in_field) }
+ let(:type_instance) { type_class.authorized_new(object_in_field, context) }
+
+ it 'checks both field & type permissions' do
+ spy_ability_check_for(:read_field, object_in_field, passed: true)
+ spy_ability_check_for(:read_type, object_in_field, passed: true)
+
+ expect(resolved).to eq(object_in_field)
+ end
+
+ it 'returns nil if viewing was not allowed' do
+ spy_ability_check_for(:read_field, object_in_field, passed: false)
+ spy_ability_check_for(:read_type, object_in_field, passed: true)
- let(:type_class) { type_with_field([custom_type], :read_field, presented_types) }
- let(:type_instance) { type_class.authorized_new(presented_types, context) }
+ expect(resolved).to be_nil
+ end
- it 'checks all permissions' do
- allow(Ability).to receive(:allowed?) { true }
+ context 'when the field is not nullable' do
+ let(:type_class) { type_with_field(custom_type, :read_field, object_in_field, null: false) }
- spy_ability_check_for(:read_field, object_1, passed: true)
- spy_ability_check_for(:read_type, object_1, passed: true)
- spy_ability_check_for(:read_field, object_2, passed: true)
- spy_ability_check_for(:read_type, object_2, passed: true)
+ it 'returns nil when viewing is not allowed' do
+ spy_ability_check_for(:read_type, object_in_field, passed: false)
- expect(resolved).to eq(presented_types)
+ expect(resolved).to be_nil
+ end
end
- it 'filters out objects that the user cannot see' do
- allow(Ability).to receive(:allowed?) { true }
+ context 'when the field is a list' do
+ let(:object_1) { double('presented in field 1') }
+ let(:object_2) { double('presented in field 2') }
+ let(:presented_types) { [double(object: object_1), double(object: object_2)] }
+
+ let(:type_class) { type_with_field([custom_type], :read_field, presented_types) }
+ let(:type_instance) { type_class.authorized_new(presented_types, context) }
+
+ it 'checks all permissions' do
+ allow(Ability).to receive(:allowed?) { true }
- spy_ability_check_for(:read_type, object_1, passed: false)
+ spy_ability_check_for(:read_field, object_1, passed: true)
+ spy_ability_check_for(:read_type, object_1, passed: true)
+ spy_ability_check_for(:read_field, object_2, passed: true)
+ spy_ability_check_for(:read_type, object_2, passed: true)
- expect(resolved.map(&:object)).to contain_exactly(object_2)
+ expect(resolved).to eq(presented_types)
+ end
+
+ it 'filters out objects that the user cannot see' do
+ allow(Ability).to receive(:allowed?) { true }
+
+ spy_ability_check_for(:read_type, object_1, passed: false)
+
+ expect(resolved).to contain_exactly(have_attributes(object: object_2))
+ end
end
end
end
+
+ it_behaves_like 'authorizing fields'
end
private
diff --git a/spec/lib/gitlab/graphql/lazy_spec.rb b/spec/lib/gitlab/graphql/lazy_spec.rb
new file mode 100644
index 00000000000..795978ab0a4
--- /dev/null
+++ b/spec/lib/gitlab/graphql/lazy_spec.rb
@@ -0,0 +1,96 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Graphql::Lazy do
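+ # Test helper: returns a lazy BatchLoader value that resolves to key * key when forced.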
+ def load(key)
+ BatchLoader.for(key).batch do |keys, loader|
+ keys.each { |x| loader.call(x, x * x) }
+ end
+ end
+
+ let(:value) { double(x: 1) }
+
+ describe '#force' do
+ subject { described_class.new { value.x } }
+
+ it 'can extract the value' do
+ expect(subject.force).to be 1
+ end
+
+ it 'can derive new lazy values' do
+ expect(subject.then { |x| x + 2 }.force).to be 3
+ end
+
+ it 'only evaluates once' do
+ expect(value).to receive(:x).once
+
+ expect(subject.force).to eq(subject.force)
+ end
+
+ it 'deals with nested laziness' do
+ expect(described_class.new { load(10) }.force).to eq(100)
+ expect(described_class.new { described_class.new { 5 } }.force).to eq 5
+ end
+ end
+
+ describe '.with_value' do
+ let(:inner) { described_class.new { value.x } }
+
+ subject { described_class.with_value(inner) { |x| x.to_s } }
+
+ it 'defers the application of a block to a value' do
+ expect(value).not_to receive(:x)
+
+ expect(subject).to be_an_instance_of(described_class)
+ end
+
+ it 'evaluates to the application of the block to the value' do
+ expect(value).to receive(:x).once
+
+ expect(subject.force).to eq(inner.force.to_s)
+ end
+ end
+
+ describe '.force' do
+ context 'when given a plain value' do
+ subject { described_class.force(1) }
+
+ it 'unwraps the value' do
+ expect(subject).to be 1
+ end
+ end
+
+ context 'when given a wrapped lazy value' do
+ subject { described_class.force(described_class.new { 2 }) }
+
+ it 'unwraps the value' do
+ expect(subject).to be 2
+ end
+ end
+
+ context 'when the value is from a batchloader' do
+ subject { described_class.force(load(3)) }
+
+ it 'syncs the value' do
+ expect(subject).to be 9
+ end
+ end
+
+ context 'when the value is a GraphQL lazy' do
+ subject { described_class.force(GitlabSchema.after_lazy(load(3)) { |x| x + 1 } ) }
+
+ it 'forces the evaluation' do
+ expect(subject).to be 10
+ end
+ end
+
+ context 'when the value is a promise' do
+ subject { described_class.force(::Concurrent::Promise.new { 4 }) }
+
+ it 'executes the promise and waits for the value' do
+ expect(subject).to be 4
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/graphql/loaders/batch_model_loader_spec.rb b/spec/lib/gitlab/graphql/loaders/batch_model_loader_spec.rb
index cf1f00bc176..7ae33346388 100644
--- a/spec/lib/gitlab/graphql/loaders/batch_model_loader_spec.rb
+++ b/spec/lib/gitlab/graphql/loaders/batch_model_loader_spec.rb
@@ -4,8 +4,9 @@ require 'spec_helper'
RSpec.describe Gitlab::Graphql::Loaders::BatchModelLoader do
describe '#find' do
- let(:issue) { create(:issue) }
- let(:user) { create(:user) }
+ let_it_be(:issue) { create(:issue) }
+ let_it_be(:other_user) { create(:user) }
+ let_it_be(:user) { create(:user) }
it 'finds a model by id' do
issue_result = described_class.new(Issue, issue.id).find
@@ -16,15 +17,25 @@ RSpec.describe Gitlab::Graphql::Loaders::BatchModelLoader do
end
it 'only queries once per model' do
- other_user = create(:user)
- user
- issue
-
expect do
[described_class.new(User, other_user.id).find,
described_class.new(User, user.id).find,
described_class.new(Issue, issue.id).find].map(&:sync)
end.not_to exceed_query_limit(2)
end
+
+ it 'does not force values unnecessarily' do
+ expect do
+ a = described_class.new(User, user.id).find
+ b = described_class.new(Issue, issue.id).find
+
+ b.sync
+
+ c = described_class.new(User, other_user.id).find
+
+ a.sync
+ c.sync
+ end.not_to exceed_query_limit(2)
+ end
end
end
diff --git a/spec/lib/gitlab/hook_data/release_builder_spec.rb b/spec/lib/gitlab/hook_data/release_builder_spec.rb
new file mode 100644
index 00000000000..b630780b162
--- /dev/null
+++ b/spec/lib/gitlab/hook_data/release_builder_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::HookData::ReleaseBuilder do
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let(:release) { create(:release, project: project) }
+ let(:builder) { described_class.new(release) }
+
+ describe '#build' do
+ let(:data) { builder.build('create') }
+
+ it 'includes safe attribute' do
+ %w[
+ id
+ created_at
+ description
+ name
+ released_at
+ tag
+ ].each do |key|
+ expect(data).to include(key)
+ end
+ end
+
+ it 'includes additional attrs' do
+ expect(data[:object_kind]).to eq('release')
+ expect(data[:project]).to eq(builder.release.project.hook_attrs.with_indifferent_access)
+ expect(data[:action]).to eq('create')
+ expect(data).to include(:assets)
+ expect(data).to include(:commit)
+ end
+
+ context 'when the Release has an image in the description' do
+ let(:release_with_description) do
+ create(:release, project: project, description: 'test![Release_Image](/uploads/abc/Release_Image.png)')
+ end
+
+ let(:builder) { described_class.new(release_with_description) }
+
+ it 'sets the image to use an absolute URL' do
+ expected_path = "#{release_with_description.project.path_with_namespace}/uploads/abc/Release_Image.png"
+
+ expect(data[:description])
+ .to eq("test![Release_Image](#{Settings.gitlab.url}/#{expected_path})")
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/i18n/po_linter_spec.rb b/spec/lib/gitlab/i18n/po_linter_spec.rb
index 9165ccfb1ef..e04c0b49480 100644
--- a/spec/lib/gitlab/i18n/po_linter_spec.rb
+++ b/spec/lib/gitlab/i18n/po_linter_spec.rb
@@ -461,9 +461,10 @@ RSpec.describe Gitlab::I18n::PoLinter do
fake_metadata = double
allow(fake_metadata).to receive(:forms_to_test).and_return(4)
allow(linter).to receive(:metadata_entry).and_return(fake_metadata)
- allow(linter).to receive(:locale).and_return('pl_PL')
- numbers = linter.numbers_covering_all_plurals
+ numbers = Gitlab::I18n.with_locale('pl_PL') do
+ linter.numbers_covering_all_plurals
+ end
expect(numbers).to contain_exactly(0, 1, 2)
end
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 5ee7fb2adbf..38fe2781331 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -179,6 +179,7 @@ merge_requests:
- user_mentions
- system_note_metadata
- note_authors
+- cleanup_schedule
external_pull_requests:
- project
merge_request_diff:
@@ -195,6 +196,8 @@ merge_request_diff_files:
merge_request_context_commits:
- merge_request
- diff_files
+cleanup_schedule:
+- merge_request
ci_pipelines:
- project
- user
@@ -240,6 +243,7 @@ ci_pipelines:
- vulnerability_findings
- pipeline_config
- security_scans
+- security_findings
- daily_build_group_report_results
- latest_builds
- daily_report_results
@@ -317,6 +321,7 @@ push_access_levels:
- protected_branch
- user
- group
+- deploy_key
create_access_levels:
- user
- protected_tag
@@ -652,6 +657,7 @@ milestone_releases:
evidences:
- release
design: &design
+- authors
- issue
- actions
- versions
diff --git a/spec/lib/gitlab/import_export/attributes_finder_spec.rb b/spec/lib/gitlab/import_export/attributes_finder_spec.rb
index 7f6ebf577af..428d8d605ee 100644
--- a/spec/lib/gitlab/import_export/attributes_finder_spec.rb
+++ b/spec/lib/gitlab/import_export/attributes_finder_spec.rb
@@ -59,7 +59,7 @@ RSpec.describe Gitlab::ImportExport::AttributesFinder do
end
before do
- allow_any_instance_of(Gitlab::ImportExport).to receive(:config_file).and_return(test_config)
+ allow(Gitlab::ImportExport).to receive(:config_file).and_return(test_config)
end
it 'generates hash from project tree config' do
diff --git a/spec/lib/gitlab/import_export/group/legacy_tree_saver_spec.rb b/spec/lib/gitlab/import_export/group/legacy_tree_saver_spec.rb
index 6b324b952dc..9e1571ae3d8 100644
--- a/spec/lib/gitlab/import_export/group/legacy_tree_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/group/legacy_tree_saver_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Gitlab::ImportExport::Group::LegacyTreeSaver do
before do
group.add_maintainer(user)
- allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
+ allow(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
end
after do
diff --git a/spec/lib/gitlab/import_export/importer_spec.rb b/spec/lib/gitlab/import_export/importer_spec.rb
index dc44296321c..0db038785d3 100644
--- a/spec/lib/gitlab/import_export/importer_spec.rb
+++ b/spec/lib/gitlab/import_export/importer_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Gitlab::ImportExport::Importer do
subject(:importer) { described_class.new(project) }
before do
- allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(test_path)
+ allow(Gitlab::ImportExport).to receive(:storage_path).and_return(test_path)
allow_any_instance_of(Gitlab::ImportExport::FileImporter).to receive(:remove_import_file)
stub_uploads_object_storage(FileUploader)
@@ -65,10 +65,22 @@ RSpec.describe Gitlab::ImportExport::Importer do
end
end
- it 'restores the ProjectTree' do
- expect(Gitlab::ImportExport::Project::TreeRestorer).to receive(:new).and_call_original
+ context 'with sample_data_template' do
+ it 'initializes the Sample::TreeRestorer' do
+ project.create_or_update_import_data(data: { sample_data: true })
- importer.execute
+ expect(Gitlab::ImportExport::Project::Sample::TreeRestorer).to receive(:new).and_call_original
+
+ importer.execute
+ end
+ end
+
+ context 'without sample_data_template' do
+ it 'initializes the ProjectTree' do
+ expect(Gitlab::ImportExport::Project::TreeRestorer).to receive(:new).and_call_original
+
+ importer.execute
+ end
end
it 'removes the import file' do
diff --git a/spec/lib/gitlab/import_export/json/ndjson_reader_spec.rb b/spec/lib/gitlab/import_export/json/ndjson_reader_spec.rb
index e208a1c383c..b477ac45577 100644
--- a/spec/lib/gitlab/import_export/json/ndjson_reader_spec.rb
+++ b/spec/lib/gitlab/import_export/json/ndjson_reader_spec.rb
@@ -67,6 +67,14 @@ RSpec.describe Gitlab::ImportExport::JSON::NdjsonReader do
it 'yields nothing to the Enumerator' do
expect(subject.to_a).to eq([])
end
+
+ context 'with mark_as_consumed: false' do
+ subject { ndjson_reader.consume_relation(importable_path, key, mark_as_consumed: false) }
+
+ it 'yields every relation value to the Enumerator' do
+ expect(subject.count).to eq(1)
+ end
+ end
end
context 'key has not been consumed' do
@@ -102,14 +110,4 @@ RSpec.describe Gitlab::ImportExport::JSON::NdjsonReader do
end
end
end
-
- describe '#clear_consumed_relations' do
- let(:dir_path) { fixture }
-
- subject { ndjson_reader.clear_consumed_relations }
-
- it 'returns empty set' do
- expect(subject).to be_empty
- end
- end
end
diff --git a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
index 949cfb5a34d..762687beedb 100644
--- a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
+++ b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
@@ -19,6 +19,7 @@ RSpec.describe Gitlab::ImportExport::JSON::StreamingSerializer do
group: group,
approvals_before_merge: 1)
end
+
let_it_be(:issue) do
create(:issue,
assignees: [user],
diff --git a/spec/lib/gitlab/import_export/lfs_restorer_spec.rb b/spec/lib/gitlab/import_export/lfs_restorer_spec.rb
index a9f7fb72612..c8887b0ded1 100644
--- a/spec/lib/gitlab/import_export/lfs_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/lfs_restorer_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Gitlab::ImportExport::LfsRestorer do
subject(:restorer) { described_class.new(project: project, shared: shared) }
before do
- allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
+ allow(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
FileUtils.mkdir_p(shared.export_path)
end
diff --git a/spec/lib/gitlab/import_export/project/relation_factory_spec.rb b/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
index 50bc6a30044..56ba730e893 100644
--- a/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
+++ b/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
@@ -61,6 +61,7 @@ RSpec.describe Gitlab::ImportExport::Project::RelationFactory do
'enable_ssl_verification' => true,
'job_events' => false,
'wiki_page_events' => true,
+ 'releases_events' => false,
'token' => token
}
end
diff --git a/spec/lib/gitlab/import_export/project/sample/date_calculator_spec.rb b/spec/lib/gitlab/import_export/project/sample/date_calculator_spec.rb
index 82f59245519..645242c6f05 100644
--- a/spec/lib/gitlab/import_export/project/sample/date_calculator_spec.rb
+++ b/spec/lib/gitlab/import_export/project/sample/date_calculator_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Gitlab::ImportExport::Project::Sample::DateCalculator do
end
context 'when dates are not empty' do
- let(:dates) { [[nil, '2020-01-01 00:00:00 +0000'], [nil, '2021-01-01 00:00:00 +0000'], [nil, '2022-01-01 23:59:59 +0000']] }
+ let(:dates) { [nil, '2020-01-01 00:00:00 +0000', '2021-01-01 00:00:00 +0000', nil, '2022-01-01 23:59:59 +0000'] }
it { is_expected.to eq(Time.zone.parse('2021-01-01 00:00:00 +0000')) }
end
diff --git a/spec/lib/gitlab/import_export/project/sample/relation_factory_spec.rb b/spec/lib/gitlab/import_export/project/sample/relation_factory_spec.rb
new file mode 100644
index 00000000000..86d5f2402f8
--- /dev/null
+++ b/spec/lib/gitlab/import_export/project/sample/relation_factory_spec.rb
@@ -0,0 +1,168 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::ImportExport::Project::Sample::RelationFactory do
+ let(:group) { create(:group) }
+ let(:project) { create(:project, :repository, group: group) }
+ let(:members_mapper) { double('members_mapper').as_null_object }
+ let(:admin) { create(:admin) }
+ let(:importer_user) { admin }
+ let(:excluded_keys) { [] }
+ let(:date_calculator) { instance_double(Gitlab::ImportExport::Project::Sample::DateCalculator) }
+ let(:original_project_id) { 8 }
+ let(:start_date) { Time.current - 30.days }
+ let(:due_date) { Time.current - 20.days }
+ let(:created_object) do
+ described_class.create( # rubocop:disable Rails/SaveBang
+ relation_sym: relation_sym,
+ relation_hash: relation_hash,
+ object_builder: Gitlab::ImportExport::Project::ObjectBuilder,
+ members_mapper: members_mapper,
+ user: importer_user,
+ importable: project,
+ excluded_keys: excluded_keys,
+ date_calculator: date_calculator
+ )
+ end
+
+ context 'issue object' do
+ let(:relation_sym) { :issues }
+ let(:id) { 999 }
+
+ let(:relation_hash) do
+ {
+ 'id' => id,
+ 'title' => 'Necessitatibus magnam qui at velit consequatur perspiciatis.',
+ 'project_id' => original_project_id,
+ 'created_at' => '2016-08-12T09:41:03.462Z',
+ 'updated_at' => '2016-08-12T09:41:03.462Z',
+ 'description' => 'Molestiae corporis magnam et fugit aliquid nulla quia.',
+ 'state' => 'closed',
+ 'position' => 0,
+ 'confidential' => false,
+ 'due_date' => due_date
+ }
+ end
+
+ before do
+ allow(date_calculator).to receive(:closest_date_to_average) { Time.current - 10.days }
+ allow(date_calculator).to receive(:calculate_by_closest_date_to_average)
+ end
+
+ it 'correctly updates due date', :aggregate_failures do
+ expect(date_calculator).to receive(:calculate_by_closest_date_to_average)
+ .with(relation_hash['due_date']).and_return(due_date - 10.days)
+
+ expect(created_object.due_date).to eq((due_date - 10.days).to_date)
+ end
+ end
+
+ context 'milestone object' do
+ let(:relation_sym) { :milestones }
+ let(:id) { 1001 }
+
+ let(:relation_hash) do
+ {
+ 'id' => id,
+ 'title' => 'v3.0',
+ 'project_id' => original_project_id,
+ 'created_at' => '2016-08-12T09:41:03.462Z',
+ 'updated_at' => '2016-08-12T09:41:03.462Z',
+ 'description' => 'Rerum at autem exercitationem ea voluptates harum quam placeat.',
+ 'state' => 'closed',
+ 'start_date' => start_date,
+ 'due_date' => due_date
+ }
+ end
+
+ before do
+ allow(date_calculator).to receive(:closest_date_to_average).twice { Time.current - 10.days }
+ allow(date_calculator).to receive(:calculate_by_closest_date_to_average).twice
+ end
+
+    it 'correctly updates due date', :aggregate_failures do
+ expect(date_calculator).to receive(:calculate_by_closest_date_to_average)
+ .with(relation_hash['due_date']).and_return(due_date - 10.days)
+
+ expect(created_object.due_date).to eq((due_date - 10.days).to_date)
+ end
+
+    it 'correctly updates start date', :aggregate_failures do
+ expect(date_calculator).to receive(:calculate_by_closest_date_to_average)
+ .with(relation_hash['start_date']).and_return(start_date - 20.days)
+
+ expect(created_object.start_date).to eq((start_date - 20.days).to_date)
+ end
+ end
+
+ context 'milestone object' do
+ let(:relation_sym) { :milestones }
+ let(:id) { 1001 }
+
+ let(:relation_hash) do
+ {
+ 'id' => id,
+ 'title' => 'v3.0',
+ 'project_id' => original_project_id,
+ 'created_at' => '2016-08-12T09:41:03.462Z',
+ 'updated_at' => '2016-08-12T09:41:03.462Z',
+ 'description' => 'Rerum at autem exercitationem ea voluptates harum quam placeat.',
+ 'state' => 'closed',
+ 'start_date' => start_date,
+ 'due_date' => due_date
+ }
+ end
+
+ before do
+ allow(date_calculator).to receive(:closest_date_to_average).twice { Time.current - 10.days }
+ allow(date_calculator).to receive(:calculate_by_closest_date_to_average).twice
+ end
+
+    it 'correctly updates due date', :aggregate_failures do
+ expect(date_calculator).to receive(:calculate_by_closest_date_to_average)
+ .with(relation_hash['due_date']).and_return(due_date - 10.days)
+
+ expect(created_object.due_date).to eq((due_date - 10.days).to_date)
+ end
+
+    it 'correctly updates start date', :aggregate_failures do
+ expect(date_calculator).to receive(:calculate_by_closest_date_to_average)
+ .with(relation_hash['start_date']).and_return(start_date - 20.days)
+
+ expect(created_object.start_date).to eq((start_date - 20.days).to_date)
+ end
+ end
+
+ context 'hook object' do
+ let(:relation_sym) { :hooks }
+ let(:id) { 999 }
+ let(:service_id) { 99 }
+ let(:token) { 'secret' }
+
+ let(:relation_hash) do
+ {
+ 'id' => id,
+ 'url' => 'https://example.json',
+ 'project_id' => original_project_id,
+ 'created_at' => '2016-08-12T09:41:03.462Z',
+ 'updated_at' => '2016-08-12T09:41:03.462Z',
+ 'service_id' => service_id,
+ 'push_events' => true,
+ 'issues_events' => false,
+ 'confidential_issues_events' => false,
+ 'merge_requests_events' => true,
+ 'tag_push_events' => false,
+ 'note_events' => true,
+ 'enable_ssl_verification' => true,
+ 'job_events' => false,
+ 'wiki_page_events' => true,
+ 'token' => token
+ }
+ end
+
+ it 'does not calculate the closest date to average' do
+ expect(date_calculator).not_to receive(:calculate_by_closest_date_to_average)
+ end
+ end
+end
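
The new spec drives the factory through the keyword interface shown in `created_object`. A condensed sketch of that call pattern, with the collaborators (project, members_mapper, importer_user, date_calculator) standing in for the let-blocks above:

    # Sketch only: argument names are taken from the spec; the factory's internals are not shown here.
    milestone = Gitlab::ImportExport::Project::Sample::RelationFactory.create(
      relation_sym: :milestones,
      relation_hash: { 'title' => 'v3.0', 'start_date' => start_date, 'due_date' => due_date },
      object_builder: Gitlab::ImportExport::Project::ObjectBuilder,
      members_mapper: members_mapper,
      user: importer_user,
      importable: project,
      excluded_keys: [],
      date_calculator: date_calculator
    )

    # Per the expectations above, each date passes through
    # date_calculator.calculate_by_closest_date_to_average(...) before being
    # written to the built record, shifting it onto the sample timeline.
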
diff --git a/spec/lib/gitlab/import_export/project/sample/sample_data_relation_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/sample/relation_tree_restorer_spec.rb
index f173345a4c6..f87f79d4462 100644
--- a/spec/lib/gitlab/import_export/project/sample/sample_data_relation_tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/project/sample/relation_tree_restorer_spec.rb
@@ -9,7 +9,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::ImportExport::Project::Sample::SampleDataRelationTreeRestorer do
+RSpec.describe Gitlab::ImportExport::Project::Sample::RelationTreeRestorer do
include_context 'relation tree restorer shared context'
let(:sample_data_relation_tree_restorer) do
@@ -74,13 +74,26 @@ RSpec.describe Gitlab::ImportExport::Project::Sample::SampleDataRelationTreeRest
let(:importable_name) { 'project' }
let(:importable_path) { 'project' }
let(:object_builder) { Gitlab::ImportExport::Project::ObjectBuilder }
- let(:relation_factory) { Gitlab::ImportExport::Project::RelationFactory }
+ let(:relation_factory) { Gitlab::ImportExport::Project::Sample::RelationFactory }
let(:reader) { Gitlab::ImportExport::Reader.new(shared: shared) }
+ let(:path) { 'spec/fixtures/lib/gitlab/import_export/sample_data/tree' }
+ let(:relation_reader) { Gitlab::ImportExport::JSON::NdjsonReader.new(path) }
- context 'using ndjson reader' do
- let(:path) { 'spec/fixtures/lib/gitlab/import_export/sample_data/tree' }
- let(:relation_reader) { Gitlab::ImportExport::JSON::NdjsonReader.new(path) }
+ it 'initializes relation_factory with date_calculator as parameter' do
+ expect(Gitlab::ImportExport::Project::Sample::RelationFactory).to receive(:create).with(hash_including(:date_calculator)).at_least(:once).times
+
+ subject
+ end
+
+ context 'when relation tree restorer is initialized' do
+ it 'initializes date calculator with due dates' do
+ expect(Gitlab::ImportExport::Project::Sample::DateCalculator).to receive(:new).with(Array)
+ sample_data_relation_tree_restorer
+ end
+ end
+
+ context 'using ndjson reader' do
it_behaves_like 'import project successfully'
end
end
diff --git a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
index c05968c9a85..fd3b71deb37 100644
--- a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
@@ -681,13 +681,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do
end
it 'overrides project feature access levels' do
- access_level_keys = project.project_feature.attributes.keys.select { |a| a =~ /_access_level/ }
-
- # `pages_access_level` is not included, since it is not available in the public API
- # and has a dependency on project's visibility level
- # see ProjectFeature model
- access_level_keys.delete('pages_access_level')
-
+ access_level_keys = ProjectFeature.available_features.map { |feature| ProjectFeature.access_level_attribute(feature) }
disabled_access_levels = Hash[access_level_keys.collect { |item| [item, 'disabled'] }]
project.create_import_data(data: { override_params: disabled_access_levels })
@@ -979,6 +973,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do
create(:project, :builds_disabled, :issues_disabled,
{ name: 'project', path: 'project' })
end
+
let(:shared) { project.import_export_shared }
let(:project_tree_restorer) { described_class.new(user: user, shared: shared, project: project) }
@@ -1040,41 +1035,6 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do
it_behaves_like 'project tree restorer work properly', :legacy_reader, true
it_behaves_like 'project tree restorer work properly', :ndjson_reader, true
-
- context 'Sample Data JSON' do
- let(:user) { create(:user) }
- let!(:project) { create(:project, :builds_disabled, :issues_disabled, name: 'project', path: 'project') }
- let(:project_tree_restorer) { described_class.new(user: user, shared: shared, project: project) }
-
- before do
- setup_import_export_config('sample_data')
- setup_reader(:ndjson_reader)
- end
-
- context 'with sample_data_template' do
- before do
- allow(project).to receive_message_chain(:import_data, :data, :dig).with('sample_data') { true }
- end
-
- it 'initialize SampleDataRelationTreeRestorer' do
- expect_next_instance_of(Gitlab::ImportExport::Project::Sample::SampleDataRelationTreeRestorer) do |restorer|
- expect(restorer).to receive(:restore).and_return(true)
- end
-
- expect(project_tree_restorer.restore).to eq(true)
- end
- end
-
- context 'without sample_data_template' do
- it 'initialize RelationTreeRestorer' do
- expect_next_instance_of(Gitlab::ImportExport::RelationTreeRestorer) do |restorer|
- expect(restorer).to receive(:restore).and_return(true)
- end
-
- expect(project_tree_restorer.restore).to eq(true)
- end
- end
- end
end
context 'disable ndjson import' do
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index e3d1f2c9368..b33462b4096 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -489,6 +489,7 @@ ProjectHook:
- confidential_issues_events
- confidential_note_events
- repository_update_events
+- releases_events
ProtectedBranch:
- id
- project_id
@@ -575,6 +576,7 @@ ProjectFeature:
- repository_access_level
- pages_access_level
- metrics_dashboard_access_level
+- requirements_access_level
- created_at
- updated_at
ProtectedBranch::MergeAccessLevel:
@@ -683,6 +685,7 @@ ProjectCiCdSetting:
ProjectSetting:
- allow_merge_on_skipped_pipeline
- has_confluence
+- has_vulnerabilities
ProtectedEnvironment:
- id
- project_id
@@ -771,6 +774,7 @@ ExternalPullRequest:
- target_sha
DesignManagement::Design:
- id
+- iid
- project_id
- filename
- relative_position
@@ -858,3 +862,25 @@ ProjectSecuritySetting:
IssuableSla:
- issue_id
- due_at
+PushRule:
+ - force_push_regex
+ - delete_branch_regex
+ - commit_message_regex
+ - author_email_regex
+ - file_name_regex
+ - branch_name_regex
+ - commit_message_negative_regex
+ - max_file_size
+ - deny_delete_tag
+ - member_check
+ - is_sample
+ - prevent_secrets
+ - reject_unsigned_commits
+ - commit_committer_check
+ - regexp_uses_re2
+MergeRequest::CleanupSchedule:
+- id
+- scheduled_at
+- completed_at
+- created_at
+- updated_at
diff --git a/spec/lib/gitlab/import_export/uploads_manager_spec.rb b/spec/lib/gitlab/import_export/uploads_manager_spec.rb
index 33ad0e12c37..8282ad9a070 100644
--- a/spec/lib/gitlab/import_export/uploads_manager_spec.rb
+++ b/spec/lib/gitlab/import_export/uploads_manager_spec.rb
@@ -23,13 +23,13 @@ RSpec.describe Gitlab::ImportExport::UploadsManager do
end
describe '#save' do
+ before do
+ project.uploads << upload
+ end
+
context 'when the project has uploads locally stored' do
let(:upload) { create(:upload, :issuable_upload, :with_file, model: project) }
- before do
- project.uploads << upload
- end
-
it 'does not cause errors' do
manager.save
@@ -74,6 +74,22 @@ RSpec.describe Gitlab::ImportExport::UploadsManager do
end
end
end
+
+ context 'when upload is in object storage' do
+ before do
+ stub_uploads_object_storage(FileUploader)
+ allow(manager).to receive(:download_or_copy_upload).and_raise(Errno::ENAMETOOLONG)
+ end
+
+ it 'ignores problematic upload and logs exception' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(instance_of(Errno::ENAMETOOLONG), project_id: project.id)
+
+ manager.save
+
+ expect(shared.errors).to be_empty
+ expect(File).not_to exist(exported_file_path)
+ end
+ end
end
describe '#restore' do
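
The new object-storage context above pins down an error path: a failure while copying a single remote upload is logged and skipped instead of aborting the export. A minimal sketch of that rescue-and-log shape, assuming a download_or_copy_upload helper like the one stubbed in the spec:

    # Hypothetical shape of the guard the spec describes -- not the real
    # UploadsManager code, just the behaviour it asserts.
    def add_upload(upload)
      download_or_copy_upload(upload)
    rescue Errno::ENAMETOOLONG => e
      # Skip the problematic upload, record the exception, keep exporting.
      Gitlab::ErrorTracking.log_exception(e, project_id: project.id)
    end
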
diff --git a/spec/lib/gitlab/instrumentation_helper_spec.rb b/spec/lib/gitlab/instrumentation_helper_spec.rb
index 35bbdcdccd6..88f2def34d9 100644
--- a/spec/lib/gitlab/instrumentation_helper_spec.rb
+++ b/spec/lib/gitlab/instrumentation_helper_spec.rb
@@ -97,6 +97,16 @@ RSpec.describe Gitlab::InstrumentationHelper do
expect(payload[:gitaly_duration]).to be_nil
end
end
+
+ context 'when the request matched a Rack::Attack safelist' do
+ it 'logs the safelist name' do
+ Gitlab::Instrumentation::Throttle.safelist = 'foobar'
+
+ subject
+
+ expect(payload[:throttle_safelist]).to eq('foobar')
+ end
+ end
end
describe '.queue_duration_for_job' do
diff --git a/spec/lib/gitlab/jira_import_spec.rb b/spec/lib/gitlab/jira_import_spec.rb
index c8cecb576da..2b602c80640 100644
--- a/spec/lib/gitlab/jira_import_spec.rb
+++ b/spec/lib/gitlab/jira_import_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Gitlab::JiraImport do
let_it_be(:project, reload: true) { create(:project) }
let(:additional_params) { {} }
- subject { described_class.validate_project_settings!(project, additional_params) }
+ subject { described_class.validate_project_settings!(project, **additional_params) }
shared_examples 'raise Jira import error' do |message|
it 'returns error' do
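
Swapping `additional_params` for `**additional_params` is the standard Ruby 3 keyword-argument fix: a plain hash is no longer implicitly converted into keyword arguments, so it has to be splatted explicitly. A standalone illustration:

    def validate!(project, notice: false)
      [project, notice]
    end

    params = { notice: true }

    validate!('my-project', **params)  # keywords forwarded explicitly; works on Ruby 3
    # validate!('my-project', params)  # ArgumentError on Ruby 3: the hash counts as an extra positional argument

The same reasoning applies to the `**{}` change in the LegacyGithubImport spec further down.
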
diff --git a/spec/lib/gitlab/json_spec.rb b/spec/lib/gitlab/json_spec.rb
index 0402296a3a8..59ec94f2855 100644
--- a/spec/lib/gitlab/json_spec.rb
+++ b/spec/lib/gitlab/json_spec.rb
@@ -7,342 +7,306 @@ RSpec.describe Gitlab::Json do
stub_feature_flags(json_wrapper_legacy_mode: true)
end
- shared_examples "json" do
- describe ".parse" do
- context "legacy_mode is disabled by default" do
- it "parses an object" do
- expect(subject.parse('{ "foo": "bar" }')).to eq({ "foo" => "bar" })
- end
-
- it "parses an array" do
- expect(subject.parse('[{ "foo": "bar" }]')).to eq([{ "foo" => "bar" }])
- end
-
- it "parses a string" do
- expect(subject.parse('"foo"', legacy_mode: false)).to eq("foo")
- end
-
- it "parses a true bool" do
- expect(subject.parse("true", legacy_mode: false)).to be(true)
- end
-
- it "parses a false bool" do
- expect(subject.parse("false", legacy_mode: false)).to be(false)
- end
+ describe ".parse" do
+ context "legacy_mode is disabled by default" do
+ it "parses an object" do
+ expect(subject.parse('{ "foo": "bar" }')).to eq({ "foo" => "bar" })
end
- context "legacy_mode is enabled" do
- it "parses an object" do
- expect(subject.parse('{ "foo": "bar" }', legacy_mode: true)).to eq({ "foo" => "bar" })
- end
-
- it "parses an array" do
- expect(subject.parse('[{ "foo": "bar" }]', legacy_mode: true)).to eq([{ "foo" => "bar" }])
- end
-
- it "raises an error on a string" do
- expect { subject.parse('"foo"', legacy_mode: true) }.to raise_error(JSON::ParserError)
- end
-
- it "raises an error on a true bool" do
- expect { subject.parse("true", legacy_mode: true) }.to raise_error(JSON::ParserError)
- end
-
- it "raises an error on a false bool" do
- expect { subject.parse("false", legacy_mode: true) }.to raise_error(JSON::ParserError)
- end
+ it "parses an array" do
+ expect(subject.parse('[{ "foo": "bar" }]')).to eq([{ "foo" => "bar" }])
end
- context "feature flag is disabled" do
- before do
- stub_feature_flags(json_wrapper_legacy_mode: false)
- end
-
- it "parses an object" do
- expect(subject.parse('{ "foo": "bar" }', legacy_mode: true)).to eq({ "foo" => "bar" })
- end
-
- it "parses an array" do
- expect(subject.parse('[{ "foo": "bar" }]', legacy_mode: true)).to eq([{ "foo" => "bar" }])
- end
-
- it "parses a string" do
- expect(subject.parse('"foo"', legacy_mode: true)).to eq("foo")
- end
+ it "parses a string" do
+ expect(subject.parse('"foo"', legacy_mode: false)).to eq("foo")
+ end
- it "parses a true bool" do
- expect(subject.parse("true", legacy_mode: true)).to be(true)
- end
+ it "parses a true bool" do
+ expect(subject.parse("true", legacy_mode: false)).to be(true)
+ end
- it "parses a false bool" do
- expect(subject.parse("false", legacy_mode: true)).to be(false)
- end
+ it "parses a false bool" do
+ expect(subject.parse("false", legacy_mode: false)).to be(false)
end
end
- describe ".parse!" do
- context "legacy_mode is disabled by default" do
- it "parses an object" do
- expect(subject.parse!('{ "foo": "bar" }')).to eq({ "foo" => "bar" })
- end
+ context "legacy_mode is enabled" do
+ it "parses an object" do
+ expect(subject.parse('{ "foo": "bar" }', legacy_mode: true)).to eq({ "foo" => "bar" })
+ end
- it "parses an array" do
- expect(subject.parse!('[{ "foo": "bar" }]')).to eq([{ "foo" => "bar" }])
- end
+ it "parses an array" do
+ expect(subject.parse('[{ "foo": "bar" }]', legacy_mode: true)).to eq([{ "foo" => "bar" }])
+ end
- it "parses a string" do
- expect(subject.parse!('"foo"', legacy_mode: false)).to eq("foo")
- end
+ it "raises an error on a string" do
+ expect { subject.parse('"foo"', legacy_mode: true) }.to raise_error(JSON::ParserError)
+ end
- it "parses a true bool" do
- expect(subject.parse!("true", legacy_mode: false)).to be(true)
- end
+ it "raises an error on a true bool" do
+ expect { subject.parse("true", legacy_mode: true) }.to raise_error(JSON::ParserError)
+ end
- it "parses a false bool" do
- expect(subject.parse!("false", legacy_mode: false)).to be(false)
- end
+ it "raises an error on a false bool" do
+ expect { subject.parse("false", legacy_mode: true) }.to raise_error(JSON::ParserError)
end
+ end
- context "legacy_mode is enabled" do
- it "parses an object" do
- expect(subject.parse!('{ "foo": "bar" }', legacy_mode: true)).to eq({ "foo" => "bar" })
- end
+ context "feature flag is disabled" do
+ before do
+ stub_feature_flags(json_wrapper_legacy_mode: false)
+ end
- it "parses an array" do
- expect(subject.parse!('[{ "foo": "bar" }]', legacy_mode: true)).to eq([{ "foo" => "bar" }])
- end
+ it "parses an object" do
+ expect(subject.parse('{ "foo": "bar" }', legacy_mode: true)).to eq({ "foo" => "bar" })
+ end
- it "raises an error on a string" do
- expect { subject.parse!('"foo"', legacy_mode: true) }.to raise_error(JSON::ParserError)
- end
+ it "parses an array" do
+ expect(subject.parse('[{ "foo": "bar" }]', legacy_mode: true)).to eq([{ "foo" => "bar" }])
+ end
- it "raises an error on a true bool" do
- expect { subject.parse!("true", legacy_mode: true) }.to raise_error(JSON::ParserError)
- end
+ it "parses a string" do
+ expect(subject.parse('"foo"', legacy_mode: true)).to eq("foo")
+ end
- it "raises an error on a false bool" do
- expect { subject.parse!("false", legacy_mode: true) }.to raise_error(JSON::ParserError)
- end
+ it "parses a true bool" do
+ expect(subject.parse("true", legacy_mode: true)).to be(true)
end
- context "feature flag is disabled" do
- before do
- stub_feature_flags(json_wrapper_legacy_mode: false)
- end
+ it "parses a false bool" do
+ expect(subject.parse("false", legacy_mode: true)).to be(false)
+ end
+ end
+ end
- it "parses an object" do
- expect(subject.parse!('{ "foo": "bar" }', legacy_mode: true)).to eq({ "foo" => "bar" })
- end
+ describe ".parse!" do
+ context "legacy_mode is disabled by default" do
+ it "parses an object" do
+ expect(subject.parse!('{ "foo": "bar" }')).to eq({ "foo" => "bar" })
+ end
- it "parses an array" do
- expect(subject.parse!('[{ "foo": "bar" }]', legacy_mode: true)).to eq([{ "foo" => "bar" }])
- end
+ it "parses an array" do
+ expect(subject.parse!('[{ "foo": "bar" }]')).to eq([{ "foo" => "bar" }])
+ end
- it "parses a string" do
- expect(subject.parse!('"foo"', legacy_mode: true)).to eq("foo")
- end
+ it "parses a string" do
+ expect(subject.parse!('"foo"', legacy_mode: false)).to eq("foo")
+ end
- it "parses a true bool" do
- expect(subject.parse!("true", legacy_mode: true)).to be(true)
- end
+ it "parses a true bool" do
+ expect(subject.parse!("true", legacy_mode: false)).to be(true)
+ end
- it "parses a false bool" do
- expect(subject.parse!("false", legacy_mode: true)).to be(false)
- end
+ it "parses a false bool" do
+ expect(subject.parse!("false", legacy_mode: false)).to be(false)
end
end
- describe ".dump" do
- it "dumps an object" do
- expect(subject.dump({ "foo" => "bar" })).to eq('{"foo":"bar"}')
+ context "legacy_mode is enabled" do
+ it "parses an object" do
+ expect(subject.parse!('{ "foo": "bar" }', legacy_mode: true)).to eq({ "foo" => "bar" })
end
- it "dumps an array" do
- expect(subject.dump([{ "foo" => "bar" }])).to eq('[{"foo":"bar"}]')
+ it "parses an array" do
+ expect(subject.parse!('[{ "foo": "bar" }]', legacy_mode: true)).to eq([{ "foo" => "bar" }])
end
- it "dumps a string" do
- expect(subject.dump("foo")).to eq('"foo"')
+ it "raises an error on a string" do
+ expect { subject.parse!('"foo"', legacy_mode: true) }.to raise_error(JSON::ParserError)
end
- it "dumps a true bool" do
- expect(subject.dump(true)).to eq("true")
+ it "raises an error on a true bool" do
+ expect { subject.parse!("true", legacy_mode: true) }.to raise_error(JSON::ParserError)
end
- it "dumps a false bool" do
- expect(subject.dump(false)).to eq("false")
+ it "raises an error on a false bool" do
+ expect { subject.parse!("false", legacy_mode: true) }.to raise_error(JSON::ParserError)
end
end
- describe ".generate" do
- let(:obj) do
- { test: true, "foo.bar" => "baz", is_json: 1, some: [1, 2, 3] }
+ context "feature flag is disabled" do
+ before do
+ stub_feature_flags(json_wrapper_legacy_mode: false)
end
- it "generates JSON" do
- expected_string = <<~STR.chomp
- {"test":true,"foo.bar":"baz","is_json":1,"some":[1,2,3]}
- STR
+ it "parses an object" do
+ expect(subject.parse!('{ "foo": "bar" }', legacy_mode: true)).to eq({ "foo" => "bar" })
+ end
- expect(subject.generate(obj)).to eq(expected_string)
+ it "parses an array" do
+ expect(subject.parse!('[{ "foo": "bar" }]', legacy_mode: true)).to eq([{ "foo" => "bar" }])
end
- it "allows you to customise the output" do
- opts = {
- indent: " ",
- space: " ",
- space_before: " ",
- object_nl: "\n",
- array_nl: "\n"
- }
+ it "parses a string" do
+ expect(subject.parse!('"foo"', legacy_mode: true)).to eq("foo")
+ end
- json = subject.generate(obj, opts)
-
- expected_string = <<~STR.chomp
- {
- "test" : true,
- "foo.bar" : "baz",
- "is_json" : 1,
- "some" : [
- 1,
- 2,
- 3
- ]
- }
- STR
+ it "parses a true bool" do
+ expect(subject.parse!("true", legacy_mode: true)).to be(true)
+ end
- expect(json).to eq(expected_string)
+ it "parses a false bool" do
+ expect(subject.parse!("false", legacy_mode: true)).to be(false)
end
end
+ end
- describe ".pretty_generate" do
- let(:obj) do
- {
- test: true,
- "foo.bar" => "baz",
- is_json: 1,
- some: [1, 2, 3],
- more: { test: true },
- multi_line_empty_array: [],
- multi_line_empty_obj: {}
- }
- end
+ describe ".dump" do
+ it "dumps an object" do
+ expect(subject.dump({ "foo" => "bar" })).to eq('{"foo":"bar"}')
+ end
- it "generates pretty JSON" do
- expected_string = <<~STR.chomp
- {
- "test": true,
- "foo.bar": "baz",
- "is_json": 1,
- "some": [
- 1,
- 2,
- 3
- ],
- "more": {
- "test": true
- },
- "multi_line_empty_array": [
-
- ],
- "multi_line_empty_obj": {
- }
- }
- STR
+ it "dumps an array" do
+ expect(subject.dump([{ "foo" => "bar" }])).to eq('[{"foo":"bar"}]')
+ end
- expect(subject.pretty_generate(obj)).to eq(expected_string)
- end
+ it "dumps a string" do
+ expect(subject.dump("foo")).to eq('"foo"')
+ end
- it "allows you to customise the output" do
- opts = {
- space_before: " "
- }
+ it "dumps a true bool" do
+ expect(subject.dump(true)).to eq("true")
+ end
- json = subject.pretty_generate(obj, opts)
-
- expected_string = <<~STR.chomp
- {
- "test" : true,
- "foo.bar" : "baz",
- "is_json" : 1,
- "some" : [
- 1,
- 2,
- 3
- ],
- "more" : {
- "test" : true
- },
- "multi_line_empty_array" : [
-
- ],
- "multi_line_empty_obj" : {
- }
- }
- STR
+ it "dumps a false bool" do
+ expect(subject.dump(false)).to eq("false")
+ end
+ end
- expect(json).to eq(expected_string)
- end
+ describe ".generate" do
+ let(:obj) do
+ { test: true, "foo.bar" => "baz", is_json: 1, some: [1, 2, 3] }
end
- context "the feature table is missing" do
- before do
- allow(Feature::FlipperFeature).to receive(:table_exists?).and_return(false)
- end
+ it "generates JSON" do
+ expected_string = <<~STR.chomp
+ {"test":true,"foo.bar":"baz","is_json":1,"some":[1,2,3]}
+ STR
+
+ expect(subject.generate(obj)).to eq(expected_string)
+ end
- it "skips legacy mode handling" do
- expect(Feature).not_to receive(:enabled?).with(:json_wrapper_legacy_mode, default_enabled: true)
+ it "allows you to customise the output" do
+ opts = {
+ indent: " ",
+ space: " ",
+ space_before: " ",
+ object_nl: "\n",
+ array_nl: "\n"
+ }
- subject.send(:handle_legacy_mode!, {})
- end
+ json = subject.generate(obj, opts)
- it "skips oj feature detection" do
- expect(Feature).not_to receive(:enabled?).with(:oj_json, default_enabled: true)
+ expected_string = <<~STR.chomp
+ {
+ "test" : true,
+ "foo.bar" : "baz",
+ "is_json" : 1,
+ "some" : [
+ 1,
+ 2,
+ 3
+ ]
+ }
+ STR
- subject.send(:enable_oj?)
- end
+ expect(json).to eq(expected_string)
end
+ end
- context "the database is missing" do
- before do
- allow(Feature::FlipperFeature).to receive(:table_exists?).and_raise(PG::ConnectionBad)
- end
+ describe ".pretty_generate" do
+ let(:obj) do
+ {
+ test: true,
+ "foo.bar" => "baz",
+ is_json: 1,
+ some: [1, 2, 3],
+ more: { test: true },
+ multi_line_empty_array: [],
+ multi_line_empty_obj: {}
+ }
+ end
- it "still parses json" do
- expect(subject.parse("{}")).to eq({})
- end
+ it "generates pretty JSON" do
+ expected_string = <<~STR.chomp
+ {
+ "test": true,
+ "foo.bar": "baz",
+ "is_json": 1,
+ "some": [
+ 1,
+ 2,
+ 3
+ ],
+ "more": {
+ "test": true
+ },
+ "multi_line_empty_array": [
+
+ ],
+ "multi_line_empty_obj": {
+ }
+ }
+ STR
- it "still generates json" do
- expect(subject.dump({})).to eq("{}")
- end
+ expect(subject.pretty_generate(obj)).to eq(expected_string)
+ end
+
+ it "allows you to customise the output" do
+ opts = {
+ space_before: " "
+ }
+
+ json = subject.pretty_generate(obj, opts)
+
+ expected_string = <<~STR.chomp
+ {
+ "test" : true,
+ "foo.bar" : "baz",
+ "is_json" : 1,
+ "some" : [
+ 1,
+ 2,
+ 3
+ ],
+ "more" : {
+ "test" : true
+ },
+ "multi_line_empty_array" : [
+
+ ],
+ "multi_line_empty_obj" : {
+ }
+ }
+ STR
+
+ expect(json).to eq(expected_string)
end
end
- context "oj gem" do
+ context "the feature table is missing" do
before do
- stub_feature_flags(oj_json: true)
+ allow(Feature::FlipperFeature).to receive(:table_exists?).and_return(false)
end
- it_behaves_like "json"
+ it "skips legacy mode handling" do
+ expect(Feature).not_to receive(:enabled?).with(:json_wrapper_legacy_mode, default_enabled: true)
- describe "#enable_oj?" do
- it "returns true" do
- expect(subject.enable_oj?).to be(true)
- end
+ subject.send(:handle_legacy_mode!, {})
end
end
- context "json gem" do
+ context "the database is missing" do
before do
- stub_feature_flags(oj_json: false)
+ allow(Feature::FlipperFeature).to receive(:table_exists?).and_raise(PG::ConnectionBad)
end
- it_behaves_like "json"
+ it "still parses json" do
+ expect(subject.parse("{}")).to eq({})
+ end
- describe "#enable_oj?" do
- it "returns false" do
- expect(subject.enable_oj?).to be(false)
- end
+ it "still generates json" do
+ expect(subject.dump({})).to eq("{}")
end
end
@@ -353,47 +317,25 @@ RSpec.describe Gitlab::Json do
let(:env) { {} }
let(:result) { "{\"test\":true}" }
- context "oj is enabled" do
+ context "grape_gitlab_json flag is enabled" do
before do
- stub_feature_flags(oj_json: true)
+ stub_feature_flags(grape_gitlab_json: true)
end
- context "grape_gitlab_json flag is enabled" do
- before do
- stub_feature_flags(grape_gitlab_json: true)
- end
-
- it "generates JSON" do
- expect(subject).to eq(result)
- end
-
- it "uses Gitlab::Json" do
- expect(Gitlab::Json).to receive(:dump).with(obj)
-
- subject
- end
+ it "generates JSON" do
+ expect(subject).to eq(result)
end
- context "grape_gitlab_json flag is disabled" do
- before do
- stub_feature_flags(grape_gitlab_json: false)
- end
-
- it "generates JSON" do
- expect(subject).to eq(result)
- end
+ it "uses Gitlab::Json" do
+ expect(Gitlab::Json).to receive(:dump).with(obj)
- it "uses Grape::Formatter::Json" do
- expect(Grape::Formatter::Json).to receive(:call).with(obj, env)
-
- subject
- end
+ subject
end
end
- context "oj is disabled" do
+ context "grape_gitlab_json flag is disabled" do
before do
- stub_feature_flags(oj_json: false)
+ stub_feature_flags(grape_gitlab_json: false)
end
it "generates JSON" do
diff --git a/spec/lib/gitlab/kubernetes/helm/api_spec.rb b/spec/lib/gitlab/kubernetes/helm/api_spec.rb
index bcc95bdbf2b..e022f5bd912 100644
--- a/spec/lib/gitlab/kubernetes/helm/api_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/api_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Gitlab::Kubernetes::Helm::API do
let(:files) { {} }
let(:command) do
- Gitlab::Kubernetes::Helm::InstallCommand.new(
+ Gitlab::Kubernetes::Helm::V2::InstallCommand.new(
name: application_name,
chart: 'chart-name',
rbac: rbac,
@@ -142,7 +142,7 @@ RSpec.describe Gitlab::Kubernetes::Helm::API do
end
context 'with a service account' do
- let(:command) { Gitlab::Kubernetes::Helm::InitCommand.new(name: application_name, files: files, rbac: rbac) }
+ let(:command) { Gitlab::Kubernetes::Helm::V2::InitCommand.new(name: application_name, files: files, rbac: rbac) }
context 'rbac-enabled cluster' do
let(:rbac) { true }
diff --git a/spec/lib/gitlab/kubernetes/helm/pod_spec.rb b/spec/lib/gitlab/kubernetes/helm/pod_spec.rb
index 54e3289dd25..6d97790fc8b 100644
--- a/spec/lib/gitlab/kubernetes/helm/pod_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/pod_spec.rb
@@ -4,75 +4,84 @@ require 'spec_helper'
RSpec.describe Gitlab::Kubernetes::Helm::Pod do
describe '#generate' do
- let(:app) { create(:clusters_applications_prometheus) }
- let(:command) { app.install_command }
- let(:namespace) { Gitlab::Kubernetes::Helm::NAMESPACE }
- let(:service_account_name) { nil }
+ using RSpec::Parameterized::TableSyntax
- subject { described_class.new(command, namespace, service_account_name: service_account_name) }
+ where(:helm_major_version, :expected_helm_version, :expected_command_env) do
+ 2 | '2.16.9' | [:TILLER_NAMESPACE]
+ 3 | '3.2.4' | nil
+ end
- context 'with a command' do
- it 'generates a Kubeclient::Resource' do
- expect(subject.generate).to be_a_kind_of(Kubeclient::Resource)
- end
+ with_them do
+ let(:cluster) { create(:cluster, helm_major_version: helm_major_version) }
+ let(:app) { create(:clusters_applications_prometheus, cluster: cluster) }
+ let(:command) { app.install_command }
+ let(:namespace) { Gitlab::Kubernetes::Helm::NAMESPACE }
+ let(:service_account_name) { nil }
- it 'generates the appropriate metadata' do
- metadata = subject.generate.metadata
- expect(metadata.name).to eq("install-#{app.name}")
- expect(metadata.namespace).to eq('gitlab-managed-apps')
- expect(metadata.labels['gitlab.org/action']).to eq('install')
- expect(metadata.labels['gitlab.org/application']).to eq(app.name)
- end
+ subject { described_class.new(command, namespace, service_account_name: service_account_name) }
- it 'generates a container spec' do
- spec = subject.generate.spec
- expect(spec.containers.count).to eq(1)
- end
+ context 'with a command' do
+ it 'generates a Kubeclient::Resource' do
+ expect(subject.generate).to be_a_kind_of(Kubeclient::Resource)
+ end
- it 'generates the appropriate specifications for the container' do
- container = subject.generate.spec.containers.first
- expect(container.name).to eq('helm')
- expect(container.image).to eq('registry.gitlab.com/gitlab-org/cluster-integration/helm-install-image/releases/2.16.9-kube-1.13.12')
- expect(container.env.count).to eq(3)
- expect(container.env.map(&:name)).to match_array([:HELM_VERSION, :TILLER_NAMESPACE, :COMMAND_SCRIPT])
- expect(container.command).to match_array(["/bin/sh"])
- expect(container.args).to match_array(["-c", "$(COMMAND_SCRIPT)"])
- end
+ it 'generates the appropriate metadata' do
+ metadata = subject.generate.metadata
+ expect(metadata.name).to eq("install-#{app.name}")
+ expect(metadata.namespace).to eq('gitlab-managed-apps')
+ expect(metadata.labels['gitlab.org/action']).to eq('install')
+ expect(metadata.labels['gitlab.org/application']).to eq(app.name)
+ end
- it 'includes a never restart policy' do
- spec = subject.generate.spec
- expect(spec.restartPolicy).to eq('Never')
- end
+ it 'generates a container spec' do
+ spec = subject.generate.spec
+ expect(spec.containers.count).to eq(1)
+ end
- it 'includes volumes for the container' do
- container = subject.generate.spec.containers.first
- expect(container.volumeMounts.first['name']).to eq('configuration-volume')
- expect(container.volumeMounts.first['mountPath']).to eq("/data/helm/#{app.name}/config")
- end
+ it 'generates the appropriate specifications for the container' do
+ container = subject.generate.spec.containers.first
+ expect(container.name).to eq('helm')
+ expect(container.image).to eq("registry.gitlab.com/gitlab-org/cluster-integration/helm-install-image/releases/#{expected_helm_version}-kube-1.13.12-alpine-3.12")
+ expect(container.env.map(&:name)).to include(:HELM_VERSION, :COMMAND_SCRIPT, *expected_command_env)
+ expect(container.command).to match_array(["/bin/sh"])
+ expect(container.args).to match_array(["-c", "$(COMMAND_SCRIPT)"])
+ end
- it 'includes a volume inside the specification' do
- spec = subject.generate.spec
- expect(spec.volumes.first['name']).to eq('configuration-volume')
- end
+ it 'includes a never restart policy' do
+ spec = subject.generate.spec
+ expect(spec.restartPolicy).to eq('Never')
+ end
- it 'mounts configMap specification in the volume' do
- volume = subject.generate.spec.volumes.first
- expect(volume.configMap['name']).to eq("values-content-configuration-#{app.name}")
- expect(volume.configMap['items'].first['key']).to eq(:'values.yaml')
- expect(volume.configMap['items'].first['path']).to eq(:'values.yaml')
- end
+ it 'includes volumes for the container' do
+ container = subject.generate.spec.containers.first
+ expect(container.volumeMounts.first['name']).to eq('configuration-volume')
+ expect(container.volumeMounts.first['mountPath']).to eq("/data/helm/#{app.name}/config")
+ end
- it 'has no serviceAccountName' do
- spec = subject.generate.spec
- expect(spec.serviceAccountName).to be_nil
- end
+ it 'includes a volume inside the specification' do
+ spec = subject.generate.spec
+ expect(spec.volumes.first['name']).to eq('configuration-volume')
+ end
- context 'with a service_account_name' do
- let(:service_account_name) { 'sa' }
+ it 'mounts configMap specification in the volume' do
+ volume = subject.generate.spec.volumes.first
+ expect(volume.configMap['name']).to eq("values-content-configuration-#{app.name}")
+ expect(volume.configMap['items'].first['key']).to eq(:'values.yaml')
+ expect(volume.configMap['items'].first['path']).to eq(:'values.yaml')
+ end
- it 'uses the serviceAccountName provided' do
+ it 'has no serviceAccountName' do
spec = subject.generate.spec
- expect(spec.serviceAccountName).to eq(service_account_name)
+ expect(spec.serviceAccountName).to be_nil
+ end
+
+ context 'with a service_account_name' do
+ let(:service_account_name) { 'sa' }
+
+ it 'uses the serviceAccountName provided' do
+ spec = subject.generate.spec
+ expect(spec.serviceAccountName).to eq(service_account_name)
+ end
end
end
end
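
The pod spec now relies on rspec-parameterized: each `where` row becomes a set of named bindings and `with_them` runs the nested examples once per row. A minimal, self-contained example of the same pattern (assuming the gem is loaded via `spec_helper`, as in the specs above):

    # frozen_string_literal: true

    require 'spec_helper'

    RSpec.describe 'table syntax example' do
      using RSpec::Parameterized::TableSyntax

      # Each row binds :a, :b and :sum; with_them repeats the example per row.
      where(:a, :b, :sum) do
        1 | 2 | 3
        5 | 5 | 10
      end

      with_them do
        it 'adds the operands' do
          expect(a + b).to eq(sum)
        end
      end
    end
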
diff --git a/spec/lib/gitlab/kubernetes/helm/v2/base_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/v2/base_command_spec.rb
new file mode 100644
index 00000000000..3d2b36b9094
--- /dev/null
+++ b/spec/lib/gitlab/kubernetes/helm/v2/base_command_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Kubernetes::Helm::V2::BaseCommand do
+ subject(:base_command) do
+ test_class.new(rbac)
+ end
+
+ let(:application) { create(:clusters_applications_helm) }
+ let(:rbac) { false }
+
+ let(:test_class) do
+ Class.new(described_class) do
+ def initialize(rbac)
+ super(
+ name: 'test-class-name',
+ rbac: rbac,
+ files: { some: 'value' }
+ )
+ end
+ end
+ end
+
+ describe 'HELM_VERSION' do
+ subject { described_class::HELM_VERSION }
+
+ it { is_expected.to match /^2\.\d+\.\d+$/ }
+ end
+
+ describe '#env' do
+ subject { base_command.env }
+
+ it { is_expected.to include(TILLER_NAMESPACE: 'gitlab-managed-apps') }
+ end
+
+ it_behaves_like 'helm command generator' do
+ let(:commands) { '' }
+ end
+
+ describe '#pod_name' do
+ subject { base_command.pod_name }
+
+ it { is_expected.to eq('install-test-class-name') }
+ end
+
+ it_behaves_like 'helm command' do
+ let(:command) { base_command }
+ end
+end
diff --git a/spec/lib/gitlab/kubernetes/helm/certificate_spec.rb b/spec/lib/gitlab/kubernetes/helm/v2/certificate_spec.rb
index b446c5e1149..a3f0fd9eb9b 100644
--- a/spec/lib/gitlab/kubernetes/helm/certificate_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/v2/certificate_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe Gitlab::Kubernetes::Helm::Certificate do
+RSpec.describe Gitlab::Kubernetes::Helm::V2::Certificate do
describe '.generate_root' do
subject { described_class.generate_root }
diff --git a/spec/lib/gitlab/kubernetes/helm/delete_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/v2/delete_command_spec.rb
index ff2c2d76f22..4a3a41dba4a 100644
--- a/spec/lib/gitlab/kubernetes/helm/delete_command_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/v2/delete_command_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Kubernetes::Helm::DeleteCommand do
+RSpec.describe Gitlab::Kubernetes::Helm::V2::DeleteCommand do
subject(:delete_command) { described_class.new(name: app_name, rbac: rbac, files: files) }
let(:app_name) { 'app-name' }
diff --git a/spec/lib/gitlab/kubernetes/helm/init_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/v2/init_command_spec.rb
index d538ed12a07..8ae78ada15c 100644
--- a/spec/lib/gitlab/kubernetes/helm/init_command_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/v2/init_command_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Kubernetes::Helm::InitCommand do
+RSpec.describe Gitlab::Kubernetes::Helm::V2::InitCommand do
subject(:init_command) { described_class.new(name: application.name, files: files, rbac: rbac) }
let(:application) { create(:clusters_applications_helm) }
diff --git a/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/v2/install_command_spec.rb
index 6ed7323c96f..250d1a82e7a 100644
--- a/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/v2/install_command_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Kubernetes::Helm::InstallCommand do
+RSpec.describe Gitlab::Kubernetes::Helm::V2::InstallCommand do
subject(:install_command) do
described_class.new(
name: 'app-name',
@@ -147,37 +147,6 @@ RSpec.describe Gitlab::Kubernetes::Helm::InstallCommand do
end
end
- context 'when there is no ca.pem file' do
- let(:files) { { 'file.txt': 'some content' } }
-
- it_behaves_like 'helm command generator' do
- let(:commands) do
- <<~EOS
- export HELM_HOST="localhost:44134"
- tiller -listen ${HELM_HOST} -alsologtostderr &
- helm init --client-only
- helm repo add app-name https://repository.example.com
- helm repo update
- #{helm_install_command}
- EOS
- end
-
- let(:helm_install_command) do
- <<~EOS.squish
- helm upgrade app-name chart-name
- --install
- --atomic
- --cleanup-on-fail
- --reset-values
- --version 1.2.3
- --set rbac.create\\=false,rbac.enabled\\=false
- --namespace gitlab-managed-apps
- -f /data/helm/app-name/config/values.yaml
- EOS
- end
- end
- end
-
context 'when there is no version' do
let(:version) { nil }
diff --git a/spec/lib/gitlab/kubernetes/helm/patch_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/v2/patch_command_spec.rb
index 487a38f286d..98eb77d397c 100644
--- a/spec/lib/gitlab/kubernetes/helm/patch_command_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/v2/patch_command_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Kubernetes::Helm::PatchCommand do
+RSpec.describe Gitlab::Kubernetes::Helm::V2::PatchCommand do
let(:files) { { 'ca.pem': 'some file content' } }
let(:repository) { 'https://repository.example.com' }
let(:rbac) { false }
@@ -69,33 +69,6 @@ RSpec.describe Gitlab::Kubernetes::Helm::PatchCommand do
end
end
- context 'when there is no ca.pem file' do
- let(:files) { { 'file.txt': 'some content' } }
-
- it_behaves_like 'helm command generator' do
- let(:commands) do
- <<~EOS
- export HELM_HOST="localhost:44134"
- tiller -listen ${HELM_HOST} -alsologtostderr &
- helm init --client-only
- helm repo add app-name https://repository.example.com
- helm repo update
- #{helm_upgrade_command}
- EOS
- end
-
- let(:helm_upgrade_command) do
- <<~EOS.squish
- helm upgrade app-name chart-name
- --reuse-values
- --version 1.2.3
- --namespace gitlab-managed-apps
- -f /data/helm/app-name/config/values.yaml
- EOS
- end
- end
- end
-
context 'when there is no version' do
let(:version) { nil }
diff --git a/spec/lib/gitlab/kubernetes/helm/reset_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/v2/reset_command_spec.rb
index 5a3ba59b8c0..9e580cea397 100644
--- a/spec/lib/gitlab/kubernetes/helm/reset_command_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/v2/reset_command_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Kubernetes::Helm::ResetCommand do
+RSpec.describe Gitlab::Kubernetes::Helm::V2::ResetCommand do
subject(:reset_command) { described_class.new(name: name, rbac: rbac, files: files) }
let(:rbac) { true }
diff --git a/spec/lib/gitlab/kubernetes/helm/base_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/v3/base_command_spec.rb
index a7abd6ab1bf..ad5ff13b4c9 100644
--- a/spec/lib/gitlab/kubernetes/helm/base_command_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/v3/base_command_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Kubernetes::Helm::BaseCommand do
+RSpec.describe Gitlab::Kubernetes::Helm::V3::BaseCommand do
subject(:base_command) do
test_class.new(rbac)
end
@@ -11,7 +11,7 @@ RSpec.describe Gitlab::Kubernetes::Helm::BaseCommand do
let(:rbac) { false }
let(:test_class) do
- Class.new(Gitlab::Kubernetes::Helm::BaseCommand) do
+ Class.new(described_class) do
def initialize(rbac)
super(
name: 'test-class-name',
@@ -22,6 +22,12 @@ RSpec.describe Gitlab::Kubernetes::Helm::BaseCommand do
end
end
+ describe 'HELM_VERSION' do
+ subject { described_class::HELM_VERSION }
+
+ it { is_expected.to match /^3\.\d+\.\d+$/ }
+ end
+
it_behaves_like 'helm command generator' do
let(:commands) { '' }
end
diff --git a/spec/lib/gitlab/kubernetes/helm/v3/delete_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/v3/delete_command_spec.rb
new file mode 100644
index 00000000000..63e7a8d2f25
--- /dev/null
+++ b/spec/lib/gitlab/kubernetes/helm/v3/delete_command_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Kubernetes::Helm::V3::DeleteCommand do
+ subject(:delete_command) { described_class.new(name: app_name, rbac: rbac, files: files) }
+
+ let(:app_name) { 'app-name' }
+ let(:rbac) { true }
+ let(:files) { {} }
+
+ it_behaves_like 'helm command generator' do
+ let(:commands) do
+ <<~EOS
+ helm uninstall app-name --namespace gitlab-managed-apps
+ EOS
+ end
+ end
+
+ describe '#pod_name' do
+ subject { delete_command.pod_name }
+
+ it { is_expected.to eq('uninstall-app-name') }
+ end
+
+ it_behaves_like 'helm command' do
+ let(:command) { delete_command }
+ end
+
+ describe '#delete_command' do
+ it 'deletes the release' do
+ expect(subject.delete_command).to eq('helm uninstall app-name --namespace gitlab-managed-apps')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/kubernetes/helm/v3/install_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/v3/install_command_spec.rb
new file mode 100644
index 00000000000..2bf1f713b3f
--- /dev/null
+++ b/spec/lib/gitlab/kubernetes/helm/v3/install_command_spec.rb
@@ -0,0 +1,168 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Kubernetes::Helm::V3::InstallCommand do
+ subject(:install_command) do
+ described_class.new(
+ name: 'app-name',
+ chart: 'chart-name',
+ rbac: rbac,
+ files: files,
+ version: version,
+ repository: repository,
+ preinstall: preinstall,
+ postinstall: postinstall
+ )
+ end
+
+ let(:files) { { 'ca.pem': 'some file content' } }
+ let(:repository) { 'https://repository.example.com' }
+ let(:rbac) { false }
+ let(:version) { '1.2.3' }
+ let(:preinstall) { nil }
+ let(:postinstall) { nil }
+
+ it_behaves_like 'helm command generator' do
+ let(:commands) do
+ <<~EOS
+ helm repo add app-name https://repository.example.com
+ helm repo update
+      #{helm_install_command}
+ EOS
+ end
+
+    let(:helm_install_command) do
+ <<~EOS.squish
+ helm upgrade app-name chart-name
+ --install
+ --atomic
+ --cleanup-on-fail
+ --reset-values
+ --version 1.2.3
+ --set rbac.create\\=false,rbac.enabled\\=false
+ --namespace gitlab-managed-apps
+ -f /data/helm/app-name/config/values.yaml
+ EOS
+ end
+ end
+
+ context 'when rbac is true' do
+ let(:rbac) { true }
+
+ it_behaves_like 'helm command generator' do
+ let(:commands) do
+ <<~EOS
+ helm repo add app-name https://repository.example.com
+ helm repo update
+ #{helm_install_command}
+ EOS
+ end
+
+ let(:helm_install_command) do
+ <<~EOS.squish
+ helm upgrade app-name chart-name
+ --install
+ --atomic
+ --cleanup-on-fail
+ --reset-values
+ --version 1.2.3
+ --set rbac.create\\=true,rbac.enabled\\=true
+ --namespace gitlab-managed-apps
+ -f /data/helm/app-name/config/values.yaml
+ EOS
+ end
+ end
+ end
+
+ context 'when there is a pre-install script' do
+ let(:preinstall) { ['/bin/date', '/bin/true'] }
+
+ it_behaves_like 'helm command generator' do
+ let(:commands) do
+ <<~EOS
+ helm repo add app-name https://repository.example.com
+ helm repo update
+ /bin/date
+ /bin/true
+ #{helm_install_command}
+ EOS
+ end
+
+ let(:helm_install_command) do
+ <<~EOS.squish
+ helm upgrade app-name chart-name
+ --install
+ --atomic
+ --cleanup-on-fail
+ --reset-values
+ --version 1.2.3
+ --set rbac.create\\=false,rbac.enabled\\=false
+ --namespace gitlab-managed-apps
+ -f /data/helm/app-name/config/values.yaml
+ EOS
+ end
+ end
+ end
+
+ context 'when there is a post-install script' do
+ let(:postinstall) { ['/bin/date', "/bin/false\n"] }
+
+ it_behaves_like 'helm command generator' do
+ let(:commands) do
+ <<~EOS
+ helm repo add app-name https://repository.example.com
+ helm repo update
+ #{helm_install_command}
+ /bin/date
+ /bin/false
+ EOS
+ end
+
+ let(:helm_install_command) do
+ <<~EOS.squish
+ helm upgrade app-name chart-name
+ --install
+ --atomic
+ --cleanup-on-fail
+ --reset-values
+ --version 1.2.3
+ --set rbac.create\\=false,rbac.enabled\\=false
+ --namespace gitlab-managed-apps
+ -f /data/helm/app-name/config/values.yaml
+ EOS
+ end
+ end
+ end
+
+ context 'when there is no version' do
+ let(:version) { nil }
+
+ it_behaves_like 'helm command generator' do
+ let(:commands) do
+ <<~EOS
+ helm repo add app-name https://repository.example.com
+ helm repo update
+ #{helm_install_command}
+ EOS
+ end
+
+ let(:helm_install_command) do
+ <<~EOS.squish
+ helm upgrade app-name chart-name
+ --install
+ --atomic
+ --cleanup-on-fail
+ --reset-values
+ --set rbac.create\\=false,rbac.enabled\\=false
+ --namespace gitlab-managed-apps
+ -f /data/helm/app-name/config/values.yaml
+ EOS
+ end
+ end
+ end
+
+ it_behaves_like 'helm command' do
+ let(:command) { install_command }
+ end
+end
diff --git a/spec/lib/gitlab/kubernetes/helm/v3/patch_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/v3/patch_command_spec.rb
new file mode 100644
index 00000000000..2f22e0f2e77
--- /dev/null
+++ b/spec/lib/gitlab/kubernetes/helm/v3/patch_command_spec.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Kubernetes::Helm::V3::PatchCommand do
+ let(:files) { { 'ca.pem': 'some file content' } }
+ let(:repository) { 'https://repository.example.com' }
+ let(:rbac) { false }
+ let(:version) { '1.2.3' }
+
+ subject(:patch_command) do
+ described_class.new(
+ name: 'app-name',
+ chart: 'chart-name',
+ rbac: rbac,
+ files: files,
+ version: version,
+ repository: repository
+ )
+ end
+
+ it_behaves_like 'helm command generator' do
+ let(:commands) do
+ <<~EOS
+ helm repo add app-name https://repository.example.com
+ helm repo update
+      #{helm_upgrade_command}
+ EOS
+ end
+
+    let(:helm_upgrade_command) do
+ <<~EOS.squish
+ helm upgrade app-name chart-name
+ --reuse-values
+ --version 1.2.3
+ --namespace gitlab-managed-apps
+ -f /data/helm/app-name/config/values.yaml
+ EOS
+ end
+ end
+
+ context 'when rbac is true' do
+ let(:rbac) { true }
+
+ it_behaves_like 'helm command generator' do
+ let(:commands) do
+ <<~EOS
+ helm repo add app-name https://repository.example.com
+ helm repo update
+ #{helm_upgrade_command}
+ EOS
+ end
+
+ let(:helm_upgrade_command) do
+ <<~EOS.squish
+ helm upgrade app-name chart-name
+ --reuse-values
+ --version 1.2.3
+ --namespace gitlab-managed-apps
+ -f /data/helm/app-name/config/values.yaml
+ EOS
+ end
+ end
+ end
+
+ context 'when there is no version' do
+ let(:version) { nil }
+
+ it { expect { patch_command }.to raise_error(ArgumentError, 'version is required') }
+ end
+
+ describe '#pod_name' do
+ subject { patch_command.pod_name }
+
+ it { is_expected.to eq 'install-app-name' }
+ end
+
+ it_behaves_like 'helm command' do
+ let(:command) { patch_command }
+ end
+end
diff --git a/spec/lib/gitlab/kubernetes/kube_client_spec.rb b/spec/lib/gitlab/kubernetes/kube_client_spec.rb
index 7b6d143dda9..521f13dc9cc 100644
--- a/spec/lib/gitlab/kubernetes/kube_client_spec.rb
+++ b/spec/lib/gitlab/kubernetes/kube_client_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Gitlab::Kubernetes::KubeClient do
case method_name
when /\A(get_|delete_)/
client.public_send(method_name)
- when /\A(create_|update_)/
+ when /\A(create_|update_|patch_)/
client.public_send(method_name, {})
else
raise "Unknown method name #{method_name}"
@@ -302,6 +302,8 @@ RSpec.describe Gitlab::Kubernetes::KubeClient do
:create_role,
:get_role,
:update_role,
+ :delete_role_binding,
+ :update_role_binding,
:update_cluster_role_binding
].each do |method|
describe "##{method}" do
@@ -375,6 +377,34 @@ RSpec.describe Gitlab::Kubernetes::KubeClient do
end
end
+ describe '#patch_ingress' do
+ let(:extensions_client) { client.extensions_client }
+ let(:networking_client) { client.networking_client }
+
+ include_examples 'redirection not allowed', 'patch_ingress'
+ include_examples 'dns rebinding not allowed', 'patch_ingress'
+
+ it 'delegates to the extensions client' do
+ expect(extensions_client).to receive(:patch_ingress)
+
+ client.patch_ingress
+ end
+
+ context 'extensions does not have ingress for Kubernetes 1.22+ clusters' do
+ before do
+ WebMock
+ .stub_request(:get, api_url + '/apis/extensions/v1beta1')
+ .to_return(kube_response(kube_1_22_extensions_v1beta1_discovery_body))
+ end
+
+      it 'delegates to the networking client' do
+ expect(networking_client).to receive(:patch_ingress)
+
+ client.patch_ingress
+ end
+ end
+ end
+
describe 'istio API group' do
let(:istio_client) { client.istio_client }
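
The new `#patch_ingress` examples encode the Kubernetes API migration: Ingress left `extensions/v1beta1` in 1.22, so when discovery no longer lists it there the client is expected to fall back to the networking API group. A hedged sketch of that delegation (the real client decides via API discovery, which is only approximated here):

    # Illustration of the behaviour the spec asserts; the predicate name is an assumption.
    def patch_ingress(*args)
      if extensions_group_serves_ingress?   # assumed discovery-based check
        extensions_client.patch_ingress(*args)
      else
        networking_client.patch_ingress(*args)
      end
    end
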
diff --git a/spec/lib/gitlab/legacy_github_import/importer_spec.rb b/spec/lib/gitlab/legacy_github_import/importer_spec.rb
index 56d708a1e11..56074147854 100644
--- a/spec/lib/gitlab/legacy_github_import/importer_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/importer_spec.rb
@@ -52,7 +52,7 @@ RSpec.describe Gitlab::LegacyGithubImport::Importer do
allow_any_instance_of(Octokit::Client).to receive(:milestones).and_return([milestone, milestone])
allow_any_instance_of(Octokit::Client).to receive(:issues).and_return([issue1, issue2])
allow_any_instance_of(Octokit::Client).to receive(:pull_requests).and_return([pull_request, pull_request])
- allow_any_instance_of(Octokit::Client).to receive(:issues_comments).and_return([])
+ allow_any_instance_of(Octokit::Client).to receive(:issues_comments).and_raise(Octokit::NotFound)
allow_any_instance_of(Octokit::Client).to receive(:pull_requests_comments).and_return([])
allow_any_instance_of(Octokit::Client).to receive(:last_response).and_return(double(rels: { next: nil }))
allow_any_instance_of(Octokit::Client).to receive(:releases).and_return([release1, release2])
@@ -169,6 +169,7 @@ RSpec.describe Gitlab::LegacyGithubImport::Importer do
errors: [
{ type: :label, url: "#{api_root}/repos/octocat/Hello-World/labels/bug", errors: "Validation failed: Title can't be blank, Title is invalid" },
{ type: :issue, url: "#{api_root}/repos/octocat/Hello-World/issues/1348", errors: "Validation failed: Title can't be blank" },
+ { type: :issues_comments, errors: 'Octokit::NotFound' },
{ type: :wiki, errors: "Gitlab::Git::CommandError" }
]
}
@@ -274,7 +275,7 @@ RSpec.describe Gitlab::LegacyGithubImport::Importer do
allow(project).to receive(:import_data).and_return(double(credentials: credentials))
expect(Gitlab::LegacyGithubImport::Client).to receive(:new).with(
credentials[:user],
- {}
+ **{}
)
subject.client
diff --git a/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb b/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb
index 631325402d9..1f7daaa308d 100644
--- a/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb
+++ b/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Metrics::RequestsRackMiddleware do
+RSpec.describe Gitlab::Metrics::RequestsRackMiddleware, :aggregate_failures do
let(:app) { double('app') }
subject { described_class.new(app) }
@@ -21,20 +21,15 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware do
allow(app).to receive(:call).and_return([200, nil, nil])
end
- it 'increments requests count' do
- expect(described_class).to receive_message_chain(:http_request_total, :increment).with(method: 'get', status: 200, feature_category: 'unknown')
-
- subject.call(env)
- end
-
RSpec::Matchers.define :a_positive_execution_time do
match { |actual| actual > 0 }
end
- it 'measures execution time' do
+ it 'tracks request count and duration' do
+ expect(described_class).to receive_message_chain(:http_requests_total, :increment).with(method: 'get', status: '200', feature_category: 'unknown')
expect(described_class).to receive_message_chain(:http_request_duration_seconds, :observe).with({ method: 'get' }, a_positive_execution_time)
- Timecop.scale(3600) { subject.call(env) }
+ subject.call(env)
end
context 'request is a health check endpoint' do
@@ -44,15 +39,10 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware do
env['PATH_INFO'] = path
end
- it 'increments health endpoint counter rather than overall counter' do
- expect(described_class).to receive_message_chain(:http_health_requests_total, :increment).with(method: 'get', status: 200)
- expect(described_class).not_to receive(:http_request_total)
-
- subject.call(env)
- end
-
- it 'does not record the request duration' do
+ it 'increments health endpoint counter rather than overall counter and does not record duration' do
expect(described_class).not_to receive(:http_request_duration_seconds)
+ expect(described_class).not_to receive(:http_requests_total)
+ expect(described_class).to receive_message_chain(:http_health_requests_total, :increment).with(method: 'get', status: '200')
subject.call(env)
end
@@ -67,14 +57,9 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware do
env['PATH_INFO'] = path
end
- it 'increments overall counter rather than health endpoint counter' do
- expect(described_class).to receive_message_chain(:http_request_total, :increment).with(method: 'get', status: 200, feature_category: 'unknown')
+ it 'increments regular counters and tracks duration' do
+ expect(described_class).to receive_message_chain(:http_requests_total, :increment).with(method: 'get', status: '200', feature_category: 'unknown')
expect(described_class).not_to receive(:http_health_requests_total)
-
- subject.call(env)
- end
-
- it 'records the request duration' do
expect(described_class)
.to receive_message_chain(:http_request_duration_seconds, :observe)
.with({ method: 'get' }, a_positive_execution_time)
@@ -88,62 +73,91 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware do
context '@app.call throws exception' do
let(:http_request_duration_seconds) { double('http_request_duration_seconds') }
+ let(:http_requests_total) { double('http_requests_total') }
before do
allow(app).to receive(:call).and_raise(StandardError)
allow(described_class).to receive(:http_request_duration_seconds).and_return(http_request_duration_seconds)
+ allow(described_class).to receive(:http_requests_total).and_return(http_requests_total)
end
- it 'increments exceptions count' do
+ it 'tracks the correct metrics' do
expect(described_class).to receive_message_chain(:rack_uncaught_errors_count, :increment)
+ expect(described_class).to receive_message_chain(:http_requests_total, :increment).with(method: 'get', status: 'undefined', feature_category: 'unknown')
+ expect(described_class.http_request_duration_seconds).not_to receive(:observe)
expect { subject.call(env) }.to raise_error(StandardError)
end
+ end
- it 'increments requests count' do
- expect(described_class).to receive_message_chain(:http_request_total, :increment).with(method: 'get', status: 'undefined', feature_category: 'unknown')
-
- expect { subject.call(env) }.to raise_error(StandardError)
- end
+ context 'feature category header' do
+ context 'when a feature category header is present' do
+ before do
+ allow(app).to receive(:call).and_return([200, { described_class::FEATURE_CATEGORY_HEADER => 'issue_tracking' }, nil])
+ end
- it "does't measure request execution time" do
- expect(described_class.http_request_duration_seconds).not_to receive(:increment)
+ it 'adds the feature category to the labels for http_requests_total' do
+ expect(described_class).to receive_message_chain(:http_requests_total, :increment).with(method: 'get', status: '200', feature_category: 'issue_tracking')
+ expect(described_class).not_to receive(:http_health_requests_total)
- expect { subject.call(env) }.to raise_error(StandardError)
- end
- end
+ subject.call(env)
+ end
- context 'when a feature category header is present' do
- before do
- allow(app).to receive(:call).and_return([200, { described_class::FEATURE_CATEGORY_HEADER => 'issue_tracking' }, nil])
- end
+ it 'does not record a feature category for health check endpoints' do
+ env['PATH_INFO'] = '/-/liveness'
- it 'adds the feature category to the labels for http_request_total' do
- expect(described_class).to receive_message_chain(:http_request_total, :increment).with(method: 'get', status: 200, feature_category: 'issue_tracking')
+ expect(described_class).to receive_message_chain(:http_health_requests_total, :increment).with(method: 'get', status: '200')
+ expect(described_class).not_to receive(:http_requests_total)
- subject.call(env)
+ subject.call(env)
+ end
end
- it 'does not record a feature category for health check endpoints' do
- env['PATH_INFO'] = '/-/liveness'
+ context 'when the feature category header is an empty string' do
+ before do
+ allow(app).to receive(:call).and_return([200, { described_class::FEATURE_CATEGORY_HEADER => '' }, nil])
+ end
- expect(described_class).to receive_message_chain(:http_health_requests_total, :increment).with(method: 'get', status: 200)
- expect(described_class).not_to receive(:http_request_total)
+ it 'sets the feature category to unknown' do
+ expect(described_class).to receive_message_chain(:http_requests_total, :increment).with(method: 'get', status: '200', feature_category: 'unknown')
+ expect(described_class).not_to receive(:http_health_requests_total)
- subject.call(env)
+ subject.call(env)
+ end
end
end
- describe '.initialize_http_request_duration_seconds' do
- it "sets labels" do
+ describe '.initialize_metrics', :prometheus do
+ it "sets labels for http_requests_total" do
expected_labels = []
- described_class::HTTP_METHODS.each do |method|
- expected_labels << { method: method }
+
+ described_class::HTTP_METHODS.each do |method, statuses|
+ statuses.each do |status|
+ described_class::FEATURE_CATEGORIES_TO_INITIALIZE.each do |feature_category|
+ expected_labels << { method: method.to_s, status: status.to_s, feature_category: feature_category.to_s }
+ end
+ end
end
- described_class.initialize_http_request_duration_seconds
+ described_class.initialize_metrics
+
+ expect(described_class.http_requests_total.values.keys).to contain_exactly(*expected_labels)
+ end
+
+ it 'sets labels for http_request_duration_seconds' do
+ expected_labels = described_class::HTTP_METHODS.keys.map { |method| { method: method } }
+
+ described_class.initialize_metrics
+
expect(described_class.http_request_duration_seconds.values.keys).to include(*expected_labels)
end
+
+ it 'has every label in config/feature_categories.yml' do
+ defaults = [described_class::FEATURE_CATEGORY_DEFAULT, 'not_owned']
+ feature_categories = YAML.load_file(Rails.root.join('config', 'feature_categories.yml')).map(&:strip) + defaults
+
+ expect(described_class::FEATURE_CATEGORIES_TO_INITIALIZE).to all(be_in(feature_categories))
+ end
end
end
end
diff --git a/spec/lib/gitlab/middleware/handle_malformed_strings_spec.rb b/spec/lib/gitlab/middleware/handle_malformed_strings_spec.rb
new file mode 100644
index 00000000000..e806f6478b7
--- /dev/null
+++ b/spec/lib/gitlab/middleware/handle_malformed_strings_spec.rb
@@ -0,0 +1,182 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require "rack/test"
+
+RSpec.describe Gitlab::Middleware::HandleMalformedStrings do
+ include GitHttpHelpers
+
+ let(:null_byte) { "\u0000" }
+ let(:escaped_null_byte) { "%00" }
+ let(:invalid_string) { "mal\xC0formed" }
+ let(:escaped_invalid_string) { "mal%c0formed" }
+ let(:error_400) { [400, { 'Content-Type' => 'text/plain' }, ['Bad Request']] }
+ let(:app) { double(:app) }
+
+ subject { described_class.new(app) }
+
+ before do
+ allow(app).to receive(:call) do |args|
+ args
+ end
+ end
+
+ def env_for(params = {})
+ Rack::MockRequest.env_for('/', { params: params })
+ end
+
+ context 'in the URL' do
+ it 'rejects null bytes' do
+ # We have to create the env separately or Rack::MockRequest complains about invalid URI
+ env = env_for
+ env['PATH_INFO'] = "/someplace/witha#{null_byte}nullbyte"
+
+ expect(subject.call(env)).to eq error_400
+ end
+
+ it 'rejects escaped null bytes' do
+ # We have to create the env separately or Rack::MockRequest complains about invalid URI
+ env = env_for
+ env['PATH_INFO'] = "/someplace/withan#{escaped_null_byte}escaped nullbyte"
+
+ expect(subject.call(env)).to eq error_400
+ end
+
+ it 'rejects malformed strings' do
+ # We have to create the env separately or Rack::MockRequest complains about invalid URI
+ env = env_for
+ env['PATH_INFO'] = "/someplace/with_an/#{invalid_string}"
+
+ expect(subject.call(env)).to eq error_400
+ end
+
+ it 'rejects escaped malformed strings' do
+ # We have to create the env separately or Rack::MockRequest complains about invalid URI
+ env = env_for
+ env['PATH_INFO'] = "/someplace/with_an/#{escaped_invalid_string}"
+
+ expect(subject.call(env)).to eq error_400
+ end
+ end
+
+ context 'in authorization headers' do
+ let(:problematic_input) { null_byte }
+
+ shared_examples 'rejecting invalid input' do
+ it 'rejects problematic input in the password' do
+ env = env_for.merge(auth_env("username", "password#{problematic_input}encoded", nil))
+
+ expect(subject.call(env)).to eq error_400
+ end
+
+ it 'rejects problematic input in the username' do
+ env = env_for.merge(auth_env("username#{problematic_input}", "passwordencoded", nil))
+
+ expect(subject.call(env)).to eq error_400
+ end
+
+ it 'rejects problematic input in non-basic-auth tokens' do
+ env = env_for.merge('HTTP_AUTHORIZATION' => "GL-Geo hello#{problematic_input}world")
+
+ expect(subject.call(env)).to eq error_400
+ end
+ end
+
+ it_behaves_like 'rejecting invalid input' do
+ let(:problematic_input) { null_byte }
+ end
+
+ it_behaves_like 'rejecting invalid input' do
+ let(:problematic_input) { invalid_string }
+ end
+
+ it_behaves_like 'rejecting invalid input' do
+ let(:problematic_input) { "\xC3" }
+ end
+
+ it 'does not reject correct non-basic-auth tokens' do
+      # This token is known to include a null byte when decoded as Base64,
+      # even though it was never Base64-encoded in the first place.
+ special_token = 'GL-Geo ta8KakZWpu0AcledQ6n0:eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJkYXRhIjoie1wic2NvcGVcIjpcImdlb19hcGlcIn0iLCJqdGkiOiIwYWFmNzVlYi1lNWRkLTRkZjEtODQzYi1lM2E5ODhhNDMwMzIiLCJpYXQiOjE2MDQ3MDI4NzUsIm5iZiI6MTYwNDcwMjg3MCwiZXhwIjoxNjA0NzAyOTM1fQ.NcgDipDyxSP5uSzxc01ylzH4GkTxJRflNNjT7U6fpg4'
+ expect(Base64.decode64(special_token)).to include(null_byte)
+
+ env = env_for.merge('HTTP_AUTHORIZATION' => special_token)
+
+ expect(subject.call(env)).not_to eq error_400
+ end
+ end
+
+ context 'in params' do
+ shared_examples_for 'checks params' do
+ it 'rejects bad params in a top level param' do
+ env = env_for(name: "null#{problematic_input}byte")
+
+ expect(subject.call(env)).to eq error_400
+ end
+
+ it "rejects bad params for hashes with strings" do
+ env = env_for(name: { inner_key: "I am #{problematic_input} bad" })
+
+ expect(subject.call(env)).to eq error_400
+ end
+
+ it "rejects bad params for arrays with strings" do
+ env = env_for(name: ["I am #{problematic_input} bad"])
+
+ expect(subject.call(env)).to eq error_400
+ end
+
+ it "rejects bad params for arrays containing hashes with string values" do
+ env = env_for(name: [
+ {
+ inner_key: "I am #{problematic_input} bad"
+ }
+ ])
+
+ expect(subject.call(env)).to eq error_400
+ end
+ end
+
+ context 'with null byte' do
+ let(:problematic_input) { null_byte }
+
+ it_behaves_like 'checks params'
+
+ it "gives up and does not reject too deeply nested params" do
+ env = env_for(name: [
+ {
+ inner_key: { deeper_key: [{ hash_inside_array_key: "I am #{problematic_input} bad" }] }
+ }
+ ])
+
+ expect(subject.call(env)).not_to eq error_400
+ end
+ end
+
+ context 'with malformed strings' do
+ it_behaves_like 'checks params' do
+ let(:problematic_input) { invalid_string }
+ end
+ end
+ end
+
+ context 'without problematic input' do
+ it "does not error for strings" do
+ env = env_for(name: "safe name")
+
+ expect(subject.call(env)).not_to eq error_400
+ end
+
+ it "does not error with no params" do
+ env = env_for
+
+ expect(subject.call(env)).not_to eq error_400
+ end
+ end
+
+ it 'does not modify the env' do
+ env = env_for
+
+ expect { subject.call(env) }.not_to change { env }
+ end
+end
diff --git a/spec/lib/gitlab/middleware/handle_null_bytes_spec.rb b/spec/lib/gitlab/middleware/handle_null_bytes_spec.rb
deleted file mode 100644
index 76a5174817e..00000000000
--- a/spec/lib/gitlab/middleware/handle_null_bytes_spec.rb
+++ /dev/null
@@ -1,88 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require "rack/test"
-
-RSpec.describe Gitlab::Middleware::HandleNullBytes do
- let(:null_byte) { "\u0000" }
- let(:error_400) { [400, {}, ["Bad Request"]] }
- let(:app) { double(:app) }
-
- subject { described_class.new(app) }
-
- before do
- allow(app).to receive(:call) do |args|
- args
- end
- end
-
- def env_for(params = {})
- Rack::MockRequest.env_for('/', { params: params })
- end
-
- context 'with null bytes in params' do
- it 'rejects null bytes in a top level param' do
- env = env_for(name: "null#{null_byte}byte")
-
- expect(subject.call(env)).to eq error_400
- end
-
- it "responds with 400 BadRequest for hashes with strings" do
- env = env_for(name: { inner_key: "I am #{null_byte} bad" })
-
- expect(subject.call(env)).to eq error_400
- end
-
- it "responds with 400 BadRequest for arrays with strings" do
- env = env_for(name: ["I am #{null_byte} bad"])
-
- expect(subject.call(env)).to eq error_400
- end
-
- it "responds with 400 BadRequest for arrays containing hashes with string values" do
- env = env_for(name: [
- {
- inner_key: "I am #{null_byte} bad"
- }
- ])
-
- expect(subject.call(env)).to eq error_400
- end
-
- it "gives up and does not 400 with too deeply nested params" do
- env = env_for(name: [
- {
- inner_key: { deeper_key: [{ hash_inside_array_key: "I am #{null_byte} bad" }] }
- }
- ])
-
- expect(subject.call(env)).not_to eq error_400
- end
- end
-
- context 'without null bytes in params' do
- it "does not respond with a 400 for strings" do
- env = env_for(name: "safe name")
-
- expect(subject.call(env)).not_to eq error_400
- end
-
- it "does not respond with a 400 with no params" do
- env = env_for
-
- expect(subject.call(env)).not_to eq error_400
- end
- end
-
- context 'when disabled via env flag' do
- before do
- stub_env('REJECT_NULL_BYTES', '1')
- end
-
- it 'does not respond with a 400 no matter what' do
- env = env_for(name: "null#{null_byte}byte")
-
- expect(subject.call(env)).not_to eq error_400
- end
- end
-end
diff --git a/spec/lib/gitlab/middleware/read_only_spec.rb b/spec/lib/gitlab/middleware/read_only_spec.rb
index 50dd38278b9..642b47fe087 100644
--- a/spec/lib/gitlab/middleware/read_only_spec.rb
+++ b/spec/lib/gitlab/middleware/read_only_spec.rb
@@ -3,209 +3,11 @@
require 'spec_helper'
RSpec.describe Gitlab::Middleware::ReadOnly do
- include Rack::Test::Methods
- using RSpec::Parameterized::TableSyntax
-
- let(:rack_stack) do
- rack = Rack::Builder.new do
- use ActionDispatch::Session::CacheStore
- use ActionDispatch::Flash
- end
-
- rack.run(subject)
- rack.to_app
- end
-
- let(:observe_env) do
- Module.new do
- attr_reader :env
-
- def call(env)
- @env = env
- super
- end
- end
- end
-
- let(:request) { Rack::MockRequest.new(rack_stack) }
-
- subject do
- described_class.new(fake_app).tap do |app|
- app.extend(observe_env)
- end
- end
-
- context 'normal requests to a read-only GitLab instance' do
- let(:fake_app) { lambda { |env| [200, { 'Content-Type' => 'text/plain' }, ['OK']] } }
-
+ context 'when database is read-only' do
before do
allow(Gitlab::Database).to receive(:read_only?) { true }
end
- it 'expects PATCH requests to be disallowed' do
- response = request.patch('/test_request')
-
- expect(response).to be_redirect
- expect(subject).to disallow_request
- end
-
- it 'expects PUT requests to be disallowed' do
- response = request.put('/test_request')
-
- expect(response).to be_redirect
- expect(subject).to disallow_request
- end
-
- it 'expects POST requests to be disallowed' do
- response = request.post('/test_request')
-
- expect(response).to be_redirect
- expect(subject).to disallow_request
- end
-
- it 'expects a internal POST request to be allowed after a disallowed request' do
- response = request.post('/test_request')
-
- expect(response).to be_redirect
-
- response = request.post("/api/#{API::API.version}/internal")
-
- expect(response).not_to be_redirect
- end
-
- it 'expects DELETE requests to be disallowed' do
- response = request.delete('/test_request')
-
- expect(response).to be_redirect
- expect(subject).to disallow_request
- end
-
- it 'expects POST of new file that looks like an LFS batch url to be disallowed' do
- expect(Rails.application.routes).to receive(:recognize_path).and_call_original
- response = request.post('/root/gitlab-ce/new/master/app/info/lfs/objects/batch')
-
- expect(response).to be_redirect
- expect(subject).to disallow_request
- end
-
- it 'returns last_vistited_url for disallowed request' do
- response = request.post('/test_request')
-
- expect(response.location).to eq 'http://localhost/'
- end
-
- context 'whitelisted requests' do
- it 'expects a POST internal request to be allowed' do
- expect(Rails.application.routes).not_to receive(:recognize_path)
- response = request.post("/api/#{API::API.version}/internal")
-
- expect(response).not_to be_redirect
- expect(subject).not_to disallow_request
- end
-
- it 'expects a graphql request to be allowed' do
- response = request.post("/api/graphql")
-
- expect(response).not_to be_redirect
- expect(subject).not_to disallow_request
- end
-
- context 'relative URL is configured' do
- before do
- stub_config_setting(relative_url_root: '/gitlab')
- end
-
- it 'expects a graphql request to be allowed' do
- response = request.post("/gitlab/api/graphql")
-
- expect(response).not_to be_redirect
- expect(subject).not_to disallow_request
- end
- end
-
- context 'sidekiq admin requests' do
- where(:mounted_at) do
- [
- '',
- '/',
- '/gitlab',
- '/gitlab/',
- '/gitlab/gitlab',
- '/gitlab/gitlab/'
- ]
- end
-
- with_them do
- before do
- stub_config_setting(relative_url_root: mounted_at)
- end
-
- it 'allows requests' do
- path = File.join(mounted_at, 'admin/sidekiq')
- response = request.post(path)
-
- expect(response).not_to be_redirect
- expect(subject).not_to disallow_request
-
- response = request.get(path)
-
- expect(response).not_to be_redirect
- expect(subject).not_to disallow_request
- end
- end
- end
-
- where(:description, :path) do
- 'LFS request to batch' | '/root/rouge.git/info/lfs/objects/batch'
- 'LFS request to locks verify' | '/root/rouge.git/info/lfs/locks/verify'
- 'LFS request to locks create' | '/root/rouge.git/info/lfs/locks'
- 'LFS request to locks unlock' | '/root/rouge.git/info/lfs/locks/1/unlock'
- 'request to git-upload-pack' | '/root/rouge.git/git-upload-pack'
- 'request to git-receive-pack' | '/root/rouge.git/git-receive-pack'
- end
-
- with_them do
- it "expects a POST #{description} URL to be allowed" do
- expect(Rails.application.routes).to receive(:recognize_path).and_call_original
- response = request.post(path)
-
- expect(response).not_to be_redirect
- expect(subject).not_to disallow_request
- end
- end
- end
- end
-
- context 'json requests to a read-only GitLab instance' do
- let(:fake_app) { lambda { |env| [200, { 'Content-Type' => 'application/json' }, ['OK']] } }
- let(:content_json) { { 'CONTENT_TYPE' => 'application/json' } }
-
- before do
- allow(Gitlab::Database).to receive(:read_only?) { true }
- end
-
- it 'expects PATCH requests to be disallowed' do
- response = request.patch('/test_request', content_json)
-
- expect(response).to disallow_request_in_json
- end
-
- it 'expects PUT requests to be disallowed' do
- response = request.put('/test_request', content_json)
-
- expect(response).to disallow_request_in_json
- end
-
- it 'expects POST requests to be disallowed' do
- response = request.post('/test_request', content_json)
-
- expect(response).to disallow_request_in_json
- end
-
- it 'expects DELETE requests to be disallowed' do
- response = request.delete('/test_request', content_json)
-
- expect(response).to disallow_request_in_json
- end
+ it_behaves_like 'write access for a read-only GitLab instance'
end
end
diff --git a/spec/lib/gitlab/omniauth_initializer_spec.rb b/spec/lib/gitlab/omniauth_initializer_spec.rb
index a38dffcfce0..577d15b8495 100644
--- a/spec/lib/gitlab/omniauth_initializer_spec.rb
+++ b/spec/lib/gitlab/omniauth_initializer_spec.rb
@@ -101,33 +101,5 @@ RSpec.describe Gitlab::OmniauthInitializer do
subject.execute([google_config])
end
-
- it 'converts client_auth_method to a Symbol for openid_connect' do
- openid_connect_config = {
- 'name' => 'openid_connect',
- 'args' => { name: 'openid_connect', client_auth_method: 'basic' }
- }
-
- expect(devise_config).to receive(:omniauth).with(
- :openid_connect,
- { name: 'openid_connect', client_auth_method: :basic }
- )
-
- subject.execute([openid_connect_config])
- end
-
- it 'converts client_auth_method to a Symbol for strategy_class OpenIDConnect' do
- openid_connect_config = {
- 'name' => 'openid_connect',
- 'args' => { strategy_class: OmniAuth::Strategies::OpenIDConnect, client_auth_method: 'jwt_bearer' }
- }
-
- expect(devise_config).to receive(:omniauth).with(
- :openid_connect,
- { strategy_class: OmniAuth::Strategies::OpenIDConnect, client_auth_method: :jwt_bearer }
- )
-
- subject.execute([openid_connect_config])
- end
end
end
diff --git a/spec/lib/gitlab/path_regex_spec.rb b/spec/lib/gitlab/path_regex_spec.rb
index 7cecc29afa4..f320b8a66e8 100644
--- a/spec/lib/gitlab/path_regex_spec.rb
+++ b/spec/lib/gitlab/path_regex_spec.rb
@@ -101,10 +101,15 @@ RSpec.describe Gitlab::PathRegex do
.concat(ee_top_level_words)
.concat(files_in_public)
.concat(Array(API::API.prefix.to_s))
+ .concat(sitemap_words)
.compact
.uniq
end
+ let(:sitemap_words) do
+ %w(sitemap sitemap.xml sitemap.xml.gz)
+ end
+
let(:ee_top_level_words) do
%w(unsubscribes v2)
end
@@ -172,7 +177,7 @@ RSpec.describe Gitlab::PathRegex do
# We ban new items in this list, see https://gitlab.com/gitlab-org/gitlab/-/issues/215362
it 'does not allow expansion' do
- expect(described_class::TOP_LEVEL_ROUTES.size).to eq(41)
+ expect(described_class::TOP_LEVEL_ROUTES.size).to eq(44)
end
end
@@ -218,6 +223,8 @@ RSpec.describe Gitlab::PathRegex do
expect(subject).not_to match('admin/')
expect(subject).not_to match('api/')
expect(subject).not_to match('.well-known/')
+ expect(subject).not_to match('sitemap.xml/')
+ expect(subject).not_to match('sitemap.xml.gz/')
end
it 'accepts project wildcard routes' do
@@ -458,4 +465,34 @@ RSpec.describe Gitlab::PathRegex do
it_behaves_like 'invalid snippet routes'
end
+
+ describe '.container_image_regex' do
+ subject { described_class.container_image_regex }
+
+ it { is_expected.to match('gitlab-foss') }
+ it { is_expected.to match('gitlab_foss') }
+ it { is_expected.to match('gitlab-org/gitlab-foss') }
+ it { is_expected.to match('100px.com/100px.ruby') }
+
+ it 'only matches at most one slash' do
+ expect(subject.match('foo/bar/baz')[0]).to eq('foo/bar')
+ end
+
+ it 'does not match other non-word characters' do
+ expect(subject.match('ruby:2.7.0')[0]).to eq('ruby')
+ end
+ end
+
+ describe '.container_image_blob_sha_regex' do
+ subject { described_class.container_image_blob_sha_regex }
+
+ it { is_expected.to match('sha256:asdf1234567890ASDF') }
+ it { is_expected.to match('foo:123') }
+ it { is_expected.to match('a12bc3f590szp') }
+ it { is_expected.not_to match('') }
+
+ it 'does not match malicious characters' do
+ expect(subject.match('sha256:asdf1234%2f')[0]).to eq('sha256:asdf1234')
+ end
+ end
end
diff --git a/spec/lib/gitlab/quick_actions/extractor_spec.rb b/spec/lib/gitlab/quick_actions/extractor_spec.rb
index f4104b78d5c..61fffe3fb6b 100644
--- a/spec/lib/gitlab/quick_actions/extractor_spec.rb
+++ b/spec/lib/gitlab/quick_actions/extractor_spec.rb
@@ -264,6 +264,22 @@ RSpec.describe Gitlab::QuickActions::Extractor do
expect(msg).to eq 'Fixes #123'
end
+ it 'does not get confused if command comes before an inline code' do
+ msg = "/reopen\n`some inline code`\n/labels ~a\n`more inline code`"
+ msg, commands = extractor.extract_commands(msg)
+
+ expect(commands).to eq([['reopen'], ['labels', '~a']])
+ expect(msg).to eq "`some inline code`\n`more inline code`"
+ end
+
+ it 'does not get confused if command comes before a blockcode' do
+ msg = "/reopen\n```\nsome blockcode\n```\n/labels ~a\n```\nmore blockcode\n```"
+ msg, commands = extractor.extract_commands(msg)
+
+ expect(commands).to eq([['reopen'], ['labels', '~a']])
+ expect(msg).to eq "```\nsome blockcode\n```\n```\nmore blockcode\n```"
+ end
+
it 'does not extract commands inside a blockcode' do
msg = "Hello\r\n```\r\nThis is some text\r\n/close\r\n/assign @user\r\n```\r\n\r\nWorld"
expected = msg.delete("\r")
diff --git a/spec/lib/gitlab/redis/wrapper_spec.rb b/spec/lib/gitlab/redis/wrapper_spec.rb
index 283853ee863..ec233c022ee 100644
--- a/spec/lib/gitlab/redis/wrapper_spec.rb
+++ b/spec/lib/gitlab/redis/wrapper_spec.rb
@@ -26,6 +26,12 @@ RSpec.describe Gitlab::Redis::Wrapper do
end
end
+ describe '.version' do
+ it 'returns a version' do
+ expect(described_class.version).to be_present
+ end
+ end
+
describe '.instrumentation_class' do
it 'raises a NotImplementedError' do
expect(described_class).to receive(:instrumentation_class).and_call_original
diff --git a/spec/lib/gitlab/reference_extractor_spec.rb b/spec/lib/gitlab/reference_extractor_spec.rb
index 0172defc75d..229d49868d4 100644
--- a/spec/lib/gitlab/reference_extractor_spec.rb
+++ b/spec/lib/gitlab/reference_extractor_spec.rb
@@ -296,7 +296,7 @@ RSpec.describe Gitlab::ReferenceExtractor do
end
it 'returns all supported prefixes' do
- expect(prefixes.keys.uniq).to match_array(%w(@ # ~ % ! $ & *iteration:))
+ expect(prefixes.keys.uniq).to match_array(%w(@ # ~ % ! $ & [vulnerability: *iteration:))
end
it 'does not allow one prefix for multiple referables if not allowed specifically' do
diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb
index 451526021c1..ebb37f45b95 100644
--- a/spec/lib/gitlab/regex_spec.rb
+++ b/spec/lib/gitlab/regex_spec.rb
@@ -248,6 +248,15 @@ RSpec.describe Gitlab::Regex do
it { is_expected.not_to match('!!()()') }
end
+ describe '.composer_dev_version_regex' do
+ subject { described_class.composer_dev_version_regex }
+
+ it { is_expected.to match('dev-master') }
+ it { is_expected.to match('1.x-dev') }
+ it { is_expected.not_to match('foobar') }
+ it { is_expected.not_to match('1.2.3') }
+ end
+
describe '.conan_recipe_component_regex' do
subject { described_class.conan_recipe_component_regex }
diff --git a/spec/lib/gitlab/relative_positioning/mover_spec.rb b/spec/lib/gitlab/relative_positioning/mover_spec.rb
index dafd34585a8..cbb15ae876d 100644
--- a/spec/lib/gitlab/relative_positioning/mover_spec.rb
+++ b/spec/lib/gitlab/relative_positioning/mover_spec.rb
@@ -32,6 +32,7 @@ RSpec.describe RelativePositioning::Mover do
let_it_be(:one_free_space_set) do
indices.drop(1).map { |iid| create(:issue, project: one_free_space, iid: iid.succ) }
end
+
let_it_be(:three_sibs_set) do
[1, 2, 3].map { |iid| create(:issue, iid: iid, project: three_sibs) }
end
diff --git a/spec/lib/gitlab/repository_size_checker_spec.rb b/spec/lib/gitlab/repository_size_checker_spec.rb
index bd030d81d97..20c08da6c54 100644
--- a/spec/lib/gitlab/repository_size_checker_spec.rb
+++ b/spec/lib/gitlab/repository_size_checker_spec.rb
@@ -53,4 +53,10 @@ RSpec.describe Gitlab::RepositorySizeChecker do
describe '#exceeded_size' do
include_examples 'checker size exceeded'
end
+
+ describe '#additional_repo_storage_available?' do
+ it 'returns false' do
+ expect(subject.additional_repo_storage_available?).to eq(false)
+ end
+ end
end
diff --git a/spec/lib/gitlab/repository_size_error_message_spec.rb b/spec/lib/gitlab/repository_size_error_message_spec.rb
index 53b5ed5518f..78504d201d4 100644
--- a/spec/lib/gitlab/repository_size_error_message_spec.rb
+++ b/spec/lib/gitlab/repository_size_error_message_spec.rb
@@ -53,8 +53,18 @@ RSpec.describe Gitlab::RepositorySizeErrorMessage do
end
describe '#new_changes_error' do
- it 'returns the correct message' do
- expect(message.new_changes_error).to eq("Your push to this repository would cause it to exceed the size limit of 10 MB so it has been rejected. #{message.more_info_message}")
+ context 'when additional repo storage is available' do
+ it 'returns the correct message' do
+ allow(checker).to receive(:additional_repo_storage_available?).and_return(true)
+
+ expect(message.new_changes_error).to eq('Your push to this repository has been rejected because it would exceed storage limits. Please contact your GitLab administrator for more information.')
+ end
+ end
+
+ context 'when no additional repo storage is available' do
+ it 'returns the correct message' do
+ expect(message.new_changes_error).to eq("Your push to this repository would cause it to exceed the size limit of 10 MB so it has been rejected. #{message.more_info_message}")
+ end
end
end
end
diff --git a/spec/lib/gitlab/robots_txt/parser_spec.rb b/spec/lib/gitlab/robots_txt/parser_spec.rb
index bb88003ce20..f4e97e5e897 100644
--- a/spec/lib/gitlab/robots_txt/parser_spec.rb
+++ b/spec/lib/gitlab/robots_txt/parser_spec.rb
@@ -14,8 +14,13 @@ RSpec.describe Gitlab::RobotsTxt::Parser do
<<~TXT
User-Agent: *
Disallow: /autocomplete/users
- Disallow: /search
+ disallow: /search
Disallow: /api
+ Allow: /users
+ Disallow: /help
+ allow: /help
+ Disallow: /test$
+ Disallow: /ex$mple$
TXT
end
@@ -28,6 +33,12 @@ RSpec.describe Gitlab::RobotsTxt::Parser do
'/api/grapql' | true
'/api/index.html' | true
'/projects' | false
+ '/users' | false
+ '/help' | false
+ '/test' | true
+ '/testfoo' | false
+ '/ex$mple' | true
+ '/ex$mplefoo' | false
end
with_them do
@@ -47,6 +58,7 @@ RSpec.describe Gitlab::RobotsTxt::Parser do
Disallow: /*/*.git
Disallow: /*/archive/
Disallow: /*/repository/archive*
+ Allow: /*/repository/archive/foo
TXT
end
@@ -61,6 +73,7 @@ RSpec.describe Gitlab::RobotsTxt::Parser do
'/projects' | false
'/git' | false
'/projects/git' | false
+ '/project/repository/archive/foo' | false
end
with_them do
diff --git a/spec/lib/gitlab/search/sort_options_spec.rb b/spec/lib/gitlab/search/sort_options_spec.rb
new file mode 100644
index 00000000000..2044fdfc894
--- /dev/null
+++ b/spec/lib/gitlab/search/sort_options_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'gitlab/search/sort_options'
+
+RSpec.describe ::Gitlab::Search::SortOptions do
+ describe '.sort_and_direction' do
+ context 'using order_by and sort' do
+ it 'returns matched options' do
+ expect(described_class.sort_and_direction('created_at', 'asc')).to eq(:created_at_asc)
+ expect(described_class.sort_and_direction('created_at', 'desc')).to eq(:created_at_desc)
+ end
+ end
+
+ context 'using just sort' do
+ it 'returns matched options' do
+ expect(described_class.sort_and_direction(nil, 'created_asc')).to eq(:created_at_asc)
+ expect(described_class.sort_and_direction(nil, 'created_desc')).to eq(:created_at_desc)
+ end
+ end
+
+    context 'with an unknown option' do
+ it 'returns unknown' do
+ expect(described_class.sort_and_direction(nil, 'foo_asc')).to eq(:unknown)
+ expect(described_class.sort_and_direction(nil, 'bar_desc')).to eq(:unknown)
+ expect(described_class.sort_and_direction(nil, 'created_bar')).to eq(:unknown)
+
+ expect(described_class.sort_and_direction('created_at', 'foo')).to eq(:unknown)
+ expect(described_class.sort_and_direction('foo', 'desc')).to eq(:unknown)
+ expect(described_class.sort_and_direction('created_at', nil)).to eq(:unknown)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_cluster/cli_spec.rb b/spec/lib/gitlab/sidekiq_cluster/cli_spec.rb
index cf165d1770b..74834fb9014 100644
--- a/spec/lib/gitlab/sidekiq_cluster/cli_spec.rb
+++ b/spec/lib/gitlab/sidekiq_cluster/cli_spec.rb
@@ -108,101 +108,114 @@ RSpec.describe Gitlab::SidekiqCluster::CLI do
end
end
- context 'with --experimental-queue-selector' do
- where do
- {
- 'memory-bound queues' => {
- query: 'resource_boundary=memory',
- included_queues: %w(project_export),
- excluded_queues: %w(merge)
- },
- 'memory- or CPU-bound queues' => {
- query: 'resource_boundary=memory,cpu',
- included_queues: %w(auto_merge:auto_merge_process project_export),
- excluded_queues: %w(merge)
- },
- 'high urgency CI queues' => {
- query: 'feature_category=continuous_integration&urgency=high',
- included_queues: %w(pipeline_cache:expire_job_cache pipeline_cache:expire_pipeline_cache),
- excluded_queues: %w(merge)
- },
- 'CPU-bound high urgency CI queues' => {
- query: 'feature_category=continuous_integration&urgency=high&resource_boundary=cpu',
- included_queues: %w(pipeline_cache:expire_pipeline_cache),
- excluded_queues: %w(pipeline_cache:expire_job_cache merge)
- },
- 'CPU-bound high urgency non-CI queues' => {
- query: 'feature_category!=continuous_integration&urgency=high&resource_boundary=cpu',
- included_queues: %w(new_issue),
- excluded_queues: %w(pipeline_cache:expire_pipeline_cache)
- },
- 'CI and SCM queues' => {
- query: 'feature_category=continuous_integration|feature_category=source_code_management',
- included_queues: %w(pipeline_cache:expire_job_cache merge),
- excluded_queues: %w(mailers)
- }
- }
+ # Remove with https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/646
+ context 'with --queue-selector and --experimental-queue-selector' do
+ it 'errors' do
+ expect(Gitlab::SidekiqCluster).not_to receive(:start)
+
+ expect { cli.run(%w(--queue-selector name=foo --experimental-queue-selector name=bar)) }
+ .to raise_error(described_class::CommandError)
end
+ end
- with_them do
- it 'expands queues by attributes' do
- expect(Gitlab::SidekiqCluster).to receive(:start) do |queues, opts|
- expect(opts).to eq(default_options)
- expect(queues.first).to include(*included_queues)
- expect(queues.first).not_to include(*excluded_queues)
+ # Simplify with https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/646
+ ['--queue-selector', '--experimental-queue-selector'].each do |flag|
+ context "with #{flag}" do
+ where do
+ {
+ 'memory-bound queues' => {
+ query: 'resource_boundary=memory',
+ included_queues: %w(project_export),
+ excluded_queues: %w(merge)
+ },
+ 'memory- or CPU-bound queues' => {
+ query: 'resource_boundary=memory,cpu',
+ included_queues: %w(auto_merge:auto_merge_process project_export),
+ excluded_queues: %w(merge)
+ },
+ 'high urgency CI queues' => {
+ query: 'feature_category=continuous_integration&urgency=high',
+ included_queues: %w(pipeline_cache:expire_job_cache pipeline_cache:expire_pipeline_cache),
+ excluded_queues: %w(merge)
+ },
+ 'CPU-bound high urgency CI queues' => {
+ query: 'feature_category=continuous_integration&urgency=high&resource_boundary=cpu',
+ included_queues: %w(pipeline_cache:expire_pipeline_cache),
+ excluded_queues: %w(pipeline_cache:expire_job_cache merge)
+ },
+ 'CPU-bound high urgency non-CI queues' => {
+ query: 'feature_category!=continuous_integration&urgency=high&resource_boundary=cpu',
+ included_queues: %w(new_issue),
+ excluded_queues: %w(pipeline_cache:expire_pipeline_cache)
+ },
+ 'CI and SCM queues' => {
+ query: 'feature_category=continuous_integration|feature_category=source_code_management',
+ included_queues: %w(pipeline_cache:expire_job_cache merge),
+ excluded_queues: %w(mailers)
+ }
+ }
+ end
+
+ with_them do
+ it 'expands queues by attributes' do
+ expect(Gitlab::SidekiqCluster).to receive(:start) do |queues, opts|
+ expect(opts).to eq(default_options)
+ expect(queues.first).to include(*included_queues)
+ expect(queues.first).not_to include(*excluded_queues)
+
+ []
+ end
- []
+ cli.run(%W(#{flag} #{query}))
end
- cli.run(%W(--experimental-queue-selector #{query}))
- end
+ it 'works when negated' do
+ expect(Gitlab::SidekiqCluster).to receive(:start) do |queues, opts|
+ expect(opts).to eq(default_options)
+ expect(queues.first).not_to include(*included_queues)
+ expect(queues.first).to include(*excluded_queues)
- it 'works when negated' do
- expect(Gitlab::SidekiqCluster).to receive(:start) do |queues, opts|
- expect(opts).to eq(default_options)
- expect(queues.first).not_to include(*included_queues)
- expect(queues.first).to include(*excluded_queues)
+ []
+ end
- []
+ cli.run(%W(--negate #{flag} #{query}))
end
-
- cli.run(%W(--negate --experimental-queue-selector #{query}))
end
- end
- it 'expands multiple queue groups correctly' do
- expect(Gitlab::SidekiqCluster)
- .to receive(:start)
- .with([['chat_notification'], ['project_export']], default_options)
- .and_return([])
+ it 'expands multiple queue groups correctly' do
+ expect(Gitlab::SidekiqCluster)
+ .to receive(:start)
+ .with([['chat_notification'], ['project_export']], default_options)
+ .and_return([])
- cli.run(%w(--experimental-queue-selector feature_category=chatops&has_external_dependencies=true resource_boundary=memory&feature_category=importers))
- end
+ cli.run(%W(#{flag} feature_category=chatops&has_external_dependencies=true resource_boundary=memory&feature_category=importers))
+ end
- it 'allows the special * selector' do
- worker_queues = %w(foo bar baz)
+ it 'allows the special * selector' do
+ worker_queues = %w(foo bar baz)
- expect(Gitlab::SidekiqConfig::CliMethods)
- .to receive(:worker_queues).and_return(worker_queues)
+ expect(Gitlab::SidekiqConfig::CliMethods)
+ .to receive(:worker_queues).and_return(worker_queues)
- expect(Gitlab::SidekiqCluster)
- .to receive(:start).with([worker_queues], default_options)
+ expect(Gitlab::SidekiqCluster)
+ .to receive(:start).with([worker_queues], default_options)
- cli.run(%w(--experimental-queue-selector *))
- end
+ cli.run(%W(#{flag} *))
+ end
- it 'errors when the selector matches no queues' do
- expect(Gitlab::SidekiqCluster).not_to receive(:start)
+ it 'errors when the selector matches no queues' do
+ expect(Gitlab::SidekiqCluster).not_to receive(:start)
- expect { cli.run(%w(--experimental-queue-selector has_external_dependencies=true&has_external_dependencies=false)) }
- .to raise_error(described_class::CommandError)
- end
+ expect { cli.run(%W(#{flag} has_external_dependencies=true&has_external_dependencies=false)) }
+ .to raise_error(described_class::CommandError)
+ end
- it 'errors on an invalid query multiple queue groups correctly' do
- expect(Gitlab::SidekiqCluster).not_to receive(:start)
+        it 'errors on an invalid query for multiple queue groups' do
+ expect(Gitlab::SidekiqCluster).not_to receive(:start)
- expect { cli.run(%w(--experimental-queue-selector unknown_field=chatops)) }
- .to raise_error(Gitlab::SidekiqConfig::CliMethods::QueryError)
+ expect { cli.run(%W(#{flag} unknown_field=chatops)) }
+ .to raise_error(Gitlab::SidekiqConfig::CliMethods::QueryError)
+ end
end
end
end
diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
index ad106837c47..b99a5352717 100644
--- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
@@ -119,6 +119,10 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end
context 'with SIDEKIQ_LOG_ARGUMENTS disabled' do
+ before do
+ stub_env('SIDEKIQ_LOG_ARGUMENTS', '0')
+ end
+
it 'logs start and end of job without args' do
Timecop.freeze(timestamp) do
expect(logger).to receive(:info).with(start_payload.except('args')).ordered
@@ -150,8 +154,8 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
it 'logs with scheduling latency' do
Timecop.freeze(timestamp) do
- expect(logger).to receive(:info).with(start_payload.except('args')).ordered
- expect(logger).to receive(:info).with(end_payload.except('args')).ordered
+ expect(logger).to receive(:info).with(start_payload).ordered
+ expect(logger).to receive(:info).with(end_payload).ordered
expect(subject).to receive(:log_job_start).and_call_original
expect(subject).to receive(:log_job_done).and_call_original
@@ -173,12 +177,12 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end
let(:expected_end_payload) do
- end_payload.except('args').merge(timing_data)
+ end_payload.merge(timing_data)
end
it 'logs with Gitaly and Rugged timing data' do
Timecop.freeze(timestamp) do
- expect(logger).to receive(:info).with(start_payload.except('args')).ordered
+ expect(logger).to receive(:info).with(start_payload).ordered
expect(logger).to receive(:info).with(expected_end_payload).ordered
subject.call(job, 'test_queue') do
@@ -194,10 +198,10 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
allow(Process).to receive(:clock_gettime).and_call_original
end
- let(:expected_start_payload) { start_payload.except('args') }
+ let(:expected_start_payload) { start_payload }
let(:expected_end_payload) do
- end_payload.except('args').merge('cpu_s' => a_value >= 0)
+ end_payload.merge('cpu_s' => a_value >= 0)
end
let(:expected_end_payload_with_db) do
@@ -228,10 +232,10 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end
context 'when there is extra metadata set for the done log' do
- let(:expected_start_payload) { start_payload.except('args') }
+ let(:expected_start_payload) { start_payload }
let(:expected_end_payload) do
- end_payload.except('args').merge("#{ApplicationWorker::LOGGING_EXTRA_KEY}.key1" => 15, "#{ApplicationWorker::LOGGING_EXTRA_KEY}.key2" => 16)
+ end_payload.merge("#{ApplicationWorker::LOGGING_EXTRA_KEY}.key1" => 15, "#{ApplicationWorker::LOGGING_EXTRA_KEY}.key2" => 16)
end
it 'logs it in the done log' do
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/client_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/client_spec.rb
index 98350fb9b8e..4d12e4b3f6f 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/client_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/client_spec.rb
@@ -3,79 +3,84 @@
require 'spec_helper'
RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::Client, :clean_gitlab_redis_queues do
- let(:worker_class) do
- Class.new do
- def self.name
- 'TestDeduplicationWorker'
- end
+ shared_context 'deduplication worker class' do |strategy, including_scheduled|
+ let(:worker_class) do
+ Class.new do
+ def self.name
+ 'TestDeduplicationWorker'
+ end
+
+ include ApplicationWorker
+
+ deduplicate strategy, including_scheduled: including_scheduled
- include ApplicationWorker
+ include ApplicationWorker
- def perform(*args)
+ def perform(*args)
+ end
end
end
- end
- before do
- stub_const('TestDeduplicationWorker', worker_class)
+ before do
+ stub_const('TestDeduplicationWorker', worker_class)
+ end
end
- describe '#call' do
- it 'adds a correct duplicate tag to the jobs', :aggregate_failures do
- TestDeduplicationWorker.bulk_perform_async([['args1'], ['args2'], ['args1']])
+ shared_examples 'client duplicate job' do |strategy|
+ describe '#call' do
+ include_context 'deduplication worker class', strategy, false
- job1, job2, job3 = TestDeduplicationWorker.jobs
-
- expect(job1['duplicate-of']).to be_nil
- expect(job2['duplicate-of']).to be_nil
- expect(job3['duplicate-of']).to eq(job1['jid'])
- end
-
- context 'without scheduled deduplication' do
- it "does not mark a job that's scheduled in the future as a duplicate" do
- TestDeduplicationWorker.perform_async('args1')
- TestDeduplicationWorker.perform_at(1.day.from_now, 'args1')
- TestDeduplicationWorker.perform_in(3.hours, 'args1')
+ it 'adds a correct duplicate tag to the jobs', :aggregate_failures do
+ TestDeduplicationWorker.bulk_perform_async([['args1'], ['args2'], ['args1']])
- duplicates = TestDeduplicationWorker.jobs.map { |job| job['duplicate-of'] }
+ job1, job2, job3 = TestDeduplicationWorker.jobs
- expect(duplicates).to all(be_nil)
+ expect(job1['duplicate-of']).to be_nil
+ expect(job2['duplicate-of']).to be_nil
+ expect(job3['duplicate-of']).to eq(job1['jid'])
end
- end
-
- context 'with scheduled deduplication' do
- let(:scheduled_worker_class) do
- Class.new do
- def self.name
- 'TestDeduplicationWorker'
- end
- include ApplicationWorker
+ context 'without scheduled deduplication' do
+ it "does not mark a job that's scheduled in the future as a duplicate" do
+ TestDeduplicationWorker.perform_async('args1')
+ TestDeduplicationWorker.perform_at(1.day.from_now, 'args1')
+ TestDeduplicationWorker.perform_in(3.hours, 'args1')
- deduplicate :until_executing, including_scheduled: true
+ duplicates = TestDeduplicationWorker.jobs.map { |job| job['duplicate-of'] }
- def perform(*args)
- end
+ expect(duplicates).to all(be_nil)
end
end
- before do
- stub_const('TestDeduplicationWorker', scheduled_worker_class)
- end
+ context 'with scheduled deduplication' do
+ include_context 'deduplication worker class', strategy, true
- it 'adds a correct duplicate tag to the jobs', :aggregate_failures do
- TestDeduplicationWorker.perform_async('args1')
- TestDeduplicationWorker.perform_at(1.day.from_now, 'args1')
- TestDeduplicationWorker.perform_in(3.hours, 'args1')
- TestDeduplicationWorker.perform_in(3.hours, 'args2')
+ before do
+ stub_const('TestDeduplicationWorker', worker_class)
+ end
- job1, job2, job3, job4 = TestDeduplicationWorker.jobs
+ it 'adds a correct duplicate tag to the jobs', :aggregate_failures do
+ TestDeduplicationWorker.perform_async('args1')
+ TestDeduplicationWorker.perform_at(1.day.from_now, 'args1')
+ TestDeduplicationWorker.perform_in(3.hours, 'args1')
+ TestDeduplicationWorker.perform_in(3.hours, 'args2')
- expect(job1['duplicate-of']).to be_nil
- expect(job2['duplicate-of']).to eq(job1['jid'])
- expect(job3['duplicate-of']).to eq(job1['jid'])
- expect(job4['duplicate-of']).to be_nil
+ job1, job2, job3, job4 = TestDeduplicationWorker.jobs
+
+ expect(job1['duplicate-of']).to be_nil
+ expect(job2['duplicate-of']).to eq(job1['jid'])
+ expect(job3['duplicate-of']).to eq(job1['jid'])
+ expect(job4['duplicate-of']).to be_nil
+ end
end
end
end
+
+ context 'with until_executing strategy' do
+ it_behaves_like 'client duplicate job', :until_executing
+ end
+
+ context 'with until_executed strategy' do
+ it_behaves_like 'client duplicate job', :until_executed
+ end
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/server_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/server_spec.rb
index 3f75d867936..09548d21106 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/server_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/server_spec.rb
@@ -3,39 +3,71 @@
require 'spec_helper'
RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::Server, :clean_gitlab_redis_queues do
- let(:worker_class) do
- Class.new do
- def self.name
- 'TestDeduplicationWorker'
+ shared_context 'server duplicate job' do |strategy|
+ let(:worker_class) do
+ Class.new do
+ def self.name
+ 'TestDeduplicationWorker'
+ end
+
+ include ApplicationWorker
+
+ deduplicate strategy
+
+ def perform(*args)
+ self.class.work
+ end
+
+ def self.work
+ end
end
+ end
- include ApplicationWorker
+ before do
+ stub_const('TestDeduplicationWorker', worker_class)
+ end
- def perform(*args)
+ around do |example|
+ with_sidekiq_server_middleware do |chain|
+ chain.add described_class
+ Sidekiq::Testing.inline! { example.run }
end
end
end
- before do
- stub_const('TestDeduplicationWorker', worker_class)
- end
+ context 'with until_executing strategy' do
+ include_context 'server duplicate job', :until_executing
- around do |example|
- with_sidekiq_server_middleware do |chain|
- chain.add described_class
- Sidekiq::Testing.inline! { example.run }
+ describe '#call' do
+ it 'removes the stored job from redis before execution' do
+ bare_job = { 'class' => 'TestDeduplicationWorker', 'args' => ['hello'] }
+ job_definition = Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob.new(bare_job.dup, 'test_deduplication')
+
+ expect(Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob)
+ .to receive(:new).with(a_hash_including(bare_job), 'test_deduplication')
+ .and_return(job_definition).twice # once in client middleware
+
+ expect(job_definition).to receive(:delete!).ordered.and_call_original
+ expect(TestDeduplicationWorker).to receive(:work).ordered.and_call_original
+
+ TestDeduplicationWorker.perform_async('hello')
+ end
end
end
- describe '#call' do
- it 'removes the stored job from redis' do
+ context 'with until_executed strategy' do
+ include_context 'server duplicate job', :until_executed
+
+ it 'removes the stored job from redis after execution' do
bare_job = { 'class' => 'TestDeduplicationWorker', 'args' => ['hello'] }
job_definition = Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob.new(bare_job.dup, 'test_deduplication')
expect(Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob)
.to receive(:new).with(a_hash_including(bare_job), 'test_deduplication')
.and_return(job_definition).twice # once in client middleware
- expect(job_definition).to receive(:delete!).and_call_original
+
+ expect(TestDeduplicationWorker).to receive(:work).ordered.and_call_original
+ expect(job_definition).to receive(:delete!).ordered.and_call_original
TestDeduplicationWorker.perform_async('hello')
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executed_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executed_spec.rb
new file mode 100644
index 00000000000..b3d463b6f6b
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executed_spec.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::Strategies::UntilExecuted do
+ it_behaves_like 'deduplicating jobs when scheduling', :until_executed do
+ describe '#perform' do
+ let(:proc) { -> {} }
+
+ it 'deletes the lock after executing' do
+ expect(proc).to receive(:call).ordered
+ expect(fake_duplicate_job).to receive(:delete!).ordered
+
+ strategy.perform({}) do
+ proc.call
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing_spec.rb
index 10b18052e9a..d45b6c5fcd1 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing_spec.rb
@@ -1,146 +1,20 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper'
RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::Strategies::UntilExecuting do
- let(:fake_duplicate_job) do
- instance_double(Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob)
- end
-
- subject(:strategy) { described_class.new(fake_duplicate_job) }
-
- describe '#schedule' do
- before do
- allow(Gitlab::SidekiqLogging::DeduplicationLogger.instance).to receive(:log)
- end
-
- it 'checks for duplicates before yielding' do
- expect(fake_duplicate_job).to receive(:scheduled?).twice.ordered.and_return(false)
- expect(fake_duplicate_job).to(
- receive(:check!)
- .with(Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob::DUPLICATE_KEY_TTL)
- .ordered
- .and_return('a jid'))
- expect(fake_duplicate_job).to receive(:duplicate?).ordered.and_return(false)
-
- expect { |b| strategy.schedule({}, &b) }.to yield_control
- end
-
- it 'checks worker options for scheduled jobs' do
- expect(fake_duplicate_job).to receive(:scheduled?).ordered.and_return(true)
- expect(fake_duplicate_job).to receive(:options).ordered.and_return({})
- expect(fake_duplicate_job).not_to receive(:check!)
-
- expect { |b| strategy.schedule({}, &b) }.to yield_control
- end
-
- context 'job marking' do
- it 'adds the jid of the existing job to the job hash' do
- allow(fake_duplicate_job).to receive(:scheduled?).and_return(false)
- allow(fake_duplicate_job).to receive(:check!).and_return('the jid')
- allow(fake_duplicate_job).to receive(:droppable?).and_return(true)
- allow(fake_duplicate_job).to receive(:options).and_return({})
- job_hash = {}
+ it_behaves_like 'deduplicating jobs when scheduling', :until_executing do
+ describe '#perform' do
+ let(:proc) { -> {} }
- expect(fake_duplicate_job).to receive(:duplicate?).and_return(true)
- expect(fake_duplicate_job).to receive(:existing_jid).and_return('the jid')
+ it 'deletes the lock before executing' do
+ expect(fake_duplicate_job).to receive(:delete!).ordered
+ expect(proc).to receive(:call).ordered
- strategy.schedule(job_hash) {}
-
- expect(job_hash).to include('duplicate-of' => 'the jid')
- end
-
- context 'scheduled jobs' do
- let(:time_diff) { 1.minute }
-
- context 'scheduled in the past' do
- it 'adds the jid of the existing job to the job hash' do
- allow(fake_duplicate_job).to receive(:scheduled?).twice.and_return(true)
- allow(fake_duplicate_job).to receive(:scheduled_at).and_return(Time.now - time_diff)
- allow(fake_duplicate_job).to receive(:options).and_return({ including_scheduled: true })
- allow(fake_duplicate_job).to(
- receive(:check!)
- .with(Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob::DUPLICATE_KEY_TTL)
- .and_return('the jid'))
- allow(fake_duplicate_job).to receive(:droppable?).and_return(true)
- job_hash = {}
-
- expect(fake_duplicate_job).to receive(:duplicate?).and_return(true)
- expect(fake_duplicate_job).to receive(:existing_jid).and_return('the jid')
-
- strategy.schedule(job_hash) {}
-
- expect(job_hash).to include('duplicate-of' => 'the jid')
- end
+ strategy.perform({}) do
+ proc.call
end
-
- context 'scheduled in the future' do
- it 'adds the jid of the existing job to the job hash' do
- freeze_time do
- allow(fake_duplicate_job).to receive(:scheduled?).twice.and_return(true)
- allow(fake_duplicate_job).to receive(:scheduled_at).and_return(Time.now + time_diff)
- allow(fake_duplicate_job).to receive(:options).and_return({ including_scheduled: true })
- allow(fake_duplicate_job).to(
- receive(:check!).with(time_diff.to_i).and_return('the jid'))
- allow(fake_duplicate_job).to receive(:droppable?).and_return(true)
- job_hash = {}
-
- expect(fake_duplicate_job).to receive(:duplicate?).and_return(true)
- expect(fake_duplicate_job).to receive(:existing_jid).and_return('the jid')
-
- strategy.schedule(job_hash) {}
-
- expect(job_hash).to include('duplicate-of' => 'the jid')
- end
- end
- end
- end
- end
-
- context "when the job is droppable" do
- before do
- allow(fake_duplicate_job).to receive(:scheduled?).and_return(false)
- allow(fake_duplicate_job).to receive(:check!).and_return('the jid')
- allow(fake_duplicate_job).to receive(:duplicate?).and_return(true)
- allow(fake_duplicate_job).to receive(:options).and_return({})
- allow(fake_duplicate_job).to receive(:existing_jid).and_return('the jid')
- allow(fake_duplicate_job).to receive(:droppable?).and_return(true)
- end
-
- it 'drops the job' do
- schedule_result = nil
-
- expect(fake_duplicate_job).to receive(:droppable?).and_return(true)
-
- expect { |b| schedule_result = strategy.schedule({}, &b) }.not_to yield_control
- expect(schedule_result).to be(false)
- end
-
- it 'logs that the job was dropped' do
- fake_logger = instance_double(Gitlab::SidekiqLogging::DeduplicationLogger)
-
- expect(Gitlab::SidekiqLogging::DeduplicationLogger).to receive(:instance).and_return(fake_logger)
- expect(fake_logger).to receive(:log).with(a_hash_including({ 'jid' => 'new jid' }), 'dropped until executing', {})
-
- strategy.schedule({ 'jid' => 'new jid' }) {}
- end
-
- it 'logs the deduplication options of the worker' do
- fake_logger = instance_double(Gitlab::SidekiqLogging::DeduplicationLogger)
-
- expect(Gitlab::SidekiqLogging::DeduplicationLogger).to receive(:instance).and_return(fake_logger)
- allow(fake_duplicate_job).to receive(:options).and_return({ foo: :bar })
- expect(fake_logger).to receive(:log).with(a_hash_including({ 'jid' => 'new jid' }), 'dropped until executing', { foo: :bar })
-
- strategy.schedule({ 'jid' => 'new jid' }) {}
end
end
end
-
- describe '#perform' do
- it 'deletes the lock before executing' do
- expect(fake_duplicate_job).to receive(:delete!).ordered
- expect { |b| strategy.perform({}, &b) }.to yield_control
- end
- end
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies_spec.rb
index 84856238aab..e35d779f334 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies_spec.rb
@@ -8,6 +8,10 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::Strategies do
expect(described_class.for(:until_executing)).to eq(described_class::UntilExecuting)
end
+ it 'returns the right class for `until_executed`' do
+ expect(described_class.for(:until_executed)).to eq(described_class::UntilExecuted)
+ end
+
it 'returns the right class for `none`' do
expect(described_class.for(:none)).to eq(described_class::None)
end
diff --git a/spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb b/spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb
index 2f761b69e60..0b2055d3db5 100644
--- a/spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb
+++ b/spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb
@@ -58,25 +58,9 @@ RSpec.describe Gitlab::StaticSiteEditor::Config::GeneratedConfig do
)
end
- context 'when feature flag is enabled' do
- let(:path) { 'FEATURE_ON.md.erb' }
+ let(:path) { 'README.md.erb' }
- before do
- stub_feature_flags(sse_erb_support: project)
- end
-
- it { is_expected.to include(is_supported_content: true) }
- end
-
- context 'when feature flag is disabled' do
- let(:path) { 'FEATURE_OFF.md.erb' }
-
- before do
- stub_feature_flags(sse_erb_support: false)
- end
-
- it { is_expected.to include(is_supported_content: false) }
- end
+ it { is_expected.to include(is_supported_content: true) }
end
context 'when file path is nested' do
diff --git a/spec/lib/gitlab/throttle_spec.rb b/spec/lib/gitlab/throttle_spec.rb
index ca2abe94ad2..7462b2e1c38 100644
--- a/spec/lib/gitlab/throttle_spec.rb
+++ b/spec/lib/gitlab/throttle_spec.rb
@@ -12,4 +12,22 @@ RSpec.describe Gitlab::Throttle do
subject
end
end
+
+ describe '.bypass_header' do
+ subject { described_class.bypass_header }
+
+ it 'is nil' do
+ expect(subject).to be_nil
+ end
+
+ context 'when a header is configured' do
+ before do
+ stub_env('GITLAB_THROTTLE_BYPASS_HEADER', 'My-Custom-Header')
+ end
+
+ it 'is a funny upper case rack key' do
+ expect(subject).to eq('HTTP_MY_CUSTOM_HEADER')
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/tracking/destinations/snowplow_spec.rb b/spec/lib/gitlab/tracking/destinations/snowplow_spec.rb
new file mode 100644
index 00000000000..ee63eb6de04
--- /dev/null
+++ b/spec/lib/gitlab/tracking/destinations/snowplow_spec.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Tracking::Destinations::Snowplow do
+ let(:emitter) { SnowplowTracker::Emitter.new('localhost', buffer_size: 1) }
+ let(:tracker) { SnowplowTracker::Tracker.new(emitter, SnowplowTracker::Subject.new, 'namespace', 'app_id') }
+
+ before do
+ stub_application_setting(snowplow_collector_hostname: 'gitfoo.com')
+ stub_application_setting(snowplow_app_id: '_abc123_')
+ end
+
+ around do |example|
+ freeze_time { example.run }
+ end
+
+ context 'when snowplow is enabled' do
+ before do
+ stub_application_setting(snowplow_enabled: true)
+
+ expect(SnowplowTracker::AsyncEmitter)
+ .to receive(:new)
+ .with('gitfoo.com', { protocol: 'https' })
+ .and_return(emitter)
+
+ expect(SnowplowTracker::Tracker)
+ .to receive(:new)
+ .with(emitter, an_instance_of(SnowplowTracker::Subject), Gitlab::Tracking::SNOWPLOW_NAMESPACE, '_abc123_')
+ .and_return(tracker)
+ end
+
+ describe '#event' do
+ it 'sends event to tracker' do
+ allow(tracker).to receive(:track_struct_event).and_call_original
+
+ subject.event('category', 'action', label: 'label', property: 'property', value: 1.5)
+
+ expect(tracker)
+ .to have_received(:track_struct_event)
+ .with('category', 'action', 'label', 'property', 1.5, nil, (Time.now.to_f * 1000).to_i)
+ end
+ end
+
+ describe '#self_describing_event' do
+ it 'sends event to tracker' do
+ allow(tracker).to receive(:track_self_describing_event).and_call_original
+
+ subject.self_describing_event('iglu:com.gitlab/foo/jsonschema/1-0-0', foo: 'bar')
+
+ expect(tracker).to have_received(:track_self_describing_event) do |event, context, timestamp|
+ expect(event.to_json[:schema]).to eq('iglu:com.gitlab/foo/jsonschema/1-0-0')
+ expect(event.to_json[:data]).to eq(foo: 'bar')
+ expect(context).to eq(nil)
+ expect(timestamp).to eq((Time.now.to_f * 1000).to_i)
+ end
+ end
+ end
+ end
+
+ context 'when snowplow is not enabled' do
+ describe '#event' do
+ it 'does not send event to tracker' do
+ expect_any_instance_of(SnowplowTracker::Tracker).not_to receive(:track_struct_event)
+
+ subject.event('category', 'action', label: 'label', property: 'property', value: 1.5)
+ end
+ end
+
+ describe '#self_describing_event' do
+ it 'does not send event to tracker' do
+ expect_any_instance_of(SnowplowTracker::Tracker).not_to receive(:track_self_describing_event)
+
+ subject.self_describing_event('iglu:com.gitlab/foo/jsonschema/1-0-0', foo: 'bar')
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/tracking/incident_management_spec.rb b/spec/lib/gitlab/tracking/incident_management_spec.rb
index 9c49c76ead7..fbcb9bf3e4c 100644
--- a/spec/lib/gitlab/tracking/incident_management_spec.rb
+++ b/spec/lib/gitlab/tracking/incident_management_spec.rb
@@ -62,7 +62,7 @@ RSpec.describe Gitlab::Tracking::IncidentManagement do
context 'param without label' do
let(:params) { { create_issue: '1' } }
- it_behaves_like 'a tracked event', "enabled_issue_auto_creation_on_alerts", {}
+ it_behaves_like 'a tracked event', "enabled_issue_auto_creation_on_alerts"
end
end
diff --git a/spec/lib/gitlab/tracking_spec.rb b/spec/lib/gitlab/tracking_spec.rb
index 6ddeaf98370..805bd92fd43 100644
--- a/spec/lib/gitlab/tracking_spec.rb
+++ b/spec/lib/gitlab/tracking_spec.rb
@@ -2,13 +2,13 @@
require 'spec_helper'
RSpec.describe Gitlab::Tracking do
- let(:timestamp) { Time.utc(2017, 3, 22) }
-
before do
stub_application_setting(snowplow_enabled: true)
stub_application_setting(snowplow_collector_hostname: 'gitfoo.com')
stub_application_setting(snowplow_cookie_domain: '.gitfoo.com')
stub_application_setting(snowplow_app_id: '_abc123_')
+
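+    # reset the memoized Snowplow destination so the stubbed settings take effect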
+ described_class.instance_variable_set("@snowplow", nil)
end
describe '.snowplow_options' do
@@ -35,99 +35,23 @@ RSpec.describe Gitlab::Tracking do
end
end
- describe 'tracking events' do
- shared_examples 'events not tracked' do
- it 'does not track events' do
- stub_application_setting(snowplow_enabled: false)
- expect(SnowplowTracker::AsyncEmitter).not_to receive(:new)
- expect(SnowplowTracker::Tracker).not_to receive(:new)
-
- track_event
- end
- end
-
- around do |example|
- travel_to(timestamp) { example.run }
- end
-
- before do
- described_class.instance_variable_set("@snowplow", nil)
- end
-
- let(:tracker) { double }
-
- def receive_events
- expect(SnowplowTracker::AsyncEmitter).to receive(:new).with(
- 'gitfoo.com', { protocol: 'https' }
- ).and_return('_emitter_')
+ describe '.event' do
+ it 'delegates to snowplow destination' do
+ expect_any_instance_of(Gitlab::Tracking::Destinations::Snowplow)
+ .to receive(:event)
+ .with('category', 'action', label: 'label', property: 'property', value: 1.5, context: nil)
- expect(SnowplowTracker::Tracker).to receive(:new).with(
- '_emitter_',
- an_instance_of(SnowplowTracker::Subject),
- 'gl',
- '_abc123_'
- ).and_return(tracker)
+ described_class.event('category', 'action', label: 'label', property: 'property', value: 1.5)
end
+ end
- describe '.event' do
- let(:track_event) do
- described_class.event('category', 'action',
- label: '_label_',
- property: '_property_',
- value: '_value_',
- context: nil
- )
- end
-
- it_behaves_like 'events not tracked'
-
- it 'can track events' do
- receive_events
- expect(tracker).to receive(:track_struct_event).with(
- 'category',
- 'action',
- '_label_',
- '_property_',
- '_value_',
- nil,
- (timestamp.to_f * 1000).to_i
- )
-
- track_event
- end
- end
-
- describe '.self_describing_event' do
- let(:track_event) do
- described_class.self_describing_event('iglu:com.gitlab/example/jsonschema/1-0-2',
- {
- foo: 'bar',
- foo_count: 42
- },
- context: nil
- )
- end
-
- it_behaves_like 'events not tracked'
-
- it 'can track self describing events' do
- receive_events
- expect(SnowplowTracker::SelfDescribingJson).to receive(:new).with(
- 'iglu:com.gitlab/example/jsonschema/1-0-2',
- {
- foo: 'bar',
- foo_count: 42
- }
- ).and_return('_event_json_')
-
- expect(tracker).to receive(:track_self_describing_event).with(
- '_event_json_',
- nil,
- (timestamp.to_f * 1000).to_i
- )
+ describe '.self_describing_event' do
+ it 'delegates to snowplow destination' do
+ expect_any_instance_of(Gitlab::Tracking::Destinations::Snowplow)
+ .to receive(:self_describing_event)
+ .with('iglu:com.gitlab/foo/jsonschema/1-0-0', { foo: 'bar' }, context: nil)
- track_event
- end
+ described_class.self_describing_event('iglu:com.gitlab/foo/jsonschema/1-0-0', foo: 'bar')
end
end
end
diff --git a/spec/lib/gitlab/url_blocker_spec.rb b/spec/lib/gitlab/url_blocker_spec.rb
index b49efd6a092..f466d117851 100644
--- a/spec/lib/gitlab/url_blocker_spec.rb
+++ b/spec/lib/gitlab/url_blocker_spec.rb
@@ -350,7 +350,7 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do
expect(described_class).to be_blocked_url('http://[fe80::c800:eff:fe74:8]', allow_local_network: false)
end
- context 'when local domain/IP is whitelisted' do
+ context 'when local domain/IP is allowed' do
let(:url_blocker_attributes) do
{
allow_localhost: false,
@@ -360,11 +360,11 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do
before do
allow(ApplicationSetting).to receive(:current).and_return(ApplicationSetting.new)
- stub_application_setting(outbound_local_requests_whitelist: whitelist)
+ stub_application_setting(outbound_local_requests_whitelist: allowlist)
end
- context 'with IPs in whitelist' do
- let(:whitelist) do
+ context 'with IPs in allowlist' do
+ let(:allowlist) do
[
'0.0.0.0',
'127.0.0.1',
@@ -396,7 +396,7 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do
it_behaves_like 'allows local requests', { allow_localhost: false, allow_local_network: false }
- it 'whitelists IP when dns_rebind_protection is disabled' do
+ it 'allows IP when dns_rebind_protection is disabled' do
url = "http://example.com"
attrs = url_blocker_attributes.merge(dns_rebind_protection: false)
@@ -410,8 +410,8 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do
end
end
- context 'with domains in whitelist' do
- let(:whitelist) do
+ context 'with domains in allowlist' do
+ let(:allowlist) do
[
'www.example.com',
'example.com',
@@ -420,7 +420,7 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do
]
end
- it 'allows domains present in whitelist' do
+ it 'allows domains present in allowlist' do
domain = 'example.com'
subdomain1 = 'www.example.com'
subdomain2 = 'subdomain.example.com'
@@ -435,7 +435,7 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do
url_blocker_attributes)
end
- # subdomain2 is not part of the whitelist so it should be blocked
+ # subdomain2 is not part of the allowlist so it should be blocked
stub_domain_resolv(subdomain2, '192.168.1.1') do
expect(described_class).to be_blocked_url("http://#{subdomain2}",
url_blocker_attributes)
@@ -458,8 +458,8 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do
end
shared_examples 'dns rebinding checks' do
- shared_examples 'whitelists the domain' do
- let(:whitelist) { [domain] }
+ shared_examples 'allowlists the domain' do
+ let(:allowlist) { [domain] }
let(:url) { "http://#{domain}" }
before do
@@ -475,13 +475,13 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do
context 'enabled' do
let(:dns_rebind_value) { true }
- it_behaves_like 'whitelists the domain'
+ it_behaves_like 'allowlists the domain'
end
context 'disabled' do
let(:dns_rebind_value) { false }
- it_behaves_like 'whitelists the domain'
+ it_behaves_like 'allowlists the domain'
end
end
end
@@ -504,11 +504,11 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do
end
context 'with ports' do
- let(:whitelist) do
+ let(:allowlist) do
["127.0.0.1:2000"]
end
- it 'allows domain with port when resolved ip has port whitelisted' do
+ it 'allows domain with port when resolved ip has port allowed' do
stub_domain_resolv("www.resolve-domain.com", '127.0.0.1') do
expect(described_class).not_to be_blocked_url("http://www.resolve-domain.com:2000", url_blocker_attributes)
end
diff --git a/spec/lib/gitlab/url_blockers/domain_allowlist_entry_spec.rb b/spec/lib/gitlab/url_blockers/domain_allowlist_entry_spec.rb
new file mode 100644
index 00000000000..ece0a018d53
--- /dev/null
+++ b/spec/lib/gitlab/url_blockers/domain_allowlist_entry_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::UrlBlockers::DomainAllowlistEntry do
+ let(:domain) { 'www.example.com' }
+
+ describe '#initialize' do
+ it 'initializes without port' do
+ domain_allowlist_entry = described_class.new(domain)
+
+ expect(domain_allowlist_entry.domain).to eq(domain)
+ expect(domain_allowlist_entry.port).to be(nil)
+ end
+
+ it 'initializes with port' do
+ port = 8080
+ domain_allowlist_entry = described_class.new(domain, port: port)
+
+ expect(domain_allowlist_entry.domain).to eq(domain)
+ expect(domain_allowlist_entry.port).to eq(port)
+ end
+ end
+
+ describe '#match?' do
+ it 'matches when domain and port are equal' do
+ port = 8080
+ domain_allowlist_entry = described_class.new(domain, port: port)
+
+ expect(domain_allowlist_entry).to be_match(domain, port)
+ end
+
+ it 'matches any port when port is nil' do
+ domain_allowlist_entry = described_class.new(domain)
+
+ expect(domain_allowlist_entry).to be_match(domain, 8080)
+ expect(domain_allowlist_entry).to be_match(domain, 9090)
+ end
+
+ it 'does not match when port is present but requested_port is nil' do
+ domain_allowlist_entry = described_class.new(domain, port: 8080)
+
+ expect(domain_allowlist_entry).not_to be_match(domain, nil)
+ end
+
+ it 'matches when port and requested_port are nil' do
+ domain_allowlist_entry = described_class.new(domain)
+
+ expect(domain_allowlist_entry).to be_match(domain)
+ end
+
+ it 'does not match if domain is not equal' do
+ domain_allowlist_entry = described_class.new(domain)
+
+ expect(domain_allowlist_entry).not_to be_match('www.gitlab.com', 8080)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/url_blockers/domain_whitelist_entry_spec.rb b/spec/lib/gitlab/url_blockers/domain_whitelist_entry_spec.rb
deleted file mode 100644
index 58bae109146..00000000000
--- a/spec/lib/gitlab/url_blockers/domain_whitelist_entry_spec.rb
+++ /dev/null
@@ -1,58 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::UrlBlockers::DomainWhitelistEntry do
- let(:domain) { 'www.example.com' }
-
- describe '#initialize' do
- it 'initializes without port' do
- domain_whitelist_entry = described_class.new(domain)
-
- expect(domain_whitelist_entry.domain).to eq(domain)
- expect(domain_whitelist_entry.port).to be(nil)
- end
-
- it 'initializes with port' do
- port = 8080
- domain_whitelist_entry = described_class.new(domain, port: port)
-
- expect(domain_whitelist_entry.domain).to eq(domain)
- expect(domain_whitelist_entry.port).to eq(port)
- end
- end
-
- describe '#match?' do
- it 'matches when domain and port are equal' do
- port = 8080
- domain_whitelist_entry = described_class.new(domain, port: port)
-
- expect(domain_whitelist_entry).to be_match(domain, port)
- end
-
- it 'matches any port when port is nil' do
- domain_whitelist_entry = described_class.new(domain)
-
- expect(domain_whitelist_entry).to be_match(domain, 8080)
- expect(domain_whitelist_entry).to be_match(domain, 9090)
- end
-
- it 'does not match when port is present but requested_port is nil' do
- domain_whitelist_entry = described_class.new(domain, port: 8080)
-
- expect(domain_whitelist_entry).not_to be_match(domain, nil)
- end
-
- it 'matches when port and requested_port are nil' do
- domain_whitelist_entry = described_class.new(domain)
-
- expect(domain_whitelist_entry).to be_match(domain)
- end
-
- it 'does not match if domain is not equal' do
- domain_whitelist_entry = described_class.new(domain)
-
- expect(domain_whitelist_entry).not_to be_match('www.gitlab.com', 8080)
- end
- end
-end
diff --git a/spec/lib/gitlab/url_blockers/ip_allowlist_entry_spec.rb b/spec/lib/gitlab/url_blockers/ip_allowlist_entry_spec.rb
new file mode 100644
index 00000000000..110a6c17adb
--- /dev/null
+++ b/spec/lib/gitlab/url_blockers/ip_allowlist_entry_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::UrlBlockers::IpAllowlistEntry do
+ let(:ipv4) { IPAddr.new('192.168.1.1') }
+
+ describe '#initialize' do
+ it 'initializes without port' do
+ ip_allowlist_entry = described_class.new(ipv4)
+
+ expect(ip_allowlist_entry.ip).to eq(ipv4)
+ expect(ip_allowlist_entry.port).to be(nil)
+ end
+
+ it 'initializes with port' do
+ port = 8080
+ ip_allowlist_entry = described_class.new(ipv4, port: port)
+
+ expect(ip_allowlist_entry.ip).to eq(ipv4)
+ expect(ip_allowlist_entry.port).to eq(port)
+ end
+ end
+
+ describe '#match?' do
+ it 'matches with equivalent IP and port' do
+ port = 8080
+ ip_allowlist_entry = described_class.new(ipv4, port: port)
+
+ expect(ip_allowlist_entry).to be_match(ipv4.to_s, port)
+ end
+
+ it 'matches any port when port is nil' do
+ ip_allowlist_entry = described_class.new(ipv4)
+
+ expect(ip_allowlist_entry).to be_match(ipv4.to_s, 8080)
+ expect(ip_allowlist_entry).to be_match(ipv4.to_s, 9090)
+ end
+
+ it 'does not match when port is present but requested_port is nil' do
+ ip_allowlist_entry = described_class.new(ipv4, port: 8080)
+
+ expect(ip_allowlist_entry).not_to be_match(ipv4.to_s, nil)
+ end
+
+ it 'matches when port and requested_port are nil' do
+ ip_allowlist_entry = described_class.new(ipv4)
+
+ expect(ip_allowlist_entry).to be_match(ipv4.to_s)
+ end
+
+ it 'works with ipv6' do
+ ipv6 = IPAddr.new('fe80::c800:eff:fe74:8')
+ ip_allowlist_entry = described_class.new(ipv6)
+
+ expect(ip_allowlist_entry).to be_match(ipv6.to_s, 8080)
+ end
+
+ it 'matches ipv4 within IPv4 range' do
+ ipv4_range = IPAddr.new('127.0.0.0/28')
+ ip_allowlist_entry = described_class.new(ipv4_range)
+
+ expect(ip_allowlist_entry).to be_match(ipv4_range.to_range.last.to_s, 8080)
+ expect(ip_allowlist_entry).not_to be_match('127.0.1.1', 8080)
+ end
+
+ it 'matches IPv6 within IPv6 range' do
+ ipv6_range = IPAddr.new('fd84:6d02:f6d8:c89e::/124')
+ ip_allowlist_entry = described_class.new(ipv6_range)
+
+ expect(ip_allowlist_entry).to be_match(ipv6_range.to_range.last.to_s, 8080)
+ expect(ip_allowlist_entry).not_to be_match('fd84:6d02:f6d8:f::f', 8080)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/url_blockers/ip_whitelist_entry_spec.rb b/spec/lib/gitlab/url_blockers/ip_whitelist_entry_spec.rb
deleted file mode 100644
index 52f9b31165a..00000000000
--- a/spec/lib/gitlab/url_blockers/ip_whitelist_entry_spec.rb
+++ /dev/null
@@ -1,75 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::UrlBlockers::IpWhitelistEntry do
- let(:ipv4) { IPAddr.new('192.168.1.1') }
-
- describe '#initialize' do
- it 'initializes without port' do
- ip_whitelist_entry = described_class.new(ipv4)
-
- expect(ip_whitelist_entry.ip).to eq(ipv4)
- expect(ip_whitelist_entry.port).to be(nil)
- end
-
- it 'initializes with port' do
- port = 8080
- ip_whitelist_entry = described_class.new(ipv4, port: port)
-
- expect(ip_whitelist_entry.ip).to eq(ipv4)
- expect(ip_whitelist_entry.port).to eq(port)
- end
- end
-
- describe '#match?' do
- it 'matches with equivalent IP and port' do
- port = 8080
- ip_whitelist_entry = described_class.new(ipv4, port: port)
-
- expect(ip_whitelist_entry).to be_match(ipv4.to_s, port)
- end
-
- it 'matches any port when port is nil' do
- ip_whitelist_entry = described_class.new(ipv4)
-
- expect(ip_whitelist_entry).to be_match(ipv4.to_s, 8080)
- expect(ip_whitelist_entry).to be_match(ipv4.to_s, 9090)
- end
-
- it 'does not match when port is present but requested_port is nil' do
- ip_whitelist_entry = described_class.new(ipv4, port: 8080)
-
- expect(ip_whitelist_entry).not_to be_match(ipv4.to_s, nil)
- end
-
- it 'matches when port and requested_port are nil' do
- ip_whitelist_entry = described_class.new(ipv4)
-
- expect(ip_whitelist_entry).to be_match(ipv4.to_s)
- end
-
- it 'works with ipv6' do
- ipv6 = IPAddr.new('fe80::c800:eff:fe74:8')
- ip_whitelist_entry = described_class.new(ipv6)
-
- expect(ip_whitelist_entry).to be_match(ipv6.to_s, 8080)
- end
-
- it 'matches ipv4 within IPv4 range' do
- ipv4_range = IPAddr.new('127.0.0.0/28')
- ip_whitelist_entry = described_class.new(ipv4_range)
-
- expect(ip_whitelist_entry).to be_match(ipv4_range.to_range.last.to_s, 8080)
- expect(ip_whitelist_entry).not_to be_match('127.0.1.1', 8080)
- end
-
- it 'matches IPv6 within IPv6 range' do
- ipv6_range = IPAddr.new('fd84:6d02:f6d8:c89e::/124')
- ip_whitelist_entry = described_class.new(ipv6_range)
-
- expect(ip_whitelist_entry).to be_match(ipv6_range.to_range.last.to_s, 8080)
- expect(ip_whitelist_entry).not_to be_match('fd84:6d02:f6d8:f::f', 8080)
- end
- end
-end
diff --git a/spec/lib/gitlab/url_blockers/url_allowlist_spec.rb b/spec/lib/gitlab/url_blockers/url_allowlist_spec.rb
new file mode 100644
index 00000000000..d9e44e9b85c
--- /dev/null
+++ b/spec/lib/gitlab/url_blockers/url_allowlist_spec.rb
@@ -0,0 +1,164 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::UrlBlockers::UrlAllowlist do
+ include StubRequests
+
+ let(:allowlist) { [] }
+
+ before do
+ allow(ApplicationSetting).to receive(:current).and_return(ApplicationSetting.new)
+ stub_application_setting(outbound_local_requests_whitelist: allowlist)
+ end
+
+ describe '#domain_allowed?' do
+ let(:allowlist) { %w[www.example.com example.com] }
+
+ it 'returns true if domains present in allowlist' do
+ not_allowed = %w[subdomain.example.com example.org]
+
+ aggregate_failures do
+ allowlist.each do |domain|
+ expect(described_class).to be_domain_allowed(domain)
+ end
+
+ not_allowed.each do |domain|
+ expect(described_class).not_to be_domain_allowed(domain)
+ end
+ end
+ end
+
+ it 'returns false when domain is blank' do
+ expect(described_class).not_to be_domain_allowed(nil)
+ end
+
+ context 'with ports' do
+ let(:allowlist) { ['example.io:3000'] }
+
+ it 'returns true if domain and ports present in allowlist' do
+ parsed_allowlist = [['example.io', { port: 3000 }]]
+ not_allowed = [
+ 'example.io',
+ ['example.io', { port: 3001 }]
+ ]
+
+ aggregate_failures do
+ parsed_allowlist.each do |domain_and_port|
+ expect(described_class).to be_domain_allowed(*domain_and_port)
+ end
+
+ not_allowed.each do |domain_and_port|
+ expect(described_class).not_to be_domain_allowed(*domain_and_port)
+ end
+ end
+ end
+ end
+ end
+
+ describe '#ip_allowed?' do
+ let(:allowlist) do
+ [
+ '0.0.0.0',
+ '127.0.0.1',
+ '192.168.1.1',
+ '0:0:0:0:0:ffff:192.168.1.2',
+ '::ffff:c0a8:102',
+ 'fc00:bf8b:e62c:abcd:abcd:aaaa:aaaa:aaaa',
+ '0:0:0:0:0:ffff:169.254.169.254',
+ '::ffff:a9fe:a9fe',
+ '::ffff:a9fe:a864',
+ 'fe80::c800:eff:fe74:8'
+ ]
+ end
+
+ it 'returns true if ips present in allowlist' do
+ aggregate_failures do
+ allowlist.each do |ip_address|
+ expect(described_class).to be_ip_allowed(ip_address)
+ end
+
+ %w[172.16.2.2 127.0.0.2 fe80::c800:eff:fe74:9].each do |ip_address|
+ expect(described_class).not_to be_ip_allowed(ip_address)
+ end
+ end
+ end
+
+ it 'returns false when ip is blank' do
+ expect(described_class).not_to be_ip_allowed(nil)
+ end
+
+ context 'with ip ranges in allowlist' do
+ let(:ipv4_range) { '127.0.0.0/28' }
+ let(:ipv6_range) { 'fd84:6d02:f6d8:c89e::/124' }
+
+ let(:allowlist) do
+ [
+ ipv4_range,
+ ipv6_range
+ ]
+ end
+
+ it 'does not allowlist ipv4 range when not in allowlist' do
+ stub_application_setting(outbound_local_requests_whitelist: [])
+
+ IPAddr.new(ipv4_range).to_range.to_a.each do |ip|
+ expect(described_class).not_to be_ip_allowed(ip.to_s)
+ end
+ end
+
+ it 'allowlists all ipv4s in the range when in allowlist' do
+ IPAddr.new(ipv4_range).to_range.to_a.each do |ip|
+ expect(described_class).to be_ip_allowed(ip.to_s)
+ end
+ end
+
+ it 'does not allowlist ipv6 range when not in allowlist' do
+ stub_application_setting(outbound_local_requests_whitelist: [])
+
+ IPAddr.new(ipv6_range).to_range.to_a.each do |ip|
+ expect(described_class).not_to be_ip_allowed(ip.to_s)
+ end
+ end
+
+ it 'allowlists all ipv6s in the range when in allowlist' do
+ IPAddr.new(ipv6_range).to_range.to_a.each do |ip|
+ expect(described_class).to be_ip_allowed(ip.to_s)
+ end
+ end
+
+ it 'does not allowlist IPs outside the range' do
+ expect(described_class).not_to be_ip_allowed("fd84:6d02:f6d8:c89e:0:0:1:f")
+
+ expect(described_class).not_to be_ip_allowed("127.0.1.15")
+ end
+ end
+
+ context 'with ports' do
+ let(:allowlist) { %w[127.0.0.9:3000 [2001:db8:85a3:8d3:1319:8a2e:370:7348]:443] }
+
+ it 'returns true if ip and ports present in allowlist' do
+ parsed_allowlist = [
+ ['127.0.0.9', { port: 3000 }],
+ ['[2001:db8:85a3:8d3:1319:8a2e:370:7348]', { port: 443 }]
+ ]
+ not_allowed = [
+ '127.0.0.9',
+ ['127.0.0.9', { port: 3001 }],
+ '[2001:db8:85a3:8d3:1319:8a2e:370:7348]',
+ ['[2001:db8:85a3:8d3:1319:8a2e:370:7348]', { port: 3001 }]
+ ]
+
+ aggregate_failures do
+ parsed_allowlist.each do |ip_and_port|
+ expect(described_class).to be_ip_allowed(*ip_and_port)
+ end
+
+ not_allowed.each do |ip_and_port|
+ expect(described_class).not_to be_ip_allowed(*ip_and_port)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/url_blockers/url_whitelist_spec.rb b/spec/lib/gitlab/url_blockers/url_whitelist_spec.rb
deleted file mode 100644
index 7a65516be3c..00000000000
--- a/spec/lib/gitlab/url_blockers/url_whitelist_spec.rb
+++ /dev/null
@@ -1,164 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::UrlBlockers::UrlWhitelist do
- include StubRequests
-
- let(:whitelist) { [] }
-
- before do
- allow(ApplicationSetting).to receive(:current).and_return(ApplicationSetting.new)
- stub_application_setting(outbound_local_requests_whitelist: whitelist)
- end
-
- describe '#domain_whitelisted?' do
- let(:whitelist) { ['www.example.com', 'example.com'] }
-
- it 'returns true if domains present in whitelist' do
- not_whitelisted = ['subdomain.example.com', 'example.org']
-
- aggregate_failures do
- whitelist.each do |domain|
- expect(described_class).to be_domain_whitelisted(domain)
- end
-
- not_whitelisted.each do |domain|
- expect(described_class).not_to be_domain_whitelisted(domain)
- end
- end
- end
-
- it 'returns false when domain is blank' do
- expect(described_class).not_to be_domain_whitelisted(nil)
- end
-
- context 'with ports' do
- let(:whitelist) { ['example.io:3000'] }
-
- it 'returns true if domain and ports present in whitelist' do
- parsed_whitelist = [['example.io', { port: 3000 }]]
- not_whitelisted = [
- 'example.io',
- ['example.io', { port: 3001 }]
- ]
-
- aggregate_failures do
- parsed_whitelist.each do |domain_and_port|
- expect(described_class).to be_domain_whitelisted(*domain_and_port)
- end
-
- not_whitelisted.each do |domain_and_port|
- expect(described_class).not_to be_domain_whitelisted(*domain_and_port)
- end
- end
- end
- end
- end
-
- describe '#ip_whitelisted?' do
- let(:whitelist) do
- [
- '0.0.0.0',
- '127.0.0.1',
- '192.168.1.1',
- '0:0:0:0:0:ffff:192.168.1.2',
- '::ffff:c0a8:102',
- 'fc00:bf8b:e62c:abcd:abcd:aaaa:aaaa:aaaa',
- '0:0:0:0:0:ffff:169.254.169.254',
- '::ffff:a9fe:a9fe',
- '::ffff:a9fe:a864',
- 'fe80::c800:eff:fe74:8'
- ]
- end
-
- it 'returns true if ips present in whitelist' do
- aggregate_failures do
- whitelist.each do |ip_address|
- expect(described_class).to be_ip_whitelisted(ip_address)
- end
-
- ['172.16.2.2', '127.0.0.2', 'fe80::c800:eff:fe74:9'].each do |ip_address|
- expect(described_class).not_to be_ip_whitelisted(ip_address)
- end
- end
- end
-
- it 'returns false when ip is blank' do
- expect(described_class).not_to be_ip_whitelisted(nil)
- end
-
- context 'with ip ranges in whitelist' do
- let(:ipv4_range) { '127.0.0.0/28' }
- let(:ipv6_range) { 'fd84:6d02:f6d8:c89e::/124' }
-
- let(:whitelist) do
- [
- ipv4_range,
- ipv6_range
- ]
- end
-
- it 'does not whitelist ipv4 range when not in whitelist' do
- stub_application_setting(outbound_local_requests_whitelist: [])
-
- IPAddr.new(ipv4_range).to_range.to_a.each do |ip|
- expect(described_class).not_to be_ip_whitelisted(ip.to_s)
- end
- end
-
- it 'whitelists all ipv4s in the range when in whitelist' do
- IPAddr.new(ipv4_range).to_range.to_a.each do |ip|
- expect(described_class).to be_ip_whitelisted(ip.to_s)
- end
- end
-
- it 'does not whitelist ipv6 range when not in whitelist' do
- stub_application_setting(outbound_local_requests_whitelist: [])
-
- IPAddr.new(ipv6_range).to_range.to_a.each do |ip|
- expect(described_class).not_to be_ip_whitelisted(ip.to_s)
- end
- end
-
- it 'whitelists all ipv6s in the range when in whitelist' do
- IPAddr.new(ipv6_range).to_range.to_a.each do |ip|
- expect(described_class).to be_ip_whitelisted(ip.to_s)
- end
- end
-
- it 'does not whitelist IPs outside the range' do
- expect(described_class).not_to be_ip_whitelisted("fd84:6d02:f6d8:c89e:0:0:1:f")
-
- expect(described_class).not_to be_ip_whitelisted("127.0.1.15")
- end
- end
-
- context 'with ports' do
- let(:whitelist) { ['127.0.0.9:3000', '[2001:db8:85a3:8d3:1319:8a2e:370:7348]:443'] }
-
- it 'returns true if ip and ports present in whitelist' do
- parsed_whitelist = [
- ['127.0.0.9', { port: 3000 }],
- ['[2001:db8:85a3:8d3:1319:8a2e:370:7348]', { port: 443 }]
- ]
- not_whitelisted = [
- '127.0.0.9',
- ['127.0.0.9', { port: 3001 }],
- '[2001:db8:85a3:8d3:1319:8a2e:370:7348]',
- ['[2001:db8:85a3:8d3:1319:8a2e:370:7348]', { port: 3001 }]
- ]
-
- aggregate_failures do
- parsed_whitelist.each do |ip_and_port|
- expect(described_class).to be_ip_whitelisted(*ip_and_port)
- end
-
- not_whitelisted.each do |ip_and_port|
- expect(described_class).not_to be_ip_whitelisted(*ip_and_port)
- end
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/url_builder_spec.rb b/spec/lib/gitlab/url_builder_spec.rb
index b58b5a84662..c892f1f0410 100644
--- a/spec/lib/gitlab/url_builder_spec.rb
+++ b/spec/lib/gitlab/url_builder_spec.rb
@@ -24,6 +24,7 @@ RSpec.describe Gitlab::UrlBuilder do
:project_milestone | ->(milestone) { "/#{milestone.project.full_path}/-/milestones/#{milestone.iid}" }
:project_snippet | ->(snippet) { "/#{snippet.project.full_path}/-/snippets/#{snippet.id}" }
:project_wiki | ->(wiki) { "/#{wiki.container.full_path}/-/wikis/home" }
+ :release | ->(release) { "/#{release.project.full_path}/-/releases/#{release.tag}" }
:ci_build | ->(build) { "/#{build.project.full_path}/-/jobs/#{build.id}" }
:design | ->(design) { "/#{design.project.full_path}/-/design_management/designs/#{design.id}/raw_image" }
diff --git a/spec/lib/gitlab/usage_data_counters/aggregated_metrics_spec.rb b/spec/lib/gitlab/usage_data_counters/aggregated_metrics_spec.rb
new file mode 100644
index 00000000000..e9fb5346eae
--- /dev/null
+++ b/spec/lib/gitlab/usage_data_counters/aggregated_metrics_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'aggregated metrics' do
+ RSpec::Matchers.define :be_known_event do
+ match do |event|
+ Gitlab::UsageDataCounters::HLLRedisCounter.known_event?(event)
+ end
+
+ failure_message do
+      "Event with name: `#{event}` cannot be found within `#{Gitlab::UsageDataCounters::HLLRedisCounter::KNOWN_EVENTS_PATH}`"
+ end
+ end
+
+ let_it_be(:known_events) do
+ Gitlab::UsageDataCounters::HLLRedisCounter.known_events
+ end
+
+ Gitlab::UsageDataCounters::HLLRedisCounter.aggregated_metrics.tap do |aggregated_metrics|
+    it 'all events have unique names' do
+ event_names = aggregated_metrics&.map { |event| event[:name] }
+
+ expect(event_names).to eq(event_names&.uniq)
+ end
+
+ aggregated_metrics&.each do |aggregate|
+ context "for #{aggregate[:name]} aggregate of #{aggregate[:events].join(' ')}" do
+ let_it_be(:events_records) { known_events.select { |event| aggregate[:events].include?(event[:name]) } }
+
+ it "only refers to known events" do
+ expect(aggregate[:events]).to all be_known_event
+ end
+
+ it "has expected structure" do
+ expect(aggregate.keys).to include(*%w[name operator events])
+ end
+
+ it "uses allowed aggregation operators" do
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter::ALLOWED_METRICS_AGGREGATIONS).to include aggregate[:operator]
+ end
+
+ it "uses events from the same Redis slot" do
+ event_slots = events_records.map { |event| event[:redis_slot] }.uniq
+
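+          # exactly one slot, shared by all events in the aggregate, and it must not be blank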
+ expect(event_slots).to contain_exactly(be_present)
+ end
+
+ it "uses events with the same aggregation period" do
+          aggregation_periods = events_records.map { |event| event[:aggregation] }.uniq
+
+          expect(aggregation_periods).to contain_exactly(be_present)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
index e84c3c17274..93704a39555 100644
--- a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
@@ -8,6 +8,9 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
let(:entity3) { '34rfjuuy-ce56-sa35-ds34-dfer567dfrf2' }
let(:entity4) { '8b9a2671-2abf-4bec-a682-22f6a8f7bf31' }
+ let(:default_context) { 'default' }
+ let(:invalid_context) { 'invalid' }
+
around do |example|
# We need to freeze to a reference time
# because visits are grouped by the week number in the year
@@ -20,7 +23,28 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
describe '.categories' do
it 'gets all unique category names' do
- expect(described_class.categories).to contain_exactly('analytics', 'compliance', 'ide_edit', 'search', 'source_code', 'incident_management', 'issues_edit', 'testing')
+ expect(described_class.categories).to contain_exactly(
+ 'compliance',
+ 'analytics',
+ 'ide_edit',
+ 'search',
+ 'source_code',
+ 'incident_management',
+ 'testing',
+ 'issues_edit',
+ 'ci_secrets_management',
+ 'maven_packages',
+ 'npm_packages',
+ 'conan_packages',
+ 'nuget_packages',
+ 'pypi_packages',
+ 'composer_packages',
+ 'generic_packages',
+ 'golang_packages',
+ 'debian_packages',
+ 'container_packages',
+ 'tag_packages'
+ )
end
end
@@ -34,11 +58,13 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
let(:no_slot) { 'no_slot' }
let(:different_aggregation) { 'different_aggregation' }
let(:custom_daily_event) { 'g_analytics_custom' }
+ let(:context_event) { 'context_event' }
let(:global_category) { 'global' }
- let(:compliance_category) {'compliance' }
- let(:productivity_category) {'productivity' }
+ let(:compliance_category) { 'compliance' }
+ let(:productivity_category) { 'productivity' }
let(:analytics_category) { 'analytics' }
+ let(:other_category) { 'other' }
let(:known_events) do
[
@@ -47,7 +73,8 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
{ name: category_productivity_event, redis_slot: "analytics", category: productivity_category, aggregation: "weekly" },
{ name: compliance_slot_event, redis_slot: "compliance", category: compliance_category, aggregation: "weekly" },
{ name: no_slot, category: global_category, aggregation: "daily" },
- { name: different_aggregation, category: global_category, aggregation: "monthly" }
+ { name: different_aggregation, category: global_category, aggregation: "monthly" },
+ { name: context_event, category: other_category, expiry: 6, aggregation: 'weekly' }
].map(&:with_indifferent_access)
end
@@ -77,12 +104,18 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
stub_application_setting(usage_ping_enabled: true)
end
+ it 'tracks event when using symbol' do
+ expect(Gitlab::Redis::HLL).to receive(:add)
+
+ described_class.track_event(entity1, :g_analytics_contribution)
+ end
+
it "raise error if metrics don't have same aggregation" do
- expect { described_class.track_event(entity1, different_aggregation, Date.current) } .to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownAggregation)
+ expect { described_class.track_event(entity1, different_aggregation, Date.current) }.to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownAggregation)
end
it 'raise error if metrics of unknown aggregation' do
- expect { described_class.track_event(entity1, 'unknown', Date.current) } .to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownEvent)
+ expect { described_class.track_event(entity1, 'unknown', Date.current) }.to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownEvent)
end
context 'for weekly events' do
@@ -143,6 +176,34 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
end
end
+ describe '.track_event_in_context' do
+    context 'with valid context' do
+      it 'increments the context event counter' do
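+        # context-scoped events are stored under Redis keys prefixed with the context name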
+ expect(Gitlab::Redis::HLL).to receive(:add) do |kwargs|
+ expect(kwargs[:key]).to match(/^#{default_context}\_.*/)
+ end
+
+ described_class.track_event_in_context(entity1, context_event, default_context)
+ end
+ end
+
+ context 'with empty context' do
+ it 'does not increment a counter' do
+ expect(Gitlab::Redis::HLL).not_to receive(:add)
+
+ described_class.track_event_in_context(entity1, context_event, '')
+ end
+ end
+
+ context 'when sending invalid context' do
+ it 'does not increment a counter' do
+ expect(Gitlab::Redis::HLL).not_to receive(:add)
+
+ described_class.track_event_in_context(entity1, context_event, invalid_context)
+ end
+ end
+ end
+
describe '.unique_events' do
before do
# events in current week, should not be counted as week is not complete
@@ -178,37 +239,89 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
end
it 'raise error if metrics are not in the same slot' do
- expect { described_class.unique_events(event_names: [compliance_slot_event, analytics_slot_event], start_date: 4.weeks.ago, end_date: Date.current) }.to raise_error('Events should be in same slot')
+ expect do
+ described_class.unique_events(event_names: [compliance_slot_event, analytics_slot_event], start_date: 4.weeks.ago, end_date: Date.current)
+ end.to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::SlotMismatch)
end
it 'raise error if metrics are not in the same category' do
- expect { described_class.unique_events(event_names: [category_analytics_event, category_productivity_event], start_date: 4.weeks.ago, end_date: Date.current) }.to raise_error('Events should be in same category')
+ expect do
+ described_class.unique_events(event_names: [category_analytics_event, category_productivity_event], start_date: 4.weeks.ago, end_date: Date.current)
+ end.to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::CategoryMismatch)
end
it "raise error if metrics don't have same aggregation" do
- expect { described_class.unique_events(event_names: [daily_event, weekly_event], start_date: 4.weeks.ago, end_date: Date.current) }.to raise_error('Events should have same aggregation level')
+ expect do
+ described_class.unique_events(event_names: [daily_event, weekly_event], start_date: 4.weeks.ago, end_date: Date.current)
+ end.to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::AggregationMismatch)
end
context 'when data for the last complete week' do
- it { expect(described_class.unique_events(event_names: weekly_event, start_date: 1.week.ago, end_date: Date.current)).to eq(1) }
+ it { expect(described_class.unique_events(event_names: [weekly_event], start_date: 1.week.ago, end_date: Date.current)).to eq(1) }
end
context 'when data for the last 4 complete weeks' do
- it { expect(described_class.unique_events(event_names: weekly_event, start_date: 4.weeks.ago, end_date: Date.current)).to eq(2) }
+ it { expect(described_class.unique_events(event_names: [weekly_event], start_date: 4.weeks.ago, end_date: Date.current)).to eq(2) }
end
context 'when data for the week 4 weeks ago' do
- it { expect(described_class.unique_events(event_names: weekly_event, start_date: 4.weeks.ago, end_date: 3.weeks.ago)).to eq(1) }
+ it { expect(described_class.unique_events(event_names: [weekly_event], start_date: 4.weeks.ago, end_date: 3.weeks.ago)).to eq(1) }
+ end
+
+ context 'when using symbol as parameter' do
+ it { expect(described_class.unique_events(event_names: [weekly_event.to_sym], start_date: 4.weeks.ago, end_date: 3.weeks.ago)).to eq(1) }
end
context 'when using daily aggregation' do
- it { expect(described_class.unique_events(event_names: daily_event, start_date: 7.days.ago, end_date: Date.current)).to eq(2) }
- it { expect(described_class.unique_events(event_names: daily_event, start_date: 28.days.ago, end_date: Date.current)).to eq(3) }
- it { expect(described_class.unique_events(event_names: daily_event, start_date: 28.days.ago, end_date: 21.days.ago)).to eq(1) }
+ it { expect(described_class.unique_events(event_names: [daily_event], start_date: 7.days.ago, end_date: Date.current)).to eq(2) }
+ it { expect(described_class.unique_events(event_names: [daily_event], start_date: 28.days.ago, end_date: Date.current)).to eq(3) }
+ it { expect(described_class.unique_events(event_names: [daily_event], start_date: 28.days.ago, end_date: 21.days.ago)).to eq(1) }
end
context 'when no slot is set' do
- it { expect(described_class.unique_events(event_names: no_slot, start_date: 7.days.ago, end_date: Date.current)).to eq(1) }
+ it { expect(described_class.unique_events(event_names: [no_slot], start_date: 7.days.ago, end_date: Date.current)).to eq(1) }
+ end
+ end
+ end
+
+ describe 'context level tracking' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:known_events) do
+ [
+ { name: 'event_name_1', redis_slot: 'event', category: 'category1', aggregation: "weekly" },
+ { name: 'event_name_2', redis_slot: 'event', category: 'category1', aggregation: "weekly" },
+ { name: 'event_name_3', redis_slot: 'event', category: 'category1', aggregation: "weekly" }
+ ].map(&:with_indifferent_access)
+ end
+
+ before do
+ allow(described_class).to receive(:known_events).and_return(known_events)
+ allow(described_class).to receive(:categories).and_return(%w(category1 category2))
+
+ described_class.track_event_in_context([entity1, entity3], 'event_name_1', default_context, 2.days.ago)
+ described_class.track_event_in_context(entity3, 'event_name_1', default_context, 2.days.ago)
+ described_class.track_event_in_context(entity3, 'event_name_1', invalid_context, 2.days.ago)
+ described_class.track_event_in_context([entity1, entity2], 'event_name_2', '', 2.weeks.ago)
+ end
+
+ subject(:unique_events) { described_class.unique_events(event_names: event_names, start_date: 4.weeks.ago, end_date: Date.current, context: context) }
+
+ context 'with correct arguments' do
+ where(:event_names, :context, :value) do
+ ['event_name_1'] | 'default' | 2
+ ['event_name_1'] | '' | 0
+ ['event_name_2'] | '' | 0
+ end
+
+ with_them do
+ it { is_expected.to eq value }
+ end
+ end
+
+ context 'with invalid context' do
+      it 'raises an error' do
+ expect { described_class.unique_events(event_names: 'event_name_1', start_date: 4.weeks.ago, end_date: Date.current, context: invalid_context) }.to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::InvalidContext)
end
end
end
@@ -257,4 +370,183 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
expect(subject.unique_events_data).to eq(results)
end
end
+
+ context 'aggregated_metrics_data' do
+ let(:known_events) do
+ [
+ { name: 'event1_slot', redis_slot: "slot", category: 'category1', aggregation: "weekly" },
+ { name: 'event2_slot', redis_slot: "slot", category: 'category2', aggregation: "weekly" },
+ { name: 'event3_slot', redis_slot: "slot", category: 'category3', aggregation: "weekly" },
+ { name: 'event5_slot', redis_slot: "slot", category: 'category4', aggregation: "weekly" },
+ { name: 'event4', category: 'category2', aggregation: "weekly" }
+ ].map(&:with_indifferent_access)
+ end
+
+ before do
+ allow(described_class).to receive(:known_events).and_return(known_events)
+ end
+
+ shared_examples 'aggregated_metrics_data' do
+      context 'when no aggregated metrics are defined' do
+ it 'returns empty hash' do
+ allow(described_class).to receive(:aggregated_metrics).and_return([])
+
+ expect(aggregated_metrics_data).to eq({})
+ end
+ end
+
+ context 'there are aggregated metrics defined' do
+ before do
+ allow(described_class).to receive(:aggregated_metrics).and_return(aggregated_metrics)
+ end
+
+ context 'with AND operator' do
+ let(:aggregated_metrics) do
+ [
+ { name: 'gmau_1', events: %w[event1_slot event2_slot], operator: "AND" },
+ { name: 'gmau_2', events: %w[event1_slot event2_slot event3_slot], operator: "AND" },
+ { name: 'gmau_3', events: %w[event1_slot event2_slot event3_slot event5_slot], operator: "AND" },
+ { name: 'gmau_4', events: %w[event4], operator: "AND" }
+ ].map(&:with_indifferent_access)
+ end
+
+ it 'returns the number of unique events for all known events' do
+ results = {
+ 'gmau_1' => 3,
+ 'gmau_2' => 2,
+ 'gmau_3' => 1,
+ 'gmau_4' => 3
+ }
+
+ expect(aggregated_metrics_data).to eq(results)
+ end
+ end
+
+ context 'with OR operator' do
+ let(:aggregated_metrics) do
+ [
+ { name: 'gmau_1', events: %w[event3_slot event5_slot], operator: "OR" },
+ { name: 'gmau_2', events: %w[event1_slot event2_slot event3_slot event5_slot], operator: "OR" },
+ { name: 'gmau_3', events: %w[event4], operator: "OR" }
+ ].map(&:with_indifferent_access)
+ end
+
+ it 'returns the number of unique events for all known events' do
+ results = {
+ 'gmau_1' => 2,
+ 'gmau_2' => 3,
+ 'gmau_3' => 3
+ }
+
+ expect(aggregated_metrics_data).to eq(results)
+ end
+ end
+
+ context 'hidden behind feature flag' do
+ let(:enabled_feature_flag) { 'test_ff_enabled' }
+ let(:disabled_feature_flag) { 'test_ff_disabled' }
+ let(:aggregated_metrics) do
+ [
+              # represents a stable aggregated metric that has been fully released
+ { name: 'gmau_without_ff', events: %w[event3_slot event5_slot], operator: "OR" },
+              # represents a new aggregated metric that is under performance testing on gitlab.com
+ { name: 'gmau_enabled', events: %w[event4], operator: "AND", feature_flag: enabled_feature_flag },
+              # represents an aggregated metric that is under development and shouldn't yet be collected, even on gitlab.com
+ { name: 'gmau_disabled', events: %w[event4], operator: "AND", feature_flag: disabled_feature_flag }
+ ].map(&:with_indifferent_access)
+ end
+
+ it 'returns the number of unique events for all known events' do
+ skip_feature_flags_yaml_validation
+ stub_feature_flags(enabled_feature_flag => true, disabled_feature_flag => false)
+
+ expect(aggregated_metrics_data).to eq('gmau_without_ff' => 2, 'gmau_enabled' => 3)
+ end
+ end
+ end
+ end
+
+ describe '.aggregated_metrics_weekly_data' do
+ subject(:aggregated_metrics_data) { described_class.aggregated_metrics_weekly_data }
+
+ before do
+ described_class.track_event(entity1, 'event1_slot', 2.days.ago)
+ described_class.track_event(entity2, 'event1_slot', 2.days.ago)
+ described_class.track_event(entity3, 'event1_slot', 2.days.ago)
+ described_class.track_event(entity1, 'event2_slot', 2.days.ago)
+ described_class.track_event(entity2, 'event2_slot', 3.days.ago)
+ described_class.track_event(entity3, 'event2_slot', 3.days.ago)
+ described_class.track_event(entity1, 'event3_slot', 3.days.ago)
+ described_class.track_event(entity2, 'event3_slot', 3.days.ago)
+ described_class.track_event(entity2, 'event5_slot', 3.days.ago)
+
+ # events out of time scope
+ described_class.track_event(entity3, 'event2_slot', 8.days.ago)
+
+ # events in different slots
+ described_class.track_event(entity1, 'event4', 2.days.ago)
+ described_class.track_event(entity2, 'event4', 2.days.ago)
+ described_class.track_event(entity4, 'event4', 2.days.ago)
+ end
+
+ it_behaves_like 'aggregated_metrics_data'
+ end
+
+ describe '.aggregated_metrics_monthly_data' do
+ subject(:aggregated_metrics_data) { described_class.aggregated_metrics_monthly_data }
+
+ it_behaves_like 'aggregated_metrics_data' do
+ before do
+ described_class.track_event(entity1, 'event1_slot', 2.days.ago)
+ described_class.track_event(entity2, 'event1_slot', 2.days.ago)
+ described_class.track_event(entity3, 'event1_slot', 2.days.ago)
+ described_class.track_event(entity1, 'event2_slot', 2.days.ago)
+ described_class.track_event(entity2, 'event2_slot', 3.days.ago)
+ described_class.track_event(entity3, 'event2_slot', 3.days.ago)
+ described_class.track_event(entity1, 'event3_slot', 3.days.ago)
+ described_class.track_event(entity2, 'event3_slot', 10.days.ago)
+ described_class.track_event(entity2, 'event5_slot', 4.weeks.ago.advance(days: 1))
+
+ # events out of time scope
+ described_class.track_event(entity1, 'event5_slot', 4.weeks.ago.advance(days: -1))
+
+ # events in different slots
+ described_class.track_event(entity1, 'event4', 2.days.ago)
+ described_class.track_event(entity2, 'event4', 2.days.ago)
+ described_class.track_event(entity4, 'event4', 2.days.ago)
+ end
+ end
+
+ context 'Redis calls' do
+ let(:aggregated_metrics) do
+ [
+ { name: 'gmau_3', events: %w[event1_slot event2_slot event3_slot event5_slot], operator: "AND" }
+ ].map(&:with_indifferent_access)
+ end
+
+ let(:known_events) do
+ [
+ { name: 'event1_slot', redis_slot: "slot", category: 'category1', aggregation: "weekly" },
+ { name: 'event2_slot', redis_slot: "slot", category: 'category2', aggregation: "weekly" },
+ { name: 'event3_slot', redis_slot: "slot", category: 'category3', aggregation: "weekly" },
+ { name: 'event5_slot', redis_slot: "slot", category: 'category4', aggregation: "weekly" }
+ ].map(&:with_indifferent_access)
+ end
+
+ it 'caches intermediate operations' do
+ allow(described_class).to receive(:known_events).and_return(known_events)
+ allow(described_class).to receive(:aggregated_metrics).and_return(aggregated_metrics)
+
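+        # each distinct subset of events should trigger exactly one Redis count call, since intermediate results are memoized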
+ 4.downto(1) do |subset_size|
+ known_events.combination(subset_size).each do |events|
+ keys = described_class.send(:weekly_redis_keys, events: events, start_date: 4.weeks.ago.to_date, end_date: Date.current)
+ expect(Gitlab::Redis::HLL).to receive(:count).with(keys: keys).once.and_return(0)
+ end
+ end
+
+ subject
+ end
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb
index e08dc41d0cc..803eff05efe 100644
--- a/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb
@@ -8,42 +8,8 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
let(:user3) { build(:user, id: 3) }
let(:time) { Time.zone.now }
- shared_examples 'tracks and counts action' do
- before do
- stub_application_setting(usage_ping_enabled: true)
- end
-
- def count_unique(date_from:, date_to:)
- Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(event_names: action, start_date: date_from, end_date: date_to)
- end
-
- specify do
- aggregate_failures do
- expect(track_action(author: user1)).to be_truthy
- expect(track_action(author: user1)).to be_truthy
- expect(track_action(author: user2)).to be_truthy
- expect(track_action(author: user3, time: time - 3.days)).to be_truthy
-
- expect(count_unique(date_from: time, date_to: time)).to eq(2)
- expect(count_unique(date_from: time - 5.days, date_to: 1.day.since(time))).to eq(3)
- end
- end
-
- it 'does not track edit actions if author is not present' do
- expect(track_action(author: nil)).to be_nil
- end
-
- context 'when feature flag track_issue_activity_actions is disabled' do
- it 'does not track edit actions' do
- stub_feature_flags(track_issue_activity_actions: false)
-
- expect(track_action(author: user1)).to be_nil
- end
- end
- end
-
context 'for Issue title edit actions' do
- it_behaves_like 'tracks and counts action' do
+ it_behaves_like 'a tracked issue edit event' do
let(:action) { described_class::ISSUE_TITLE_CHANGED }
def track_action(params)
@@ -53,7 +19,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue description edit actions' do
- it_behaves_like 'tracks and counts action' do
+ it_behaves_like 'a tracked issue edit event' do
let(:action) { described_class::ISSUE_DESCRIPTION_CHANGED }
def track_action(params)
@@ -63,7 +29,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue assignee edit actions' do
- it_behaves_like 'tracks and counts action' do
+ it_behaves_like 'a tracked issue edit event' do
let(:action) { described_class::ISSUE_ASSIGNEE_CHANGED }
def track_action(params)
@@ -73,7 +39,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue make confidential actions' do
- it_behaves_like 'tracks and counts action' do
+ it_behaves_like 'a tracked issue edit event' do
let(:action) { described_class::ISSUE_MADE_CONFIDENTIAL }
def track_action(params)
@@ -83,7 +49,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue make visible actions' do
- it_behaves_like 'tracks and counts action' do
+ it_behaves_like 'a tracked issue edit event' do
let(:action) { described_class::ISSUE_MADE_VISIBLE }
def track_action(params)
@@ -93,7 +59,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue created actions' do
- it_behaves_like 'tracks and counts action' do
+ it_behaves_like 'a tracked issue edit event' do
let(:action) { described_class::ISSUE_CREATED }
def track_action(params)
@@ -103,7 +69,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue closed actions' do
- it_behaves_like 'tracks and counts action' do
+ it_behaves_like 'a tracked issue edit event' do
let(:action) { described_class::ISSUE_CLOSED }
def track_action(params)
@@ -113,7 +79,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue reopened actions' do
- it_behaves_like 'tracks and counts action' do
+ it_behaves_like 'a tracked issue edit event' do
let(:action) { described_class::ISSUE_REOPENED }
def track_action(params)
@@ -123,7 +89,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue label changed actions' do
- it_behaves_like 'tracks and counts action' do
+ it_behaves_like 'a tracked issue edit event' do
let(:action) { described_class::ISSUE_LABEL_CHANGED }
def track_action(params)
@@ -133,7 +99,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue cross-referenced actions' do
- it_behaves_like 'tracks and counts action' do
+ it_behaves_like 'a tracked issue edit event' do
let(:action) { described_class::ISSUE_CROSS_REFERENCED }
def track_action(params)
@@ -143,7 +109,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue moved actions' do
- it_behaves_like 'tracks and counts action' do
+ it_behaves_like 'a tracked issue edit event' do
let(:action) { described_class::ISSUE_MOVED }
def track_action(params)
@@ -153,7 +119,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue relate actions' do
- it_behaves_like 'tracks and counts action' do
+ it_behaves_like 'a tracked issue edit event' do
let(:action) { described_class::ISSUE_RELATED }
def track_action(params)
@@ -163,7 +129,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue unrelate actions' do
- it_behaves_like 'tracks and counts action' do
+ it_behaves_like 'a tracked issue edit event' do
let(:action) { described_class::ISSUE_UNRELATED }
def track_action(params)
@@ -173,7 +139,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue marked as duplicate actions' do
- it_behaves_like 'tracks and counts action' do
+ it_behaves_like 'a tracked issue edit event' do
let(:action) { described_class::ISSUE_MARKED_AS_DUPLICATE }
def track_action(params)
@@ -183,7 +149,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue locked actions' do
- it_behaves_like 'tracks and counts action' do
+ it_behaves_like 'a tracked issue edit event' do
let(:action) { described_class::ISSUE_LOCKED }
def track_action(params)
@@ -193,7 +159,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue unlocked actions' do
- it_behaves_like 'tracks and counts action' do
+ it_behaves_like 'a tracked issue edit event' do
let(:action) { described_class::ISSUE_UNLOCKED }
def track_action(params)
@@ -202,38 +168,8 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
end
- context 'for Issue added to epic actions' do
- it_behaves_like 'tracks and counts action' do
- let(:action) { described_class::ISSUE_ADDED_TO_EPIC}
-
- def track_action(params)
- described_class.track_issue_added_to_epic_action(**params)
- end
- end
- end
-
- context 'for Issue removed from epic actions' do
- it_behaves_like 'tracks and counts action' do
- let(:action) { described_class::ISSUE_REMOVED_FROM_EPIC}
-
- def track_action(params)
- described_class.track_issue_removed_from_epic_action(**params)
- end
- end
- end
-
- context 'for Issue changed epic actions' do
- it_behaves_like 'tracks and counts action' do
- let(:action) { described_class::ISSUE_CHANGED_EPIC}
-
- def track_action(params)
- described_class.track_issue_changed_epic_action(**params)
- end
- end
- end
-
context 'for Issue designs added actions' do
- it_behaves_like 'tracks and counts action' do
+ it_behaves_like 'a tracked issue edit event' do
let(:action) { described_class::ISSUE_DESIGNS_ADDED }
def track_action(params)
@@ -243,7 +179,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue designs modified actions' do
- it_behaves_like 'tracks and counts action' do
+ it_behaves_like 'a tracked issue edit event' do
let(:action) { described_class::ISSUE_DESIGNS_MODIFIED }
def track_action(params)
@@ -253,7 +189,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue designs removed actions' do
- it_behaves_like 'tracks and counts action' do
+ it_behaves_like 'a tracked issue edit event' do
let(:action) { described_class::ISSUE_DESIGNS_REMOVED }
def track_action(params)
@@ -263,7 +199,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue due date changed actions' do
- it_behaves_like 'tracks and counts action' do
+ it_behaves_like 'a tracked issue edit event' do
let(:action) { described_class::ISSUE_DUE_DATE_CHANGED }
def track_action(params)
@@ -273,7 +209,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue time estimate changed actions' do
- it_behaves_like 'tracks and counts action' do
+ it_behaves_like 'a tracked issue edit event' do
let(:action) { described_class::ISSUE_TIME_ESTIMATE_CHANGED }
def track_action(params)
@@ -283,7 +219,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue time spent changed actions' do
- it_behaves_like 'tracks and counts action' do
+ it_behaves_like 'a tracked issue edit event' do
let(:action) { described_class::ISSUE_TIME_SPENT_CHANGED }
def track_action(params)
@@ -292,6 +228,36 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
end
+ context 'for Issue comment added actions' do
+ it_behaves_like 'a tracked issue edit event' do
+ let(:action) { described_class::ISSUE_COMMENT_ADDED }
+
+ def track_action(params)
+ described_class.track_issue_comment_added_action(**params)
+ end
+ end
+ end
+
+ context 'for Issue comment edited actions' do
+ it_behaves_like 'a tracked issue edit event' do
+ let(:action) { described_class::ISSUE_COMMENT_EDITED }
+
+ def track_action(params)
+ described_class.track_issue_comment_edited_action(**params)
+ end
+ end
+ end
+
+ context 'for Issue comment removed actions' do
+ it_behaves_like 'a tracked issue edit event' do
+ let(:action) { described_class::ISSUE_COMMENT_REMOVED }
+
+ def track_action(params)
+ described_class.track_issue_comment_removed_action(**params)
+ end
+ end
+ end
+
it 'can return the count of actions per user deduplicated', :aggregate_failures do
described_class.track_issue_title_changed_action(author: user1)
described_class.track_issue_description_changed_action(author: user1)
diff --git a/spec/lib/gitlab/usage_data_counters/static_site_editor_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/static_site_editor_counter_spec.rb
index aaa576865f6..1bf5dad1c9f 100644
--- a/spec/lib/gitlab/usage_data_counters/static_site_editor_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/static_site_editor_counter_spec.rb
@@ -4,7 +4,11 @@ require 'spec_helper'
RSpec.describe Gitlab::UsageDataCounters::StaticSiteEditorCounter do
it_behaves_like 'a redis usage counter', 'StaticSiteEditor', :views
+ it_behaves_like 'a redis usage counter', 'StaticSiteEditor', :commits
+ it_behaves_like 'a redis usage counter', 'StaticSiteEditor', :merge_requests
it_behaves_like 'a redis usage counter with totals', :static_site_editor,
- views: 3
+ views: 3,
+ commits: 4,
+ merge_requests: 5
end
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index f64fa2b868d..d305b2c5bfe 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -12,33 +12,37 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
describe '.uncached_data' do
- describe '.usage_activity_by_stage' do
- subject { described_class.uncached_data }
-
- it 'includes usage_activity_by_stage data' do
- is_expected.to include(:usage_activity_by_stage)
- is_expected.to include(:usage_activity_by_stage_monthly)
- expect(subject[:usage_activity_by_stage])
- .to include(:configure, :create, :manage, :monitor, :plan, :release, :verify)
- expect(subject[:usage_activity_by_stage_monthly])
- .to include(:configure, :create, :manage, :monitor, :plan, :release, :verify)
- end
-
- it 'clears memoized values' do
- allow(described_class).to receive(:clear_memoization)
+ subject { described_class.uncached_data }
+
+ it 'includes basic top and second level keys' do
+ is_expected.to include(:counts)
+ is_expected.to include(:counts_monthly)
+ is_expected.to include(:counts_weekly)
+ is_expected.to include(:license)
+ is_expected.to include(:settings)
+
+ # usage_activity_by_stage data
+ is_expected.to include(:usage_activity_by_stage)
+ is_expected.to include(:usage_activity_by_stage_monthly)
+ expect(subject[:usage_activity_by_stage])
+ .to include(:configure, :create, :manage, :monitor, :plan, :release, :verify)
+ expect(subject[:usage_activity_by_stage_monthly])
+ .to include(:configure, :create, :manage, :monitor, :plan, :release, :verify)
+ expect(subject[:usage_activity_by_stage][:create])
+ .not_to include(:merge_requests_users)
+ expect(subject[:usage_activity_by_stage_monthly][:create])
+ .to include(:merge_requests_users)
+ expect(subject[:counts_weekly]).to include(:aggregated_metrics)
+ expect(subject[:counts_monthly]).to include(:aggregated_metrics)
+ end
- subject
+ it 'clears memoized values' do
+ allow(described_class).to receive(:clear_memoization)
- described_class::CE_MEMOIZED_VALUES.each do |key|
- expect(described_class).to have_received(:clear_memoization).with(key)
- end
- end
+ subject
- it 'merge_requests_users is included only in montly counters' do
- expect(subject[:usage_activity_by_stage][:create])
- .not_to include(:merge_requests_users)
- expect(subject[:usage_activity_by_stage_monthly][:create])
- .to include(:merge_requests_users)
+ described_class::CE_MEMOIZED_VALUES.each do |key|
+ expect(described_class).to have_received(:clear_memoization).with(key)
end
end
@@ -48,7 +52,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
expect(described_class).to receive(:recorded_at).and_raise(Exception.new('Stopped calculating recorded_at'))
- expect { described_class.uncached_data }.to raise_error('Stopped calculating recorded_at')
+ expect { subject }.to raise_error('Stopped calculating recorded_at')
end
end
@@ -168,6 +172,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
omniauth:
{ providers: omniauth_providers }
)
+ allow(Devise).to receive(:omniauth_providers).and_return(%w(ldapmain ldapsecondary group_saml))
for_defined_days_back do
user = create(:user)
@@ -186,14 +191,14 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
groups: 2,
users_created: 6,
omniauth_providers: ['google_oauth2'],
- user_auth_by_provider: { 'group_saml' => 2, 'ldap' => 4 }
+ user_auth_by_provider: { 'group_saml' => 2, 'ldap' => 4, 'standard' => 0, 'two-factor' => 0, 'two-factor-via-u2f-device' => 0, "two-factor-via-webauthn-device" => 0 }
)
expect(described_class.usage_activity_by_stage_manage(described_class.last_28_days_time_period)).to include(
events: 1,
groups: 1,
users_created: 3,
omniauth_providers: ['google_oauth2'],
- user_auth_by_provider: { 'group_saml' => 1, 'ldap' => 2 }
+ user_auth_by_provider: { 'group_saml' => 1, 'ldap' => 2, 'standard' => 0, 'two-factor' => 0, 'two-factor-via-u2f-device' => 0, "two-factor-via-webauthn-device" => 0 }
)
end
@@ -201,17 +206,25 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
for_defined_days_back do
user = create(:user)
+ create(:bulk_import, user: user)
+
%w(gitlab_project gitlab github bitbucket bitbucket_server gitea git manifest fogbugz phabricator).each do |type|
create(:project, import_type: type, creator_id: user.id)
end
jira_project = create(:project, creator_id: user.id)
create(:jira_import_state, :finished, project: jira_project)
+
+ create(:issue_csv_import, user: user)
end
expect(described_class.usage_activity_by_stage_manage({})).to include(
{
+ bulk_imports: {
+ gitlab: 2
+ },
projects_imported: {
+ total: 20,
gitlab_project: 2,
gitlab: 2,
github: 2,
@@ -224,13 +237,18 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
issues_imported: {
jira: 2,
fogbugz: 2,
- phabricator: 2
+ phabricator: 2,
+ csv: 2
}
}
)
expect(described_class.usage_activity_by_stage_manage(described_class.last_28_days_time_period)).to include(
{
+ bulk_imports: {
+ gitlab: 1
+ },
projects_imported: {
+ total: 10,
gitlab_project: 1,
gitlab: 1,
github: 1,
@@ -243,7 +261,8 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
issues_imported: {
jira: 1,
fogbugz: 1,
- phabricator: 1
+ phabricator: 1,
+ csv: 1
}
}
)
@@ -280,19 +299,29 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
create(:project, creator: user)
create(:clusters_applications_prometheus, :installed, cluster: cluster)
create(:project_tracing_setting)
+ create(:project_error_tracking_setting)
+ create(:incident)
+ create(:incident, alert_management_alert: create(:alert_management_alert))
end
expect(described_class.usage_activity_by_stage_monitor({})).to include(
clusters: 2,
clusters_applications_prometheus: 2,
operations_dashboard_default_dashboard: 2,
- projects_with_tracing_enabled: 2
+ projects_with_tracing_enabled: 2,
+ projects_with_error_tracking_enabled: 2,
+ projects_with_incidents: 4,
+ projects_with_alert_incidents: 2
)
+
expect(described_class.usage_activity_by_stage_monitor(described_class.last_28_days_time_period)).to include(
clusters: 1,
clusters_applications_prometheus: 1,
operations_dashboard_default_dashboard: 1,
- projects_with_tracing_enabled: 1
+ projects_with_tracing_enabled: 1,
+ projects_with_error_tracking_enabled: 1,
+ projects_with_incidents: 2,
+ projects_with_alert_incidents: 1
)
end
end
@@ -446,9 +475,11 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(count_data[:projects_with_error_tracking_enabled]).to eq(1)
expect(count_data[:projects_with_tracing_enabled]).to eq(1)
expect(count_data[:projects_with_alerts_service_enabled]).to eq(1)
+ expect(count_data[:projects_with_enabled_alert_integrations]).to eq(1)
expect(count_data[:projects_with_prometheus_alerts]).to eq(2)
expect(count_data[:projects_with_terraform_reports]).to eq(2)
expect(count_data[:projects_with_terraform_states]).to eq(2)
+ expect(count_data[:projects_with_alerts_created]).to eq(1)
expect(count_data[:protected_branches]).to eq(2)
expect(count_data[:protected_branches_except_default]).to eq(1)
expect(count_data[:terraform_reports]).to eq(6)
@@ -532,13 +563,13 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
subject { described_class.data[:counts] }
it 'gathers usage data' do
- expect(subject[:projects_with_expiration_policy_enabled]).to eq 22
- expect(subject[:projects_with_expiration_policy_disabled]).to eq 1
+ expect(subject[:projects_with_expiration_policy_enabled]).to eq 18
+ expect(subject[:projects_with_expiration_policy_disabled]).to eq 5
expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_unset]).to eq 1
expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_1]).to eq 1
expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_5]).to eq 1
- expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_10]).to eq 16
+ expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_10]).to eq 12
expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_25]).to eq 1
expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_50]).to eq 1
@@ -546,9 +577,9 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(subject[:projects_with_expiration_policy_enabled_with_older_than_set_to_7d]).to eq 1
expect(subject[:projects_with_expiration_policy_enabled_with_older_than_set_to_14d]).to eq 1
expect(subject[:projects_with_expiration_policy_enabled_with_older_than_set_to_30d]).to eq 1
- expect(subject[:projects_with_expiration_policy_enabled_with_older_than_set_to_90d]).to eq 18
+ expect(subject[:projects_with_expiration_policy_enabled_with_older_than_set_to_90d]).to eq 14
- expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_1d]).to eq 18
+ expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_1d]).to eq 14
expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_7d]).to eq 1
expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_14d]).to eq 1
expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_1month]).to eq 1
@@ -577,9 +608,22 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
describe '.system_usage_data_monthly' do
let_it_be(:project) { create(:project) }
- let!(:ud) { build(:usage_data) }
before do
+ project = create(:project)
+ env = create(:environment)
+ create(:package, project: project, created_at: 3.days.ago)
+ create(:package, created_at: 2.months.ago, project: project)
+
+ [3, 31].each do |n|
+ deployment_options = { created_at: n.days.ago, project: env.project, environment: env }
+ create(:deployment, :failed, deployment_options)
+ create(:deployment, :success, deployment_options)
+ create(:project_snippet, project: project, created_at: n.days.ago)
+ create(:personal_snippet, created_at: n.days.ago)
+ create(:alert_management_alert, project: project, created_at: n.days.ago)
+ end
+
stub_application_setting(self_monitoring_project: project)
for_defined_days_back do
@@ -595,10 +639,11 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(counts_monthly[:deployments]).to eq(2)
expect(counts_monthly[:successful_deployments]).to eq(1)
expect(counts_monthly[:failed_deployments]).to eq(1)
- expect(counts_monthly[:snippets]).to eq(3)
+ expect(counts_monthly[:snippets]).to eq(2)
expect(counts_monthly[:personal_snippets]).to eq(1)
- expect(counts_monthly[:project_snippets]).to eq(2)
- expect(counts_monthly[:packages]).to eq(3)
+ expect(counts_monthly[:project_snippets]).to eq(1)
+ expect(counts_monthly[:projects_with_alerts_created]).to eq(1)
+ expect(counts_monthly[:packages]).to eq(1)
expect(counts_monthly[:promoted_issues]).to eq(1)
end
end
@@ -1047,6 +1092,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
let(:user1) { build(:user, id: 1) }
let(:user2) { build(:user, id: 2) }
let(:user3) { build(:user, id: 3) }
+ let(:user4) { build(:user, id: 4) }
before do
counter = Gitlab::UsageDataCounters::TrackUniqueEvents
@@ -1061,6 +1107,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
counter.track_event(event_action: :pushed, event_target: project, author_id: 4, time: time - 3.days)
counter.track_event(event_action: :created, event_target: wiki, author_id: 3)
counter.track_event(event_action: :created, event_target: design, author_id: 3)
+ counter.track_event(event_action: :created, event_target: design, author_id: 4)
counter = Gitlab::UsageDataCounters::EditorUniqueCounter
@@ -1080,9 +1127,10 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
it 'returns the distinct count of user actions within the specified time period' do
expect(described_class.action_monthly_active_users(time_period)).to eq(
{
- action_monthly_active_users_design_management: 1,
+ action_monthly_active_users_design_management: 2,
action_monthly_active_users_project_repo: 3,
action_monthly_active_users_wiki_repo: 1,
+ action_monthly_active_users_git_write: 4,
action_monthly_active_users_web_ide_edit: 2,
action_monthly_active_users_sfe_edit: 2,
action_monthly_active_users_snippet_editor_edit: 2,
@@ -1187,7 +1235,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
subject { described_class.redis_hll_counters }
let(:categories) { ::Gitlab::UsageDataCounters::HLLRedisCounter.categories }
- let(:ineligible_total_categories) { %w[source_code testing] }
+ let(:ineligible_total_categories) { %w[source_code testing ci_secrets_management] }
it 'has all known_events' do
expect(subject).to have_key(:redis_hll_counters)
@@ -1208,6 +1256,48 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
end
+ describe 'aggregated_metrics' do
+ shared_examples 'aggregated_metrics_for_time_range' do
+ context 'with product_analytics_aggregated_metrics feature flag on' do
+ before do
+ stub_feature_flags(product_analytics_aggregated_metrics: true)
+ end
+
+ it 'uses ::Gitlab::UsageDataCounters::HLLRedisCounter#aggregated_metrics_data', :aggregate_failures do
+ expect(::Gitlab::UsageDataCounters::HLLRedisCounter).to receive(aggregated_metrics_data_method).and_return(global_search_gmau: 123)
+ expect(aggregated_metrics_payload).to eq(aggregated_metrics: { global_search_gmau: 123 })
+ end
+ end
+
+ context 'with product_analytics_aggregated_metrics feature flag off' do
+ before do
+ stub_feature_flags(product_analytics_aggregated_metrics: false)
+ end
+
+ it 'returns empty hash', :aggregate_failures do
+ expect(::Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(aggregated_metrics_data_method)
+ expect(aggregated_metrics_payload).to eq({})
+ end
+ end
+ end
+
+ describe '.aggregated_metrics_weekly' do
+ subject(:aggregated_metrics_payload) { described_class.aggregated_metrics_weekly }
+
+ let(:aggregated_metrics_data_method) { :aggregated_metrics_weekly_data }
+
+ it_behaves_like 'aggregated_metrics_for_time_range'
+ end
+
+ describe '.aggregated_metrics_monthly' do
+ subject(:aggregated_metrics_payload) { described_class.aggregated_metrics_monthly }
+
+ let(:aggregated_metrics_data_method) { :aggregated_metrics_monthly_data }
+
+ it_behaves_like 'aggregated_metrics_for_time_range'
+ end
+ end
+
describe '.service_desk_counts' do
subject { described_class.send(:service_desk_counts) }
diff --git a/spec/lib/gitlab/with_feature_category_spec.rb b/spec/lib/gitlab/with_feature_category_spec.rb
new file mode 100644
index 00000000000..b6fe1c84b26
--- /dev/null
+++ b/spec/lib/gitlab/with_feature_category_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require_relative "../../../lib/gitlab/with_feature_category"
+
+RSpec.describe Gitlab::WithFeatureCategory do
+ describe ".feature_category_for_action" do
+ let(:base_controller) do
+ Class.new do
+ include ::Gitlab::WithFeatureCategory
+ end
+ end
+
+ let(:controller) do
+ Class.new(base_controller) do
+ feature_category :foo, %w(update edit)
+ feature_category :bar, %w(index show)
+ feature_category :quux, %w(destroy)
+ end
+ end
+
+ let(:subclass) do
+ Class.new(controller) do
+ feature_category :baz, %w(subclass_index)
+ end
+ end
+
+ it "is nil when nothing was defined" do
+ expect(base_controller.feature_category_for_action("hello")).to be_nil
+ end
+
+ it "returns the expected category", :aggregate_failures do
+ expect(controller.feature_category_for_action("update")).to eq(:foo)
+ expect(controller.feature_category_for_action("index")).to eq(:bar)
+ expect(controller.feature_category_for_action("destroy")).to eq(:quux)
+ end
+
+ it "returns the expected category for categories defined in subclasses" do
+ expect(subclass.feature_category_for_action("subclass_index")).to eq(:baz)
+ end
+
+ it "raises an error when defining for the controller and for individual actions" do
+ expect do
+ Class.new(base_controller) do
+ feature_category :hello
+ feature_category :goodbye, [:world]
+ end
+ end.to raise_error(ArgumentError, "hello is defined for all actions, but other categories are set")
+ end
+
+ it "raises an error when multiple calls define the same action" do
+ expect do
+ Class.new(base_controller) do
+ feature_category :hello, [:world]
+ feature_category :goodbye, ["world"]
+ end
+ end.to raise_error(ArgumentError, "Actions have multiple feature categories: world")
+ end
+
+ it "does not raise an error when multiple calls define the same action and feature category" do
+ expect do
+ Class.new(base_controller) do
+ feature_category :hello, [:world]
+ feature_category :hello, ["world"]
+ end
+ end.not_to raise_error
+ end
+ end
+end