summaryrefslogtreecommitdiff
path: root/spec/lib
diff options
context:
space:
mode:
Diffstat (limited to 'spec/lib')
-rw-r--r--spec/lib/api/entities/release_spec.rb6
-rw-r--r--spec/lib/api/validations/validators/untrusted_regexp_spec.rb28
-rw-r--r--spec/lib/banzai/filter/design_reference_filter_spec.rb307
-rw-r--r--spec/lib/banzai/filter/external_issue_reference_filter_spec.rb30
-rw-r--r--spec/lib/banzai/filter/gollum_tags_filter_spec.rb34
-rw-r--r--spec/lib/banzai/filter/issue_reference_filter_spec.rb44
-rw-r--r--spec/lib/banzai/filter/label_reference_filter_spec.rb22
-rw-r--r--spec/lib/banzai/filter/repository_link_filter_spec.rb8
-rw-r--r--spec/lib/banzai/filter/wiki_link_filter_spec.rb16
-rw-r--r--spec/lib/banzai/pipeline/description_pipeline_spec.rb4
-rw-r--r--spec/lib/banzai/pipeline/wiki_pipeline_spec.rb67
-rw-r--r--spec/lib/constraints/feature_constrainer_spec.rb7
-rw-r--r--spec/lib/extracts_path_spec.rb126
-rw-r--r--spec/lib/extracts_ref_spec.rb23
-rw-r--r--spec/lib/feature/gitaly_spec.rb2
-rw-r--r--spec/lib/feature_spec.rb164
-rw-r--r--spec/lib/gitaly/server_spec.rb19
-rw-r--r--spec/lib/gitlab/alert_management/alert_params_spec.rb3
-rw-r--r--spec/lib/gitlab/alert_management/alert_status_counts_spec.rb13
-rw-r--r--spec/lib/gitlab/alert_management/fingerprint_spec.rb48
-rw-r--r--spec/lib/gitlab/alerting/alert_spec.rb4
-rw-r--r--spec/lib/gitlab/alerting/notification_payload_parser_spec.rb35
-rw-r--r--spec/lib/gitlab/analytics/cycle_analytics/base_query_builder_spec.rb4
-rw-r--r--spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb4
-rw-r--r--spec/lib/gitlab/auth/auth_finders_spec.rb24
-rw-r--r--spec/lib/gitlab/auth/ldap/person_spec.rb7
-rw-r--r--spec/lib/gitlab/background_migration/merge_request_assignees_migration_progress_check_spec.rb8
-rw-r--r--spec/lib/gitlab/background_migration/reset_merge_status_spec.rb24
-rw-r--r--spec/lib/gitlab/badge/coverage/report_spec.rb2
-rw-r--r--spec/lib/gitlab/badge/coverage/template_spec.rb46
-rw-r--r--spec/lib/gitlab/badge/pipeline/template_spec.rb50
-rw-r--r--spec/lib/gitlab/bitbucket_import/importer_spec.rb5
-rw-r--r--spec/lib/gitlab/cache/import/caching_spec.rb15
-rw-r--r--spec/lib/gitlab/chat_spec.rb10
-rw-r--r--spec/lib/gitlab/ci/build/credentials/factory_spec.rb10
-rw-r--r--spec/lib/gitlab/ci/build/releaser_spec.rb51
-rw-r--r--spec/lib/gitlab/ci/build/step_spec.rb26
-rw-r--r--spec/lib/gitlab/ci/config/entry/reports_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/config/entry/retry_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/parsers/terraform/tfplan_spec.rb72
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb15
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/reports/terraform_reports_spec.rb14
-rw-r--r--spec/lib/gitlab/ci/status/bridge/factory_spec.rb3
-rw-r--r--spec/lib/gitlab/ci/status/stage/play_manual_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/yaml_processor_spec.rb2
-rw-r--r--spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb45
-rw-r--r--spec/lib/gitlab/cluster/mixins/puma_cluster_spec.rb4
-rw-r--r--spec/lib/gitlab/cluster/mixins/unicorn_http_server_spec.rb4
-rw-r--r--spec/lib/gitlab/code_navigation_path_spec.rb17
-rw-r--r--spec/lib/gitlab/config/entry/factory_spec.rb11
-rw-r--r--spec/lib/gitlab/config/loader/yaml_spec.rb47
-rw-r--r--spec/lib/gitlab/contributions_calendar_spec.rb8
-rw-r--r--spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb80
-rw-r--r--spec/lib/gitlab/danger/changelog_spec.rb4
-rw-r--r--spec/lib/gitlab/danger/helper_spec.rb2
-rw-r--r--spec/lib/gitlab/danger/roulette_spec.rb183
-rw-r--r--spec/lib/gitlab/data_builder/alert_spec.rb26
-rw-r--r--spec/lib/gitlab/database/custom_structure_spec.rb65
-rw-r--r--spec/lib/gitlab/database/migration_helpers_spec.rb151
-rw-r--r--spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb55
-rw-r--r--spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb (renamed from spec/lib/gitlab/database/partitioning_migration_helpers_spec.rb)79
-rw-r--r--spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb289
-rw-r--r--spec/lib/gitlab/database/schema_cleaner_spec.rb4
-rw-r--r--spec/lib/gitlab/database/with_lock_retries_spec.rb5
-rw-r--r--spec/lib/gitlab/dependency_linker/go_mod_linker_spec.rb94
-rw-r--r--spec/lib/gitlab/dependency_linker/go_sum_linker_spec.rb65
-rw-r--r--spec/lib/gitlab/dependency_linker_spec.rb16
-rw-r--r--spec/lib/gitlab/diff/file_spec.rb12
-rw-r--r--spec/lib/gitlab/diff/formatters/image_formatter_spec.rb1
-rw-r--r--spec/lib/gitlab/diff/formatters/text_formatter_spec.rb1
-rw-r--r--spec/lib/gitlab/diff/position_spec.rb80
-rw-r--r--spec/lib/gitlab/doctor/secrets_spec.rb42
-rw-r--r--spec/lib/gitlab/email/handler/create_note_handler_spec.rb28
-rw-r--r--spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb165
-rw-r--r--spec/lib/gitlab/error_tracking_spec.rb30
-rw-r--r--spec/lib/gitlab/etag_caching/router_spec.rb12
-rw-r--r--spec/lib/gitlab/experimentation_spec.rb2
-rw-r--r--spec/lib/gitlab/gfm/uploads_rewriter_spec.rb18
-rw-r--r--spec/lib/gitlab/git/diff_collection_spec.rb46
-rw-r--r--spec/lib/gitlab/git/diff_spec.rb30
-rw-r--r--spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb14
-rw-r--r--spec/lib/gitlab/git_access_project_spec.rb166
-rw-r--r--spec/lib/gitlab/git_access_spec.rb150
-rw-r--r--spec/lib/gitlab/gitaly_client/commit_service_spec.rb66
-rw-r--r--spec/lib/gitlab/gitaly_client_spec.rb6
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb8
-rw-r--r--spec/lib/gitlab/gl_repository/identifier_spec.rb82
-rw-r--r--spec/lib/gitlab/gl_repository/repo_type_spec.rb10
-rw-r--r--spec/lib/gitlab/gl_repository_spec.rb2
-rw-r--r--spec/lib/gitlab/gon_helper_spec.rb10
-rw-r--r--spec/lib/gitlab/graphql/pagination/filterable_array_connection_spec.rb29
-rw-r--r--spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb114
-rw-r--r--spec/lib/gitlab/import/merge_request_creator_spec.rb2
-rw-r--r--spec/lib/gitlab/import/set_async_jid_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml15
-rw-r--r--spec/lib/gitlab/import_export/attributes_permitter_spec.rb77
-rw-r--r--spec/lib/gitlab/import_export/import_test_coverage_spec.rb101
-rw-r--r--spec/lib/gitlab/import_export/importer_spec.rb51
-rw-r--r--spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb22
-rw-r--r--spec/lib/gitlab/import_export/legacy_relation_tree_saver_spec.rb34
-rw-r--r--spec/lib/gitlab/import_export/project/relation_factory_spec.rb47
-rw-r--r--spec/lib/gitlab/import_export/project/tree_restorer_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/project/tree_saver_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/repo_restorer_spec.rb27
-rw-r--r--spec/lib/gitlab/import_export/safe_model_attributes.yml13
-rw-r--r--spec/lib/gitlab/import_export/saver_spec.rb12
-rw-r--r--spec/lib/gitlab/import_export/snippet_repo_restorer_spec.rb16
-rw-r--r--spec/lib/gitlab/import_export/snippets_repo_restorer_spec.rb83
-rw-r--r--spec/lib/gitlab/instrumentation/redis_base_spec.rb144
-rw-r--r--spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb45
-rw-r--r--spec/lib/gitlab/instrumentation/redis_spec.rb114
-rw-r--r--spec/lib/gitlab/instrumentation_helper_spec.rb57
-rw-r--r--spec/lib/gitlab/issuable_metadata_spec.rb10
-rw-r--r--spec/lib/gitlab/jira_import/base_importer_spec.rb16
-rw-r--r--spec/lib/gitlab/jira_import/issues_importer_spec.rb35
-rw-r--r--spec/lib/gitlab/jira_import/labels_importer_spec.rb1
-rw-r--r--spec/lib/gitlab/jira_import_spec.rb125
-rw-r--r--spec/lib/gitlab/kubernetes/helm/api_spec.rb5
-rw-r--r--spec/lib/gitlab/kubernetes/helm/base_command_spec.rb25
-rw-r--r--spec/lib/gitlab/kubernetes/helm/delete_command_spec.rb7
-rw-r--r--spec/lib/gitlab/kubernetes/helm/init_command_spec.rb2
-rw-r--r--spec/lib/gitlab/kubernetes/helm/install_command_spec.rb8
-rw-r--r--spec/lib/gitlab/kubernetes/helm/patch_command_spec.rb8
-rw-r--r--spec/lib/gitlab/kubernetes/helm/reset_command_spec.rb2
-rw-r--r--spec/lib/gitlab/kubernetes/network_policy_spec.rb288
-rw-r--r--spec/lib/gitlab/lfs_token_spec.rb78
-rw-r--r--spec/lib/gitlab/lograge/custom_options_spec.rb57
-rw-r--r--spec/lib/gitlab/looping_batcher_spec.rb71
-rw-r--r--spec/lib/gitlab/metrics/dashboard/finder_spec.rb4
-rw-r--r--spec/lib/gitlab/metrics/dashboard/processor_spec.rb24
-rw-r--r--spec/lib/gitlab/metrics/elasticsearch_rack_middleware_spec.rb57
-rw-r--r--spec/lib/gitlab/metrics/method_call_spec.rb4
-rw-r--r--spec/lib/gitlab/metrics/methods_spec.rb4
-rw-r--r--spec/lib/gitlab/metrics/redis_rack_middleware_spec.rb61
-rw-r--r--spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb12
-rw-r--r--spec/lib/gitlab/metrics/samplers/puma_sampler_spec.rb12
-rw-r--r--spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb12
-rw-r--r--spec/lib/gitlab/metrics/sidekiq_middleware_spec.rb20
-rw-r--r--spec/lib/gitlab/metrics/subscribers/active_record_spec.rb132
-rw-r--r--spec/lib/gitlab/metrics/transaction_spec.rb40
-rw-r--r--spec/lib/gitlab/middleware/handle_ip_spoof_attack_error_spec.rb24
-rw-r--r--spec/lib/gitlab/monitor/demo_projects_spec.rb37
-rw-r--r--spec/lib/gitlab/no_cache_headers_spec.rb7
-rw-r--r--spec/lib/gitlab/pagination/keyset/request_context_spec.rb12
-rw-r--r--spec/lib/gitlab/patch/action_dispatch_journey_formatter_spec.rb2
-rw-r--r--spec/lib/gitlab/path_regex_spec.rb21
-rw-r--r--spec/lib/gitlab/phabricator_import/cache/map_spec.rb2
-rw-r--r--spec/lib/gitlab/process_memory_cache/helper_spec.rb52
-rw-r--r--spec/lib/gitlab/project_search_results_spec.rb75
-rw-r--r--spec/lib/gitlab/project_template_spec.rb33
-rw-r--r--spec/lib/gitlab/prometheus/query_variables_spec.rb40
-rw-r--r--spec/lib/gitlab/prometheus_client_spec.rb52
-rw-r--r--spec/lib/gitlab/redis/wrapper_spec.rb16
-rw-r--r--spec/lib/gitlab/reference_extractor_spec.rb6
-rw-r--r--spec/lib/gitlab/regex_spec.rb134
-rw-r--r--spec/lib/gitlab/routing_spec.rb21
-rw-r--r--spec/lib/gitlab/rugged_instrumentation_spec.rb6
-rw-r--r--spec/lib/gitlab/search_context/builder_spec.rb152
-rw-r--r--spec/lib/gitlab/search_context/controller_concern_spec.rb82
-rw-r--r--spec/lib/gitlab/sidekiq_config/cli_methods_spec.rb45
-rw-r--r--spec/lib/gitlab/sidekiq_config/worker_spec.rb9
-rw-r--r--spec/lib/gitlab/sidekiq_config_spec.rb3
-rw-r--r--spec/lib/gitlab/sidekiq_logging/json_formatter_spec.rb47
-rw-r--r--spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb10
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb7
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/client_spec.rb49
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb65
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing_spec.rb85
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb60
-rw-r--r--spec/lib/gitlab/sidekiq_middleware_spec.rb2
-rw-r--r--spec/lib/gitlab/sidekiq_versioning/manager_spec.rb3
-rw-r--r--spec/lib/gitlab/sourcegraph_spec.rb2
-rw-r--r--spec/lib/gitlab/suggestions/commit_message_spec.rb87
-rw-r--r--spec/lib/gitlab/suggestions/file_suggestion_spec.rb241
-rw-r--r--spec/lib/gitlab/suggestions/suggestion_set_spec.rb110
-rw-r--r--spec/lib/gitlab/tracking_spec.rb11
-rw-r--r--spec/lib/gitlab/url_builder_spec.rb32
-rw-r--r--spec/lib/gitlab/usage_data_concerns/topology_spec.rb220
-rw-r--r--spec/lib/gitlab/usage_data_counters/search_counter_spec.rb18
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb937
-rw-r--r--spec/lib/gitlab/utils/usage_data_spec.rb111
-rw-r--r--spec/lib/gitlab/utils_spec.rb13
-rw-r--r--spec/lib/gitlab/web_ide/config/entry/global_spec.rb164
-rw-r--r--spec/lib/gitlab/web_ide/config/entry/terminal_spec.rb156
-rw-r--r--spec/lib/gitlab/web_ide/config_spec.rb78
-rw-r--r--spec/lib/gitlab/wiki_pages/front_matter_parser_spec.rb2
-rw-r--r--spec/lib/gitlab_spec.rb42
-rw-r--r--spec/lib/milestone_array_spec.rb36
-rw-r--r--spec/lib/object_storage/direct_upload_spec.rb117
-rw-r--r--spec/lib/peek/views/bullet_detailed_spec.rb54
-rw-r--r--spec/lib/peek/views/redis_detailed_spec.rb13
-rw-r--r--spec/lib/peek/views/rugged_spec.rb2
-rw-r--r--spec/lib/quality/test_level_spec.rb8
195 files changed, 7473 insertions, 1919 deletions
diff --git a/spec/lib/api/entities/release_spec.rb b/spec/lib/api/entities/release_spec.rb
index c45dbc15856..fa9e1e74f9b 100644
--- a/spec/lib/api/entities/release_spec.rb
+++ b/spec/lib/api/entities/release_spec.rb
@@ -4,11 +4,15 @@ require 'spec_helper'
describe API::Entities::Release do
let_it_be(:project) { create(:project) }
- let_it_be(:release) { create(:release, :with_evidence, project: project) }
+ let(:release) { create(:release, project: project) }
let(:evidence) { release.evidences.first }
let(:user) { create(:user) }
let(:entity) { described_class.new(release, current_user: user).as_json }
+ before do
+ ::Releases::CreateEvidenceService.new(release).execute
+ end
+
describe 'evidences' do
context 'when the current user can download code' do
let(:entity_evidence) { entity[:evidences].first }
diff --git a/spec/lib/api/validations/validators/untrusted_regexp_spec.rb b/spec/lib/api/validations/validators/untrusted_regexp_spec.rb
new file mode 100644
index 00000000000..491bf94fd79
--- /dev/null
+++ b/spec/lib/api/validations/validators/untrusted_regexp_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe API::Validations::Validators::UntrustedRegexp do
+ include ApiValidatorsHelpers
+
+ subject do
+ described_class.new(['test'], {}, false, scope.new)
+ end
+
+ context 'valid regex' do
+ it 'does not raise a validation error' do
+ expect_no_validation_error('test' => 'test')
+ expect_no_validation_error('test' => '.*')
+ expect_no_validation_error('test' => Gitlab::Regex.environment_name_regex_chars)
+ end
+ end
+
+ context 'invalid regex' do
+ it 'raises a validation error' do
+ expect_validation_error('test' => '[')
+ expect_validation_error('test' => '*foobar')
+ expect_validation_error('test' => '?foobar')
+ expect_validation_error('test' => '\A[^/%\s]+(..\z')
+ end
+ end
+end
diff --git a/spec/lib/banzai/filter/design_reference_filter_spec.rb b/spec/lib/banzai/filter/design_reference_filter_spec.rb
new file mode 100644
index 00000000000..8a6c2e3b3f9
--- /dev/null
+++ b/spec/lib/banzai/filter/design_reference_filter_spec.rb
@@ -0,0 +1,307 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Banzai::Filter::DesignReferenceFilter do
+ include FilterSpecHelper
+ include DesignManagementTestHelpers
+
+ let_it_be(:issue) { create(:issue, iid: 10) }
+ let_it_be(:issue_proj_2) { create(:issue, iid: 20) }
+ let_it_be(:issue_b) { create(:issue, project: issue.project) }
+ let_it_be(:developer) { create(:user, developer_projects: [issue.project, issue_proj_2.project]) }
+ let_it_be(:design_a) { create(:design, :with_versions, issue: issue) }
+ let_it_be(:design_b) { create(:design, :with_versions, issue: issue_b) }
+ let_it_be(:design_proj_2) { create(:design, :with_versions, issue: issue_proj_2) }
+ let_it_be(:project_with_no_lfs) { create(:project, :public, lfs_enabled: false) }
+
+ let(:design) { design_a }
+ let(:project) { issue.project }
+ let(:project_2) { issue_proj_2.project }
+ let(:reference) { design.to_reference }
+ let(:design_url) { url_for_design(design) }
+ let(:input_text) { "Added #{design_url}" }
+ let(:doc) { process_doc(input_text) }
+ let(:current_user) { developer }
+
+ before do
+ enable_design_management
+ end
+
+ shared_examples 'a no-op filter' do
+ it 'does nothing' do
+ expect(process(input_text)).to eq(baseline(input_text).to_html)
+ end
+ end
+
+ shared_examples 'a good link reference' do
+ let(:link) { doc.css('a').first }
+ let(:href) { url_for_design(design) }
+ let(:title) { design.filename }
+
+ it 'produces a good link', :aggregate_failures do
+ expect(link.attr('href')).to eq(href)
+ expect(link.attr('title')).to eq(title)
+ expect(link.attr('class')).to eq('gfm gfm-design has-tooltip')
+ expect(link.attr('data-project')).to eq(design.project.id.to_s)
+ expect(link.attr('data-issue')).to eq(design.issue.id.to_s)
+ expect(link.attr('data-original')).to eq(href)
+ expect(link.attr('data-reference-type')).to eq('design')
+ expect(link.text).to eq(design.to_reference(project))
+ end
+ end
+
+ describe '.call' do
+ it 'requires project context' do
+ expect { described_class.call('') }.to raise_error(ArgumentError, /:project/)
+ end
+ end
+
+ it 'does not error when we add redaction to the pipeline' do
+ enable_design_management
+
+ res = reference_pipeline(redact: true).to_document(input_text)
+
+ expect(res.css('a').first).to be_present
+ end
+
+ describe '#call' do
+ describe 'feature flags' do
+ context 'design management is not enabled' do
+ before do
+ enable_design_management(false)
+ end
+
+ it_behaves_like 'a no-op filter'
+ end
+
+ context 'design reference filter is not enabled' do
+ before do
+ stub_feature_flags(described_class::FEATURE_FLAG => false)
+ end
+
+ it_behaves_like 'a no-op filter'
+
+ it 'issues no queries' do
+ expect { process(input_text) }.not_to exceed_query_limit(0)
+ end
+ end
+
+ context 'the filter is enabled for the context project' do
+ before do
+ stub_feature_flags(described_class::FEATURE_FLAG => project)
+ end
+
+ it_behaves_like 'a good link reference'
+ end
+ end
+ end
+
+ %w(pre code a style).each do |elem|
+ context "wrapped in a <#{elem}/>" do
+ let(:input_text) { "<#{elem}>Design #{url_for_design(design)}</#{elem}>" }
+
+ it_behaves_like 'a no-op filter'
+ end
+ end
+
+ describe '.identifier' do
+ where(:filename) do
+ [
+ ['simple.png'],
+ ['SIMPLE.PNG'],
+ ['has spaces.png'],
+ ['has-hyphen.jpg'],
+ ['snake_case.svg'],
+ ['has "quotes".svg'],
+ ['has <special> characters [o].svg']
+ ]
+ end
+
+ with_them do
+ let(:design) { build(:design, issue: issue, filename: filename) }
+ let(:url) { url_for_design(design) }
+ let(:pattern) { described_class.object_class.link_reference_pattern }
+ let(:parsed) do
+ m = pattern.match(url)
+ described_class.identifier(m) if m
+ end
+
+ it 'can parse the reference' do
+ expect(parsed).to have_attributes(
+ filename: filename,
+ issue_iid: issue.iid
+ )
+ end
+ end
+ end
+
+ describe 'static properties' do
+ specify do
+ expect(described_class).to have_attributes(
+ object_sym: :design,
+ object_class: ::DesignManagement::Design
+ )
+ end
+ end
+
+ describe '#data_attributes_for' do
+ let(:subject) { filter_instance.data_attributes_for(input_text, project, design) }
+
+ specify do
+ is_expected.to include(issue: design.issue_id,
+ original: input_text,
+ project: project.id,
+ design: design.id)
+ end
+ end
+
+ context 'a design with a quoted filename' do
+ let(:filename) { %q{A "very" good file.png} }
+ let(:design) { create(:design, :with_versions, issue: issue, filename: filename) }
+
+ it 'links to the design' do
+ expect(doc.css('a').first.attr('href'))
+ .to eq url_for_design(design)
+ end
+ end
+
+ context 'internal reference' do
+ it_behaves_like 'a reference containing an element node'
+
+ context 'the reference is valid' do
+ it_behaves_like 'a good link reference'
+
+ context 'the filename needs to be escaped' do
+ where(:filename) do
+ [
+ ['with some spaces.png'],
+ ['with <script>console.log("pwded")<%2Fscript>.png']
+ ]
+ end
+
+ with_them do
+ let(:design) { create(:design, :with_versions, filename: filename, issue: issue) }
+ let(:link) { doc.css('a').first }
+
+ it 'replaces the content with the reference, but keeps the link', :aggregate_failures do
+ expect(doc.text).to eq(CGI.unescapeHTML("Added #{design.to_reference}"))
+ expect(link.attr('title')).to eq(design.filename)
+ expect(link.attr('href')).to eq(design_url)
+ end
+ end
+ end
+ end
+
+ context 'the reference is to a non-existant design' do
+ let(:design_url) { url_for_design(build(:design, issue: issue)) }
+
+ it_behaves_like 'a no-op filter'
+ end
+
+ context 'design management is disabled for the referenced project' do
+ let(:public_issue) { create(:issue, project: project_with_no_lfs) }
+ let(:design) { create(:design, :with_versions, issue: public_issue) }
+
+ it_behaves_like 'a no-op filter'
+ end
+ end
+
+ describe 'link pattern' do
+ let(:reference) { url_for_design(design) }
+
+ it 'matches' do
+ expect(reference).to match(DesignManagement::Design.link_reference_pattern)
+ end
+ end
+
+ context 'cross-project / cross-namespace complete reference' do
+ let(:design) { design_proj_2 }
+
+ it_behaves_like 'a reference containing an element node'
+
+ it_behaves_like 'a good link reference'
+
+ it 'links to a valid reference' do
+ expect(doc.css('a').first.attr('href')).to eq(design_url)
+ end
+
+ context 'design management is disabled for that project' do
+ let(:design) { create(:design, project: project_with_no_lfs) }
+
+ it_behaves_like 'a no-op filter'
+ end
+
+ it 'link has valid text' do
+ ref = "#{design.project.full_path}##{design.issue.iid}[#{design.filename}]"
+
+ expect(doc.css('a').first.text).to eql(ref)
+ end
+
+ it 'includes default classes' do
+ expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-design has-tooltip'
+ end
+
+ context 'the reference is invalid' do
+ let(:design_url) { url_for_design(design).gsub(/jpg/, 'gif') }
+
+ it_behaves_like 'a no-op filter'
+ end
+ end
+
+ describe 'performance' do
+ it 'is linear in the number of projects with design management enabled each design refers to' do
+ design_c = build(:design, :with_versions, issue: issue)
+ design_d = build(:design, :with_versions, issue: issue_b)
+ design_e = build(:design, :with_versions, issue: build_stubbed(:issue, project: project_2))
+
+ one_ref_per_project = <<~MD
+ Design #{url_for_design(design_a)}, #{url_for_design(design_proj_2)}
+ MD
+
+ multiple_references = <<~MD
+ Designs that affect the count:
+ * #{url_for_design(design_a)}
+ * #{url_for_design(design_b)}
+ * #{url_for_design(design_c)}
+ * #{url_for_design(design_d)}
+ * #{url_for_design(design_proj_2)}
+ * #{url_for_design(design_e)}
+
+ Things that do not affect the count:
+ * #{url_for_design(build_stubbed(:design, project: project_with_no_lfs))}
+ * #{url_for_designs(issue)}
+ * #1[not a valid reference.gif]
+ MD
+
+ baseline = ActiveRecord::QueryRecorder.new { process(one_ref_per_project) }
+
+ # each project mentioned requires 2 queries:
+ #
+ # * SELECT "issues".* FROM "issues" WHERE "issues"."project_id" = 1 AND ...
+ # :in `parent_records'*/
+ # * SELECT "_designs".* FROM "_designs"
+ # WHERE (issue_id = ? AND filename = ?) OR ...
+ # :in `parent_records'*/
+ #
+ # In addition there is a 1 query overhead for all the projects at the
+ # start. Currently, the baseline for 2 projects is `2 * 2 + 1 = 5` queries
+ #
+ expect { process(multiple_references) }.not_to exceed_query_limit(baseline.count)
+ end
+ end
+
+ private
+
+ def process_doc(text)
+ reference_filter(text, project: project)
+ end
+
+ def baseline(text)
+ null_filter(text, project: project)
+ end
+
+ def process(text)
+ process_doc(text).to_html
+ end
+end
diff --git a/spec/lib/banzai/filter/external_issue_reference_filter_spec.rb b/spec/lib/banzai/filter/external_issue_reference_filter_spec.rb
index 78795a157f8..a70c820f97a 100644
--- a/spec/lib/banzai/filter/external_issue_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/external_issue_reference_filter_spec.rb
@@ -5,10 +5,6 @@ require 'spec_helper'
describe Banzai::Filter::ExternalIssueReferenceFilter do
include FilterSpecHelper
- def helper
- IssuesHelper
- end
-
shared_examples_for "external issue tracker" do
it_behaves_like 'a reference containing an element node'
@@ -36,7 +32,7 @@ describe Banzai::Filter::ExternalIssueReferenceFilter do
issue_id = doc.css('a').first.attr("data-external-issue")
expect(doc.css('a').first.attr('href'))
- .to eq helper.url_for_issue(issue_id, project)
+ .to eq project.external_issue_tracker.issue_url(issue_id)
end
it 'links to the external tracker' do
@@ -45,7 +41,7 @@ describe Banzai::Filter::ExternalIssueReferenceFilter do
link = doc.css('a').first.attr('href')
issue_id = doc.css('a').first.attr("data-external-issue")
- expect(link).to eq(helper.url_for_issue(issue_id, project))
+ expect(link).to eq(project.external_issue_tracker.issue_url(issue_id))
end
it 'links with adjacent text' do
@@ -56,7 +52,7 @@ describe Banzai::Filter::ExternalIssueReferenceFilter do
it 'includes a title attribute' do
doc = filter("Issue #{reference}")
- expect(doc.css('a').first.attr('title')).to include("Issue in #{project.issues_tracker.title}")
+ expect(doc.css('a').first.attr('title')).to include("Issue in #{project.external_issue_tracker.title}")
end
it 'escapes the title attribute' do
@@ -78,7 +74,25 @@ describe Banzai::Filter::ExternalIssueReferenceFilter do
link = doc.css('a').first.attr('href')
issue_id = doc.css('a').first["data-external-issue"]
- expect(link).to eq helper.url_for_issue(issue_id, project, only_path: true)
+ expect(link).to eq project.external_issue_tracker.issue_path(issue_id)
+ end
+
+ it 'has an empty link if issue_url is invalid' do
+ expect_any_instance_of(project.external_issue_tracker.class).to receive(:issue_url) { 'javascript:alert("foo");' }
+
+ doc = filter("Issue #{reference}")
+ link = doc.css('a').first.attr('href')
+
+ expect(link).to eq ''
+ end
+
+ it 'has an empty link if issue_path is invalid' do
+ expect_any_instance_of(project.external_issue_tracker.class).to receive(:issue_path) { 'javascript:alert("foo");' }
+
+ doc = filter("Issue #{reference}", only_path: true)
+ link = doc.css('a').first.attr('href')
+
+ expect(link).to eq ''
end
context 'with RequestStore enabled', :request_store do
diff --git a/spec/lib/banzai/filter/gollum_tags_filter_spec.rb b/spec/lib/banzai/filter/gollum_tags_filter_spec.rb
index 1580177eaad..00d8b871224 100644
--- a/spec/lib/banzai/filter/gollum_tags_filter_spec.rb
+++ b/spec/lib/banzai/filter/gollum_tags_filter_spec.rb
@@ -7,11 +7,11 @@ describe Banzai::Filter::GollumTagsFilter do
let(:project) { create(:project) }
let(:user) { double }
- let(:project_wiki) { ProjectWiki.new(project, user) }
+ let(:wiki) { ProjectWiki.new(project, user) }
describe 'validation' do
- it 'ensure that a :project_wiki key exists in context' do
- expect { filter("See [[images/image.jpg]]", {}) }.to raise_error ArgumentError, "Missing context keys for Banzai::Filter::GollumTagsFilter: :project_wiki"
+ it 'ensure that a :wiki key exists in context' do
+ expect { filter("See [[images/image.jpg]]", {}) }.to raise_error ArgumentError, "Missing context keys for Banzai::Filter::GollumTagsFilter: :wiki"
end
end
@@ -23,19 +23,19 @@ describe Banzai::Filter::GollumTagsFilter do
path: 'images/image.jpg',
raw_data: '')
wiki_file = Gitlab::Git::WikiFile.new(gollum_file_double)
- expect(project_wiki).to receive(:find_file).with('images/image.jpg').and_return(wiki_file)
+ expect(wiki).to receive(:find_file).with('images/image.jpg').and_return(wiki_file)
tag = '[[images/image.jpg]]'
- doc = filter("See #{tag}", project_wiki: project_wiki)
+ doc = filter("See #{tag}", wiki: wiki)
- expect(doc.at_css('img')['data-src']).to eq "#{project_wiki.wiki_base_path}/images/image.jpg"
+ expect(doc.at_css('img')['data-src']).to eq "#{wiki.wiki_base_path}/images/image.jpg"
end
it 'does not creates img tag if image does not exist' do
- expect(project_wiki).to receive(:find_file).with('images/image.jpg').and_return(nil)
+ expect(wiki).to receive(:find_file).with('images/image.jpg').and_return(nil)
tag = '[[images/image.jpg]]'
- doc = filter("See #{tag}", project_wiki: project_wiki)
+ doc = filter("See #{tag}", wiki: wiki)
expect(doc.css('img').size).to eq 0
end
@@ -44,14 +44,14 @@ describe Banzai::Filter::GollumTagsFilter do
context 'linking external images' do
it 'creates img tag for valid URL' do
tag = '[[http://example.com/image.jpg]]'
- doc = filter("See #{tag}", project_wiki: project_wiki)
+ doc = filter("See #{tag}", wiki: wiki)
expect(doc.at_css('img')['data-src']).to eq "http://example.com/image.jpg"
end
it 'does not creates img tag for invalid URL' do
tag = '[[http://example.com/image.pdf]]'
- doc = filter("See #{tag}", project_wiki: project_wiki)
+ doc = filter("See #{tag}", wiki: wiki)
expect(doc.css('img').size).to eq 0
end
@@ -60,7 +60,7 @@ describe Banzai::Filter::GollumTagsFilter do
context 'linking external resources' do
it "the created link's text will be equal to the resource's text" do
tag = '[[http://example.com]]'
- doc = filter("See #{tag}", project_wiki: project_wiki)
+ doc = filter("See #{tag}", wiki: wiki)
expect(doc.at_css('a').text).to eq 'http://example.com'
expect(doc.at_css('a')['href']).to eq 'http://example.com'
@@ -68,7 +68,7 @@ describe Banzai::Filter::GollumTagsFilter do
it "the created link's text will be link-text" do
tag = '[[link-text|http://example.com/pdfs/gollum.pdf]]'
- doc = filter("See #{tag}", project_wiki: project_wiki)
+ doc = filter("See #{tag}", wiki: wiki)
expect(doc.at_css('a').text).to eq 'link-text'
expect(doc.at_css('a')['href']).to eq 'http://example.com/pdfs/gollum.pdf'
@@ -78,8 +78,8 @@ describe Banzai::Filter::GollumTagsFilter do
context 'linking internal resources' do
it "the created link's text includes the resource's text and wiki base path" do
tag = '[[wiki-slug]]'
- doc = filter("See #{tag}", project_wiki: project_wiki)
- expected_path = ::File.join(project_wiki.wiki_base_path, 'wiki-slug')
+ doc = filter("See #{tag}", wiki: wiki)
+ expected_path = ::File.join(wiki.wiki_base_path, 'wiki-slug')
expect(doc.at_css('a').text).to eq 'wiki-slug'
expect(doc.at_css('a')['href']).to eq expected_path
@@ -87,15 +87,15 @@ describe Banzai::Filter::GollumTagsFilter do
it "the created link's text will be link-text" do
tag = '[[link-text|wiki-slug]]'
- doc = filter("See #{tag}", project_wiki: project_wiki)
- expected_path = ::File.join(project_wiki.wiki_base_path, 'wiki-slug')
+ doc = filter("See #{tag}", wiki: wiki)
+ expected_path = ::File.join(wiki.wiki_base_path, 'wiki-slug')
expect(doc.at_css('a').text).to eq 'link-text'
expect(doc.at_css('a')['href']).to eq expected_path
end
it "inside back ticks will be exempt from linkification" do
- doc = filter('<code>[[link-in-backticks]]</code>', project_wiki: project_wiki)
+ doc = filter('<code>[[link-in-backticks]]</code>', wiki: wiki)
expect(doc.at_css('code').text).to eq '[[link-in-backticks]]'
end
diff --git a/spec/lib/banzai/filter/issue_reference_filter_spec.rb b/spec/lib/banzai/filter/issue_reference_filter_spec.rb
index 61c59162a30..603da2b4421 100644
--- a/spec/lib/banzai/filter/issue_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/issue_reference_filter_spec.rb
@@ -11,7 +11,9 @@ describe Banzai::Filter::IssueReferenceFilter do
end
let(:project) { create(:project, :public) }
- let(:issue) { create(:issue, project: project) }
+ let(:issue) { create(:issue, project: project) }
+ let(:issue_path) { "/#{issue.project.namespace.path}/#{issue.project.path}/-/issues/#{issue.iid}" }
+ let(:issue_url) { "http://#{Gitlab.config.gitlab.host}#{issue_path}" }
it 'requires project context' do
expect { described_class.call('') }.to raise_error(ArgumentError, /:project/)
@@ -46,7 +48,7 @@ describe Banzai::Filter::IssueReferenceFilter do
doc = reference_filter("Fixed #{reference}")
expect(doc.css('a').first.attr('href'))
- .to eq helper.url_for_issue(issue.iid, project)
+ .to eq issue_url
end
it 'links with adjacent text' do
@@ -113,7 +115,7 @@ describe Banzai::Filter::IssueReferenceFilter do
link = doc.css('a').first.attr('href')
expect(link).not_to match %r(https?://)
- expect(link).to eq helper.url_for_issue(issue.iid, project, only_path: true)
+ expect(link).to eq issue_path
end
it 'does not process links containing issue numbers followed by text' do
@@ -145,7 +147,7 @@ describe Banzai::Filter::IssueReferenceFilter do
doc = reference_filter("See #{reference}")
expect(doc.css('a').first.attr('href'))
- .to eq helper.url_for_issue(issue.iid, project2)
+ .to eq issue_url
end
it 'link has valid text' do
@@ -195,7 +197,7 @@ describe Banzai::Filter::IssueReferenceFilter do
doc = reference_filter("See #{reference}")
expect(doc.css('a').first.attr('href'))
- .to eq helper.url_for_issue(issue.iid, project2)
+ .to eq issue_url
end
it 'link has valid text' do
@@ -245,7 +247,7 @@ describe Banzai::Filter::IssueReferenceFilter do
doc = reference_filter("See #{reference}")
expect(doc.css('a').first.attr('href'))
- .to eq helper.url_for_issue(issue.iid, project2)
+ .to eq issue_url
end
it 'link has valid text' do
@@ -279,7 +281,7 @@ describe Banzai::Filter::IssueReferenceFilter do
let(:namespace) { create(:namespace, name: 'cross-reference') }
let(:project2) { create(:project, :public, namespace: namespace) }
let(:issue) { create(:issue, project: project2) }
- let(:reference) { helper.url_for_issue(issue.iid, project2) + "#note_123" }
+ let(:reference) { issue_url + "#note_123" }
it 'links to a valid reference' do
doc = reference_filter("See #{reference}")
@@ -314,7 +316,7 @@ describe Banzai::Filter::IssueReferenceFilter do
doc = reference_filter("See #{reference_link}")
expect(doc.css('a').first.attr('href'))
- .to eq helper.url_for_issue(issue.iid, project2)
+ .to eq issue_url
end
it 'links with adjacent text' do
@@ -336,14 +338,14 @@ describe Banzai::Filter::IssueReferenceFilter do
let(:namespace) { create(:namespace, name: 'cross-reference') }
let(:project2) { create(:project, :public, namespace: namespace) }
let(:issue) { create(:issue, project: project2) }
- let(:reference) { "#{helper.url_for_issue(issue.iid, project2) + "#note_123"}" }
+ let(:reference) { "#{issue_url + "#note_123"}" }
let(:reference_link) { %{<a href="#{reference}">Reference</a>} }
it 'links to a valid reference' do
doc = reference_filter("See #{reference_link}")
expect(doc.css('a').first.attr('href'))
- .to eq helper.url_for_issue(issue.iid, project2) + "#note_123"
+ .to eq issue_url + "#note_123"
end
it 'links with adjacent text' do
@@ -374,6 +376,16 @@ describe Banzai::Filter::IssueReferenceFilter do
expect(link.attr('href')).to eq(designs_tab_url)
expect(link.text).to eq("#{issue.to_reference} (designs)")
end
+
+ context 'design management is not available' do
+ before do
+ enable_design_management(false)
+ end
+
+ it 'links to the issue, but not to the designs tab' do
+ expect(link.text).to eq(issue.to_reference)
+ end
+ end
end
context 'group context' do
@@ -403,7 +415,7 @@ describe Banzai::Filter::IssueReferenceFilter do
doc = reference_filter("See #{reference}", context)
link = doc.css('a').first
- expect(link.attr('href')).to eq(helper.url_for_issue(issue.iid, project))
+ expect(link.attr('href')).to eq(issue_url)
expect(link.text).to include("#{project.full_path}##{issue.iid}")
end
@@ -415,23 +427,23 @@ describe Banzai::Filter::IssueReferenceFilter do
end
it 'links to a valid reference for url cross-reference' do
- reference = helper.url_for_issue(issue.iid, project) + "#note_123"
+ reference = issue_url + "#note_123"
doc = reference_filter("See #{reference}", context)
link = doc.css('a').first
- expect(link.attr('href')).to eq(helper.url_for_issue(issue.iid, project) + "#note_123")
+ expect(link.attr('href')).to eq(issue_url + "#note_123")
expect(link.text).to include("#{project.full_path}##{issue.iid}")
end
it 'links to a valid reference for cross-reference in link href' do
- reference = "#{helper.url_for_issue(issue.iid, project) + "#note_123"}"
+ reference = "#{issue_url + "#note_123"}"
reference_link = %{<a href="#{reference}">Reference</a>}
doc = reference_filter("See #{reference_link}", context)
link = doc.css('a').first
- expect(link.attr('href')).to eq(helper.url_for_issue(issue.iid, project) + "#note_123")
+ expect(link.attr('href')).to eq(issue_url + "#note_123")
expect(link.text).to include('Reference')
end
@@ -441,7 +453,7 @@ describe Banzai::Filter::IssueReferenceFilter do
doc = reference_filter("See #{reference_link}", context)
link = doc.css('a').first
- expect(link.attr('href')).to eq(helper.url_for_issue(issue.iid, project))
+ expect(link.attr('href')).to eq(issue_url)
expect(link.text).to include('Reference')
end
end
diff --git a/spec/lib/banzai/filter/label_reference_filter_spec.rb b/spec/lib/banzai/filter/label_reference_filter_spec.rb
index de7a70db1ac..0b697ab2040 100644
--- a/spec/lib/banzai/filter/label_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/label_reference_filter_spec.rb
@@ -47,14 +47,34 @@ describe Banzai::Filter::LabelReferenceFilter do
expect(link.attr('data-label')).to eq label.id.to_s
end
- it 'supports an :only_path context' do
+ it 'includes protocol when :only_path not present' do
+ doc = reference_filter("Label #{reference}")
+ link = doc.css('a').first.attr('href')
+
+ expect(link).to match %r(https?://)
+ end
+
+ it 'does not include protocol when :only_path true' do
doc = reference_filter("Label #{reference}", only_path: true)
link = doc.css('a').first.attr('href')
expect(link).not_to match %r(https?://)
+ end
+
+ it 'links to issue list when :label_url_method is not present' do
+ doc = reference_filter("Label #{reference}", only_path: true)
+ link = doc.css('a').first.attr('href')
+
expect(link).to eq urls.project_issues_path(project, label_name: label.name)
end
+ it 'links to merge request list when `label_url_method: :project_merge_requests_url`' do
+ doc = reference_filter("Label #{reference}", { only_path: true, label_url_method: "project_merge_requests_url" })
+ link = doc.css('a').first.attr('href')
+
+ expect(link).to eq urls.project_merge_requests_path(project, label_name: label.name)
+ end
+
context 'project that does not exist referenced' do
let(:result) { reference_filter('aaa/bbb~ccc') }
diff --git a/spec/lib/banzai/filter/repository_link_filter_spec.rb b/spec/lib/banzai/filter/repository_link_filter_spec.rb
index 460c76acd78..81f93f885f7 100644
--- a/spec/lib/banzai/filter/repository_link_filter_spec.rb
+++ b/spec/lib/banzai/filter/repository_link_filter_spec.rb
@@ -12,7 +12,7 @@ describe Banzai::Filter::RepositoryLinkFilter do
project: project,
current_user: user,
group: group,
- project_wiki: project_wiki,
+ wiki: wiki,
ref: ref,
requested_path: requested_path,
only_path: only_path
@@ -53,7 +53,7 @@ describe Banzai::Filter::RepositoryLinkFilter do
let(:project_path) { project.full_path }
let(:ref) { 'markdown' }
let(:commit) { project.commit(ref) }
- let(:project_wiki) { nil }
+ let(:wiki) { nil }
let(:requested_path) { '/' }
let(:only_path) { true }
@@ -94,8 +94,8 @@ describe Banzai::Filter::RepositoryLinkFilter do
end
end
- context 'with a project_wiki' do
- let(:project_wiki) { double('ProjectWiki') }
+ context 'with a wiki' do
+ let(:wiki) { double('ProjectWiki') }
include_examples :preserve_unchanged
end
diff --git a/spec/lib/banzai/filter/wiki_link_filter_spec.rb b/spec/lib/banzai/filter/wiki_link_filter_spec.rb
index 4587bd85939..827f38ef717 100644
--- a/spec/lib/banzai/filter/wiki_link_filter_spec.rb
+++ b/spec/lib/banzai/filter/wiki_link_filter_spec.rb
@@ -12,13 +12,13 @@ describe Banzai::Filter::WikiLinkFilter do
let(:repository_upload_folder) { Wikis::CreateAttachmentService::ATTACHMENT_PATH }
it "doesn't rewrite absolute links" do
- filtered_link = filter("<a href='http://example.com:8000/'>Link</a>", project_wiki: wiki).children[0]
+ filtered_link = filter("<a href='http://example.com:8000/'>Link</a>", wiki: wiki).children[0]
expect(filtered_link.attribute('href').value).to eq('http://example.com:8000/')
end
it "doesn't rewrite links to project uploads" do
- filtered_link = filter("<a href='/uploads/a.test'>Link</a>", project_wiki: wiki).children[0]
+ filtered_link = filter("<a href='/uploads/a.test'>Link</a>", wiki: wiki).children[0]
expect(filtered_link.attribute('href').value).to eq('/uploads/a.test')
end
@@ -26,7 +26,7 @@ describe Banzai::Filter::WikiLinkFilter do
describe "when links point to the #{Wikis::CreateAttachmentService::ATTACHMENT_PATH} folder" do
context 'with an "a" html tag' do
it 'rewrites links' do
- filtered_link = filter("<a href='#{repository_upload_folder}/a.test'>Link</a>", project_wiki: wiki).children[0]
+ filtered_link = filter("<a href='#{repository_upload_folder}/a.test'>Link</a>", wiki: wiki).children[0]
expect(filtered_link.attribute('href').value).to eq("#{wiki.wiki_base_path}/#{repository_upload_folder}/a.test")
end
@@ -37,7 +37,7 @@ describe Banzai::Filter::WikiLinkFilter do
context 'inside an "a" html tag' do
it 'rewrites links' do
- filtered_elements = filter("<a href='#{repository_upload_folder}/a.jpg'><img src='#{repository_upload_folder}/a.jpg'>example</img></a>", project_wiki: wiki)
+ filtered_elements = filter("<a href='#{repository_upload_folder}/a.jpg'><img src='#{repository_upload_folder}/a.jpg'>example</img></a>", wiki: wiki)
expect(filtered_elements.search('img').first.attribute('src').value).to eq(path)
expect(filtered_elements.search('a').first.attribute('href').value).to eq(path)
@@ -46,7 +46,7 @@ describe Banzai::Filter::WikiLinkFilter do
context 'outside an "a" html tag' do
it 'rewrites links' do
- filtered_link = filter("<img src='#{repository_upload_folder}/a.jpg'>example</img>", project_wiki: wiki).children[0]
+ filtered_link = filter("<img src='#{repository_upload_folder}/a.jpg'>example</img>", wiki: wiki).children[0]
expect(filtered_link.attribute('src').value).to eq(path)
end
@@ -55,7 +55,7 @@ describe Banzai::Filter::WikiLinkFilter do
context 'with "video" html tag' do
it 'rewrites links' do
- filtered_link = filter("<video src='#{repository_upload_folder}/a.mp4'></video>", project_wiki: wiki).children[0]
+ filtered_link = filter("<video src='#{repository_upload_folder}/a.mp4'></video>", wiki: wiki).children[0]
expect(filtered_link.attribute('src').value).to eq("#{wiki.wiki_base_path}/#{repository_upload_folder}/a.mp4")
end
@@ -63,7 +63,7 @@ describe Banzai::Filter::WikiLinkFilter do
context 'with "audio" html tag' do
it 'rewrites links' do
- filtered_link = filter("<audio src='#{repository_upload_folder}/a.wav'></audio>", project_wiki: wiki).children[0]
+ filtered_link = filter("<audio src='#{repository_upload_folder}/a.wav'></audio>", wiki: wiki).children[0]
expect(filtered_link.attribute('src').value).to eq("#{wiki.wiki_base_path}/#{repository_upload_folder}/a.wav")
end
@@ -75,7 +75,7 @@ describe Banzai::Filter::WikiLinkFilter do
invalid_links.each do |invalid_link|
it "doesn't rewrite invalid invalid_links like #{invalid_link}" do
- filtered_link = filter("<a href='#{invalid_link}'>Link</a>", project_wiki: wiki).children[0]
+ filtered_link = filter("<a href='#{invalid_link}'>Link</a>", wiki: wiki).children[0]
expect(filtered_link.attribute('href').value).to eq(invalid_link)
end
diff --git a/spec/lib/banzai/pipeline/description_pipeline_spec.rb b/spec/lib/banzai/pipeline/description_pipeline_spec.rb
index 5ecd3df5151..6778a273bba 100644
--- a/spec/lib/banzai/pipeline/description_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/description_pipeline_spec.rb
@@ -3,12 +3,14 @@
require 'spec_helper'
describe Banzai::Pipeline::DescriptionPipeline do
+ let_it_be(:project) { create(:project) }
+
def parse(html)
# When we pass HTML to Redcarpet, it gets wrapped in `p` tags...
# ...except when we pass it pre-wrapped text. Rabble rabble.
unwrap = !html.start_with?('<p ')
- output = described_class.to_html(html, project: spy)
+ output = described_class.to_html(html, project: project)
output.gsub!(%r{\A<p dir="auto">(.*)</p>(.*)\z}, '\1\2') if unwrap
diff --git a/spec/lib/banzai/pipeline/wiki_pipeline_spec.rb b/spec/lib/banzai/pipeline/wiki_pipeline_spec.rb
index 4d16c568c13..b2c24284eb9 100644
--- a/spec/lib/banzai/pipeline/wiki_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/wiki_pipeline_spec.rb
@@ -3,6 +3,11 @@
require 'spec_helper'
describe Banzai::Pipeline::WikiPipeline do
+ let_it_be(:namespace) { create(:namespace, name: "wiki_link_ns") }
+ let_it_be(:project) { create(:project, :public, name: "wiki_link_project", namespace: namespace) }
+ let_it_be(:wiki) { ProjectWiki.new(project, double(:user)) }
+ let_it_be(:page) { build(:wiki_page, wiki: wiki, title: 'nested/twice/start-page') }
+
describe 'TableOfContents' do
it 'replaces the tag with the TableOfContentsFilter result' do
markdown = <<-MD.strip_heredoc
@@ -13,7 +18,7 @@ describe Banzai::Pipeline::WikiPipeline do
Foo
MD
- result = described_class.call(markdown, project: spy, project_wiki: spy)
+ result = described_class.call(markdown, project: project, wiki: wiki)
aggregate_failures do
expect(result[:output].text).not_to include '[['
@@ -31,7 +36,7 @@ describe Banzai::Pipeline::WikiPipeline do
Foo
MD
- output = described_class.to_html(markdown, project: spy, project_wiki: spy)
+ output = described_class.to_html(markdown, project: project, wiki: wiki)
expect(output).to include('[[<em>toc</em>]]')
end
@@ -44,7 +49,7 @@ describe Banzai::Pipeline::WikiPipeline do
Foo
MD
- output = described_class.to_html(markdown, project: spy, project_wiki: spy)
+ output = described_class.to_html(markdown, project: project, wiki: wiki)
aggregate_failures do
expect(output).not_to include('<ul>')
@@ -54,30 +59,25 @@ describe Banzai::Pipeline::WikiPipeline do
end
describe "Links" do
- let(:namespace) { create(:namespace, name: "wiki_link_ns") }
- let(:project) { create(:project, :public, name: "wiki_link_project", namespace: namespace) }
- let(:project_wiki) { ProjectWiki.new(project, double(:user)) }
- let(:page) { build(:wiki_page, wiki: project_wiki, title: 'nested/twice/start-page') }
-
{ 'when GitLab is hosted at a root URL' => '',
'when GitLab is hosted at a relative URL' => '/nested/relative/gitlab' }.each do |test_name, relative_url_root|
context test_name do
before do
- allow(Gitlab.config.gitlab).to receive(:relative_url_root).and_return(relative_url_root)
+ allow(Rails.application.routes).to receive(:default_url_options).and_return(script_name: relative_url_root)
end
describe "linking to pages within the wiki" do
context "when creating hierarchical links to the current directory" do
it "rewrites non-file links to be at the scope of the current directory" do
markdown = "[Page](./page)"
- output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
+ output = described_class.to_html(markdown, project: project, wiki: wiki, page_slug: page.slug)
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/-/wikis/nested/twice/page\"")
end
it "rewrites file links to be at the scope of the current directory" do
markdown = "[Link to Page](./page.md)"
- output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
+ output = described_class.to_html(markdown, project: project, wiki: wiki, page_slug: page.slug)
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/-/wikis/nested/twice/page.md\"")
end
@@ -86,14 +86,14 @@ describe Banzai::Pipeline::WikiPipeline do
context "when creating hierarchical links to the parent directory" do
it "rewrites non-file links to be at the scope of the parent directory" do
markdown = "[Link to Page](../page)"
- output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
+ output = described_class.to_html(markdown, project: project, wiki: wiki, page_slug: page.slug)
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/-/wikis/nested/page\"")
end
it "rewrites file links to be at the scope of the parent directory" do
markdown = "[Link to Page](../page.md)"
- output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
+ output = described_class.to_html(markdown, project: project, wiki: wiki, page_slug: page.slug)
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/-/wikis/nested/page.md\"")
end
@@ -102,14 +102,14 @@ describe Banzai::Pipeline::WikiPipeline do
context "when creating hierarchical links to a sub-directory" do
it "rewrites non-file links to be at the scope of the sub-directory" do
markdown = "[Link to Page](./subdirectory/page)"
- output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
+ output = described_class.to_html(markdown, project: project, wiki: wiki, page_slug: page.slug)
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/-/wikis/nested/twice/subdirectory/page\"")
end
it "rewrites file links to be at the scope of the sub-directory" do
markdown = "[Link to Page](./subdirectory/page.md)"
- output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
+ output = described_class.to_html(markdown, project: project, wiki: wiki, page_slug: page.slug)
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/-/wikis/nested/twice/subdirectory/page.md\"")
end
@@ -118,35 +118,35 @@ describe Banzai::Pipeline::WikiPipeline do
describe "when creating non-hierarchical links" do
it 'rewrites non-file links to be at the scope of the wiki root' do
markdown = "[Link to Page](page)"
- output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
+ output = described_class.to_html(markdown, project: project, wiki: wiki, page_slug: page.slug)
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/-/wikis/page\"")
end
it 'rewrites non-file links (with spaces) to be at the scope of the wiki root' do
markdown = "[Link to Page](page slug)"
- output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
+ output = described_class.to_html(markdown, project: project, wiki: wiki, page_slug: page.slug)
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/-/wikis/page%20slug\"")
end
it "rewrites file links to be at the scope of the current directory" do
markdown = "[Link to Page](page.md)"
- output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
+ output = described_class.to_html(markdown, project: project, wiki: wiki, page_slug: page.slug)
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/-/wikis/nested/twice/page.md\"")
end
it 'rewrites links with anchor' do
markdown = '[Link to Header](start-page#title)'
- output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
+ output = described_class.to_html(markdown, project: project, wiki: wiki, page_slug: page.slug)
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/-/wikis/start-page#title\"")
end
it 'rewrites links (with spaces) with anchor' do
markdown = '[Link to Header](start page#title)'
- output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
+ output = described_class.to_html(markdown, project: project, wiki: wiki, page_slug: page.slug)
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/-/wikis/start%20page#title\"")
end
@@ -155,14 +155,14 @@ describe Banzai::Pipeline::WikiPipeline do
describe "when creating root links" do
it 'rewrites non-file links to be at the scope of the wiki root' do
markdown = "[Link to Page](/page)"
- output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
+ output = described_class.to_html(markdown, project: project, wiki: wiki, page_slug: page.slug)
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/-/wikis/page\"")
end
it 'rewrites file links to be at the scope of the wiki root' do
markdown = "[Link to Page](/page.md)"
- output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
+ output = described_class.to_html(markdown, project: project, wiki: wiki, page_slug: page.slug)
expect(output).to include("href=\"#{relative_url_root}/wiki_link_ns/wiki_link_project/-/wikis/page.md\"")
end
@@ -172,7 +172,7 @@ describe Banzai::Pipeline::WikiPipeline do
describe "linking to pages outside the wiki (absolute)" do
it "doesn't rewrite links" do
markdown = "[Link to Page](http://example.com/page)"
- output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
+ output = described_class.to_html(markdown, project: project, wiki: wiki, page_slug: page.slug)
expect(output).to include('href="http://example.com/page"')
end
@@ -188,7 +188,7 @@ describe Banzai::Pipeline::WikiPipeline do
output = described_class.to_html(
"[Link](./alert(1);)",
project: project,
- project_wiki: project_wiki,
+ wiki: wiki,
page_slug: valid_slug
)
@@ -199,7 +199,7 @@ describe Banzai::Pipeline::WikiPipeline do
output = described_class.to_html(
"[Link](../alert(1);)",
project: project,
- project_wiki: project_wiki,
+ wiki: wiki,
page_slug: valid_slug
)
@@ -236,7 +236,7 @@ describe Banzai::Pipeline::WikiPipeline do
output = described_class.to_html(
"[Link](./#{link})",
project: project,
- project_wiki: project_wiki,
+ wiki: wiki,
page_slug: slug
)
@@ -247,7 +247,7 @@ describe Banzai::Pipeline::WikiPipeline do
output = described_class.to_html(
"[Link](../#{link})",
project: project,
- project_wiki: project_wiki,
+ wiki: wiki,
page_slug: slug
)
@@ -261,35 +261,30 @@ describe Banzai::Pipeline::WikiPipeline do
end
describe 'videos and audio' do
- let_it_be(:namespace) { create(:namespace, name: "wiki_link_ns") }
- let_it_be(:project) { create(:project, :public, name: "wiki_link_project", namespace: namespace) }
- let_it_be(:project_wiki) { ProjectWiki.new(project, double(:user)) }
- let_it_be(:page) { build(:wiki_page, wiki: project_wiki, title: 'nested/twice/start-page') }
-
it 'generates video html structure' do
markdown = "![video_file](video_file_name.mp4)"
- output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
+ output = described_class.to_html(markdown, project: project, wiki: wiki, page_slug: page.slug)
expect(output).to include('<video src="/wiki_link_ns/wiki_link_project/-/wikis/nested/twice/video_file_name.mp4"')
end
it 'rewrites and replaces video links names with white spaces to %20' do
markdown = "![video file](video file name.mp4)"
- output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
+ output = described_class.to_html(markdown, project: project, wiki: wiki, page_slug: page.slug)
expect(output).to include('<video src="/wiki_link_ns/wiki_link_project/-/wikis/nested/twice/video%20file%20name.mp4"')
end
it 'generates audio html structure' do
markdown = "![audio_file](audio_file_name.wav)"
- output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
+ output = described_class.to_html(markdown, project: project, wiki: wiki, page_slug: page.slug)
expect(output).to include('<audio src="/wiki_link_ns/wiki_link_project/-/wikis/nested/twice/audio_file_name.wav"')
end
it 'rewrites and replaces audio links names with white spaces to %20' do
markdown = "![audio file](audio file name.wav)"
- output = described_class.to_html(markdown, project: project, project_wiki: project_wiki, page_slug: page.slug)
+ output = described_class.to_html(markdown, project: project, wiki: wiki, page_slug: page.slug)
expect(output).to include('<audio src="/wiki_link_ns/wiki_link_project/-/wikis/nested/twice/audio%20file%20name.wav"')
end
diff --git a/spec/lib/constraints/feature_constrainer_spec.rb b/spec/lib/constraints/feature_constrainer_spec.rb
index 0739da801a7..7665d5b3547 100644
--- a/spec/lib/constraints/feature_constrainer_spec.rb
+++ b/spec/lib/constraints/feature_constrainer_spec.rb
@@ -5,9 +5,12 @@ require 'spec_helper'
describe Constraints::FeatureConstrainer do
describe '#matches' do
it 'calls Feature.enabled? with the correct arguments' do
- expect(Feature).to receive(:enabled?).with(:feature_name, "an object", default_enabled: true)
+ gate = stub_feature_flag_gate("an object")
- described_class.new(:feature_name, "an object", default_enabled: true).matches?(double('request'))
+ expect(Feature).to receive(:enabled?)
+ .with(:feature_name, gate, default_enabled: true)
+
+ described_class.new(:feature_name, gate, default_enabled: true).matches?(double('request'))
end
end
end
diff --git a/spec/lib/extracts_path_spec.rb b/spec/lib/extracts_path_spec.rb
index ef9929a9dce..a56768a1a88 100644
--- a/spec/lib/extracts_path_spec.rb
+++ b/spec/lib/extracts_path_spec.rb
@@ -7,17 +7,15 @@ describe ExtractsPath do
include RepoHelpers
include Gitlab::Routing
- let(:project) { double('project') }
+ let_it_be(:owner) { create(:user) }
+ let_it_be(:container) { create(:project, :repository, creator: owner) }
let(:request) { double('request') }
before do
- @project = project
+ @project = container
+ ref_names = ['master', 'foo/bar/baz', 'v1.0.0', 'v2.0.0', 'release/app', 'release/app/v1.0.0']
- repo = double(ref_names: ['master', 'foo/bar/baz', 'v1.0.0', 'v2.0.0',
- 'release/app', 'release/app/v1.0.0'])
- allow(project).to receive(:repository).and_return(repo)
- allow(project).to receive(:full_path)
- .and_return('gitlab/gitlab-ci')
+ allow(container.repository).to receive(:ref_names).and_return(ref_names)
allow(request).to receive(:format=)
end
@@ -25,45 +23,12 @@ describe ExtractsPath do
let(:ref) { sample_commit[:id] }
let(:params) { { path: sample_commit[:line_code_path], ref: ref } }
- before do
- @project = create(:project, :repository)
- end
+ it_behaves_like 'assigns ref vars'
- it "log tree path has no escape sequences" do
+ it 'log tree path has no escape sequences' do
assign_ref_vars
- expect(@logs_path).to eq("/#{@project.full_path}/-/refs/#{ref}/logs_tree/files/ruby/popen.rb")
- end
-
- context 'ref contains %20' do
- let(:ref) { 'foo%20bar' }
-
- it 'is not converted to a space in @id' do
- @project.repository.add_branch(@project.owner, 'foo%20bar', 'master')
-
- assign_ref_vars
-
- expect(@id).to start_with('foo%20bar/')
- end
- end
-
- context 'ref contains trailing space' do
- let(:ref) { 'master ' }
-
- it 'strips surrounding space' do
- assign_ref_vars
- expect(@ref).to eq('master')
- end
- end
-
- context 'ref contains leading space' do
- let(:ref) { ' master ' }
-
- it 'strips surrounding space' do
- assign_ref_vars
-
- expect(@ref).to eq('master')
- end
+ expect(@logs_path).to eq("/#{@project.full_path}/-/refs/#{ref}/logs_tree/files/ruby/popen.rb")
end
context 'ref contains space in the middle' do
@@ -76,28 +41,6 @@ describe ExtractsPath do
end
end
- context 'path contains space' do
- let(:params) { { path: 'with space', ref: '38008cb17ce1466d8fec2dfa6f6ab8dcfe5cf49e' } }
-
- it 'is not converted to %20 in @path' do
- assign_ref_vars
-
- expect(@path).to eq(params[:path])
- end
- end
-
- context 'subclass overrides get_id' do
- it 'uses ref returned by get_id' do
- allow_next_instance_of(self.class) do |instance|
- allow(instance).to receive(:get_id) { '38008cb17ce1466d8fec2dfa6f6ab8dcfe5cf49e' }
- end
-
- assign_ref_vars
-
- expect(@id).to eq(get_id)
- end
- end
-
context 'ref only exists without .atom suffix' do
context 'with a path' do
let(:params) { { ref: 'v1.0.0.atom', path: 'README.md' } }
@@ -171,58 +114,7 @@ describe ExtractsPath do
end
end
- describe '#extract_ref' do
- it "returns an empty pair when no @project is set" do
- @project = nil
- expect(extract_ref('master/CHANGELOG')).to eq(['', ''])
- end
-
- context "without a path" do
- it "extracts a valid branch" do
- expect(extract_ref('master')).to eq(['master', ''])
- end
-
- it "extracts a valid tag" do
- expect(extract_ref('v2.0.0')).to eq(['v2.0.0', ''])
- end
-
- it "extracts a valid commit ref without a path" do
- expect(extract_ref('f4b14494ef6abf3d144c28e4af0c20143383e062')).to eq(
- ['f4b14494ef6abf3d144c28e4af0c20143383e062', '']
- )
- end
-
- it "falls back to a primitive split for an invalid ref" do
- expect(extract_ref('stable')).to eq(['stable', ''])
- end
-
- it "extracts the longest matching ref" do
- expect(extract_ref('release/app/v1.0.0/README.md')).to eq(
- ['release/app/v1.0.0', 'README.md'])
- end
- end
-
- context "with a path" do
- it "extracts a valid branch" do
- expect(extract_ref('foo/bar/baz/CHANGELOG')).to eq(
- ['foo/bar/baz', 'CHANGELOG'])
- end
-
- it "extracts a valid tag" do
- expect(extract_ref('v2.0.0/CHANGELOG')).to eq(['v2.0.0', 'CHANGELOG'])
- end
-
- it "extracts a valid commit SHA" do
- expect(extract_ref('f4b14494ef6abf3d144c28e4af0c20143383e062/CHANGELOG')).to eq(
- %w(f4b14494ef6abf3d144c28e4af0c20143383e062 CHANGELOG)
- )
- end
-
- it "falls back to a primitive split for an invalid ref" do
- expect(extract_ref('stable/CHANGELOG')).to eq(%w(stable CHANGELOG))
- end
- end
- end
+ it_behaves_like 'extracts refs'
describe '#extract_ref_without_atom' do
it 'ignores any matching refs suffixed with atom' do
diff --git a/spec/lib/extracts_ref_spec.rb b/spec/lib/extracts_ref_spec.rb
new file mode 100644
index 00000000000..1867f639711
--- /dev/null
+++ b/spec/lib/extracts_ref_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ExtractsRef do
+ include described_class
+ include RepoHelpers
+
+ let_it_be(:owner) { create(:user) }
+ let_it_be(:container) { create(:snippet, :repository, author: owner) }
+ let(:ref) { sample_commit[:id] }
+ let(:params) { { path: sample_commit[:line_code_path], ref: ref } }
+
+ before do
+ ref_names = ['master', 'foo/bar/baz', 'v1.0.0', 'v2.0.0', 'release/app', 'release/app/v1.0.0']
+
+ allow(container.repository).to receive(:ref_names).and_return(ref_names)
+ allow_any_instance_of(described_class).to receive(:repository_container).and_return(container)
+ end
+
+ it_behaves_like 'assigns ref vars'
+ it_behaves_like 'extracts refs'
+end
diff --git a/spec/lib/feature/gitaly_spec.rb b/spec/lib/feature/gitaly_spec.rb
index 08651c42276..6654b7627cd 100644
--- a/spec/lib/feature/gitaly_spec.rb
+++ b/spec/lib/feature/gitaly_spec.rb
@@ -25,7 +25,7 @@ describe Feature::Gitaly do
describe ".server_feature_flags" do
before do
- allow(Feature).to receive(:persisted_names).and_return(%w[gitaly_mep_mep foo])
+ stub_feature_flags(gitaly_mep_mep: true, foo: true)
end
subject { described_class.server_feature_flags }
diff --git a/spec/lib/feature_spec.rb b/spec/lib/feature_spec.rb
index 81fa2dc5cad..37f8d3ad47d 100644
--- a/spec/lib/feature_spec.rb
+++ b/spec/lib/feature_spec.rb
@@ -2,12 +2,10 @@
require 'spec_helper'
-describe Feature do
+describe Feature, stub_feature_flags: false do
before do
- # We mock all calls to .enabled? to return true in order to force all
- # specs to run the feature flag gated behavior, but here we need a clean
- # behavior from the class
- allow(described_class).to receive(:enabled?).and_call_original
+ # reset Flipper AR-engine
+ Feature.reset
end
describe '.get' do
@@ -23,67 +21,106 @@ describe Feature do
end
describe '.persisted_names' do
- it 'returns the names of the persisted features' do
- Feature::FlipperFeature.create!(key: 'foo')
+ context 'when FF_LEGACY_PERSISTED_NAMES=false' do
+ before do
+ stub_env('FF_LEGACY_PERSISTED_NAMES', 'false')
+ end
- expect(described_class.persisted_names).to eq(%w[foo])
- end
+ it 'returns the names of the persisted features' do
+ Feature.enable('foo')
+
+ expect(described_class.persisted_names).to contain_exactly('foo')
+ end
+
+ it 'returns an empty Array when no features are persisted' do
+ expect(described_class.persisted_names).to be_empty
+ end
+
+ it 'caches the feature names when request store is active',
+ :request_store, :use_clean_rails_memory_store_caching do
+ Feature.enable('foo')
- it 'returns an empty Array when no features are presisted' do
- expect(described_class.persisted_names).to be_empty
+ expect(Gitlab::ProcessMemoryCache.cache_backend)
+ .to receive(:fetch)
+ .once
+ .with('flipper/v1/features', expires_in: 1.minute)
+ .and_call_original
+
+ 2.times do
+ expect(described_class.persisted_names).to contain_exactly('foo')
+ end
+ end
end
- it 'caches the feature names when request store is active',
+ context 'when FF_LEGACY_PERSISTED_NAMES=true' do
+ before do
+ stub_env('FF_LEGACY_PERSISTED_NAMES', 'true')
+ end
+
+ it 'returns the names of the persisted features' do
+ Feature.enable('foo')
+
+ expect(described_class.persisted_names).to contain_exactly('foo')
+ end
+
+ it 'returns an empty Array when no features are persisted' do
+ expect(described_class.persisted_names).to be_empty
+ end
+
+ it 'caches the feature names when request store is active',
:request_store, :use_clean_rails_memory_store_caching do
- Feature::FlipperFeature.create!(key: 'foo')
+ Feature.enable('foo')
- expect(Feature::FlipperFeature)
- .to receive(:feature_names)
- .once
- .and_call_original
+ expect(Gitlab::ProcessMemoryCache.cache_backend)
+ .to receive(:fetch)
+ .once
+ .with('flipper:persisted_names', expires_in: 1.minute)
+ .and_call_original
- expect(Gitlab::ProcessMemoryCache.cache_backend)
- .to receive(:fetch)
- .once
- .with('flipper:persisted_names', expires_in: 1.minute)
- .and_call_original
+ 2.times do
+ expect(described_class.persisted_names).to contain_exactly('foo')
+ end
+ end
+ end
- 2.times do
- expect(described_class.persisted_names).to eq(%w[foo])
+ it 'fetches all flags once in a single query', :request_store do
+ Feature.enable('foo1')
+ Feature.enable('foo2')
+
+ queries = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ expect(described_class.persisted_names).to contain_exactly('foo1', 'foo2')
+
+ RequestStore.clear!
+
+ expect(described_class.persisted_names).to contain_exactly('foo1', 'foo2')
end
+
+ expect(queries.count).to eq(1)
end
end
- describe '.persisted?' do
+ describe '.persisted_name?' do
context 'when the feature is persisted' do
it 'returns true when feature name is a string' do
- Feature::FlipperFeature.create!(key: 'foo')
-
- feature = double(:feature, name: 'foo')
+ Feature.enable('foo')
- expect(described_class.persisted?(feature)).to eq(true)
+ expect(described_class.persisted_name?('foo')).to eq(true)
end
it 'returns true when feature name is a symbol' do
- Feature::FlipperFeature.create!(key: 'foo')
+ Feature.enable('foo')
- feature = double(:feature, name: :foo)
-
- expect(described_class.persisted?(feature)).to eq(true)
+ expect(described_class.persisted_name?(:foo)).to eq(true)
end
end
context 'when the feature is not persisted' do
it 'returns false when feature name is a string' do
- feature = double(:feature, name: 'foo')
-
- expect(described_class.persisted?(feature)).to eq(false)
+ expect(described_class.persisted_name?('foo')).to eq(false)
end
it 'returns false when feature name is a symbol' do
- feature = double(:feature, name: :bar)
-
- expect(described_class.persisted?(feature)).to eq(false)
+ expect(described_class.persisted_name?(:bar)).to eq(false)
end
end
end
@@ -100,16 +137,12 @@ describe Feature do
end
describe '.flipper' do
- before do
- described_class.instance_variable_set(:@flipper, nil)
- end
-
context 'when request store is inactive' do
it 'memoizes the Flipper instance' do
expect(Flipper).to receive(:new).once.and_call_original
2.times do
- described_class.flipper
+ described_class.send(:flipper)
end
end
end
@@ -118,9 +151,9 @@ describe Feature do
it 'memoizes the Flipper instance' do
expect(Flipper).to receive(:new).once.and_call_original
- described_class.flipper
+ described_class.send(:flipper)
described_class.instance_variable_set(:@flipper, nil)
- described_class.flipper
+ described_class.send(:flipper)
end
end
end
@@ -146,21 +179,21 @@ describe Feature do
expect(described_class.enabled?(:enabled_feature_flag)).to be_truthy
end
- it { expect(described_class.l1_cache_backend).to eq(Gitlab::ProcessMemoryCache.cache_backend) }
- it { expect(described_class.l2_cache_backend).to eq(Rails.cache) }
+ it { expect(described_class.send(:l1_cache_backend)).to eq(Gitlab::ProcessMemoryCache.cache_backend) }
+ it { expect(described_class.send(:l2_cache_backend)).to eq(Rails.cache) }
it 'caches the status in L1 and L2 caches',
:request_store, :use_clean_rails_memory_store_caching do
described_class.enable(:enabled_feature_flag)
flipper_key = "flipper/v1/feature/enabled_feature_flag"
- expect(described_class.l2_cache_backend)
+ expect(described_class.send(:l2_cache_backend))
.to receive(:fetch)
.once
.with(flipper_key, expires_in: 1.hour)
.and_call_original
- expect(described_class.l1_cache_backend)
+ expect(described_class.send(:l1_cache_backend))
.to receive(:fetch)
.once
.with(flipper_key, expires_in: 1.minute)
@@ -182,14 +215,14 @@ describe Feature do
let(:flag) { :some_feature_flag }
before do
- described_class.flipper.memoize = false
+ described_class.send(:flipper).memoize = false
described_class.enabled?(flag)
end
it 'caches the status in L1 cache for the first minute' do
expect do
- expect(described_class.l1_cache_backend).to receive(:fetch).once.and_call_original
- expect(described_class.l2_cache_backend).not_to receive(:fetch)
+ expect(described_class.send(:l1_cache_backend)).to receive(:fetch).once.and_call_original
+ expect(described_class.send(:l2_cache_backend)).not_to receive(:fetch)
expect(described_class.enabled?(flag)).to be_truthy
end.not_to exceed_query_limit(0)
end
@@ -197,8 +230,8 @@ describe Feature do
it 'caches the status in L2 cache after 2 minutes' do
Timecop.travel 2.minutes do
expect do
- expect(described_class.l1_cache_backend).to receive(:fetch).once.and_call_original
- expect(described_class.l2_cache_backend).to receive(:fetch).once.and_call_original
+ expect(described_class.send(:l1_cache_backend)).to receive(:fetch).once.and_call_original
+ expect(described_class.send(:l2_cache_backend)).to receive(:fetch).once.and_call_original
expect(described_class.enabled?(flag)).to be_truthy
end.not_to exceed_query_limit(0)
end
@@ -207,8 +240,8 @@ describe Feature do
it 'fetches the status after an hour' do
Timecop.travel 61.minutes do
expect do
- expect(described_class.l1_cache_backend).to receive(:fetch).once.and_call_original
- expect(described_class.l2_cache_backend).to receive(:fetch).once.and_call_original
+ expect(described_class.send(:l1_cache_backend)).to receive(:fetch).once.and_call_original
+ expect(described_class.send(:l2_cache_backend)).to receive(:fetch).once.and_call_original
expect(described_class.enabled?(flag)).to be_truthy
end.not_to exceed_query_limit(1)
end
@@ -216,10 +249,8 @@ describe Feature do
end
context 'with an individual actor' do
- CustomActor = Struct.new(:flipper_id)
-
- let(:actor) { CustomActor.new(flipper_id: 'CustomActor:5') }
- let(:another_actor) { CustomActor.new(flipper_id: 'CustomActor:10') }
+ let(:actor) { stub_feature_flag_gate('CustomActor:5') }
+ let(:another_actor) { stub_feature_flag_gate('CustomActor:10') }
before do
described_class.enable(:enabled_feature_flag, actor)
@@ -237,6 +268,17 @@ describe Feature do
expect(described_class.enabled?(:enabled_feature_flag)).to be_falsey
end
end
+
+ context 'with invalid actor' do
+ let(:actor) { double('invalid actor') }
+
+ context 'when is dev_or_test_env' do
+ it 'raises an exception' do
+ expect { described_class.enabled?(:enabled_feature_flag, actor) }
+ .to raise_error /needs to include `FeatureGate` or implement `flipper_id`/
+ end
+ end
+ end
end
describe '.disable?' do
diff --git a/spec/lib/gitaly/server_spec.rb b/spec/lib/gitaly/server_spec.rb
index 5142f705251..390855b30ad 100644
--- a/spec/lib/gitaly/server_spec.rb
+++ b/spec/lib/gitaly/server_spec.rb
@@ -20,6 +20,7 @@ describe Gitaly::Server do
it { is_expected.to respond_to(:git_binary_version) }
it { is_expected.to respond_to(:up_to_date?) }
it { is_expected.to respond_to(:address) }
+ it { is_expected.to respond_to(:replication_factor) }
describe 'readable?' do
context 'when the storage is readable' do
@@ -134,4 +135,22 @@ describe Gitaly::Server do
end
end
end
+
+ describe 'replication_factor' do
+ context 'when examining for a given server' do
+ let(:storage_status) { double('storage_status', storage_name: 'default') }
+
+ before do
+ response = double('response', storage_statuses: [storage_status])
+ allow_next_instance_of(Gitlab::GitalyClient::ServerService) do |instance|
+ allow(instance).to receive(:info).and_return(response)
+ end
+ end
+
+ it do
+ allow(storage_status).to receive(:replication_factor).and_return(2)
+ expect(server.replication_factor).to eq(2)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/alert_management/alert_params_spec.rb b/spec/lib/gitlab/alert_management/alert_params_spec.rb
index 5cf34038f68..284af421f05 100644
--- a/spec/lib/gitlab/alert_management/alert_params_spec.rb
+++ b/spec/lib/gitlab/alert_management/alert_params_spec.rb
@@ -32,7 +32,8 @@ describe Gitlab::AlertManagement::AlertParams do
severity: 'critical',
hosts: ['gitlab.com'],
payload: payload,
- started_at: started_at
+ started_at: started_at,
+ fingerprint: nil
)
end
diff --git a/spec/lib/gitlab/alert_management/alert_status_counts_spec.rb b/spec/lib/gitlab/alert_management/alert_status_counts_spec.rb
index 816ed918fe8..728cbf11cda 100644
--- a/spec/lib/gitlab/alert_management/alert_status_counts_spec.rb
+++ b/spec/lib/gitlab/alert_management/alert_status_counts_spec.rb
@@ -50,6 +50,19 @@ describe Gitlab::AlertManagement::AlertStatusCounts do
expect(counts.acknowledged).to eq(0)
end
end
+
+ context 'when search param is included' do
+ let(:params) { { search: alert_1.title } }
+
+ it 'returns the correct counts' do
+ expect(counts.open).to eq(0)
+ expect(counts.all).to eq(1)
+ expect(counts.resolved).to eq(1)
+ expect(counts.ignored).to eq(0)
+ expect(counts.triggered).to eq(0)
+ expect(counts.acknowledged).to eq(0)
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/alert_management/fingerprint_spec.rb b/spec/lib/gitlab/alert_management/fingerprint_spec.rb
new file mode 100644
index 00000000000..7865d667f71
--- /dev/null
+++ b/spec/lib/gitlab/alert_management/fingerprint_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::AlertManagement::Fingerprint do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:alert) { create(:alert_management_alert) }
+
+ describe '.generate' do
+ subject { described_class.generate(data) }
+
+ context 'when data is an array' do
+ let(:data) { [1, 'fingerprint', 'given'] }
+
+ it 'flattens the array' do
+ expect_next_instance_of(described_class) do |obj|
+ expect(obj).to receive(:flatten_array)
+ end
+
+ subject
+ end
+
+ it 'returns the hashed fingerprint' do
+ expected_fingerprint = Digest::SHA1.hexdigest(data.flatten.map!(&:to_s).join)
+ expect(subject).to eq(expected_fingerprint)
+ end
+ end
+
+ context 'when data is a non-array type' do
+ where(:data) do
+ [
+ 111,
+ 'fingerprint',
+ :fingerprint,
+ true,
+ { test: true }
+ ]
+ end
+
+ with_them do
+ it 'performs like a hashed fingerprint' do
+ expect(subject).to eq(Digest::SHA1.hexdigest(data.to_s))
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/alerting/alert_spec.rb b/spec/lib/gitlab/alerting/alert_spec.rb
index a0582515f3d..d582ff6f32a 100644
--- a/spec/lib/gitlab/alerting/alert_spec.rb
+++ b/spec/lib/gitlab/alerting/alert_spec.rb
@@ -253,7 +253,7 @@ describe Gitlab::Alerting::Alert do
include_context 'gitlab alert'
it 'returns a fingerprint' do
- plain_fingerprint = [alert.metric_id, alert.starts_at].join('/')
+ plain_fingerprint = [alert.metric_id, alert.starts_at_raw].join('/')
is_expected.to eq(Digest::SHA1.hexdigest(plain_fingerprint))
end
@@ -263,7 +263,7 @@ describe Gitlab::Alerting::Alert do
include_context 'full query'
it 'returns a fingerprint' do
- plain_fingerprint = [alert.starts_at, alert.title, alert.full_query].join('/')
+ plain_fingerprint = [alert.starts_at_raw, alert.title, alert.full_query].join('/')
is_expected.to eq(Digest::SHA1.hexdigest(plain_fingerprint))
end
diff --git a/spec/lib/gitlab/alerting/notification_payload_parser_spec.rb b/spec/lib/gitlab/alerting/notification_payload_parser_spec.rb
index f32095b3c86..889efae9585 100644
--- a/spec/lib/gitlab/alerting/notification_payload_parser_spec.rb
+++ b/spec/lib/gitlab/alerting/notification_payload_parser_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper'
describe Gitlab::Alerting::NotificationPayloadParser do
describe '.call' do
@@ -89,6 +89,39 @@ describe Gitlab::Alerting::NotificationPayloadParser do
end
end
+ context 'with fingerprint' do
+ before do
+ payload[:fingerprint] = data
+ end
+
+ shared_examples 'fingerprint generation' do
+ it 'generates the fingerprint correctly' do
+ expect(result).to eq(Gitlab::AlertManagement::Fingerprint.generate(data))
+ end
+ end
+
+ context 'with blank fingerprint' do
+ it_behaves_like 'fingerprint generation' do
+ let(:data) { ' ' }
+ let(:result) { subject.dig('annotations', 'fingerprint') }
+ end
+ end
+
+ context 'with fingerprint given' do
+ it_behaves_like 'fingerprint generation' do
+ let(:data) { 'fingerprint' }
+ let(:result) { subject.dig('annotations', 'fingerprint') }
+ end
+ end
+
+ context 'with array fingerprint given' do
+ it_behaves_like 'fingerprint generation' do
+ let(:data) { [1, 'fingerprint', 'given'] }
+ let(:result) { subject.dig('annotations', 'fingerprint') }
+ end
+ end
+ end
+
context 'when payload attributes have blank lines' do
let(:payload) do
{
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/base_query_builder_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/base_query_builder_spec.rb
index 0fc9d3c1e9e..250e2f16aec 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/base_query_builder_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/base_query_builder_spec.rb
@@ -6,7 +6,8 @@ describe Gitlab::Analytics::CycleAnalytics::BaseQueryBuilder do
let_it_be(:project) { create(:project, :empty_repo) }
let_it_be(:mr1) { create(:merge_request, target_project: project, source_project: project, allow_broken: true, created_at: 3.months.ago) }
let_it_be(:mr2) { create(:merge_request, target_project: project, source_project: project, allow_broken: true, created_at: 1.month.ago) }
- let(:params) { {} }
+ let_it_be(:user) { create(:user) }
+ let(:params) { { current_user: user } }
let(:records) do
stage = build(:cycle_analytics_project_stage, {
start_event_identifier: :merge_request_created,
@@ -17,6 +18,7 @@ describe Gitlab::Analytics::CycleAnalytics::BaseQueryBuilder do
end
before do
+ project.add_maintainer(user)
mr1.metrics.update!(merged_at: 1.month.ago)
mr2.metrics.update!(merged_at: Time.now)
end
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb
index 334cab0b799..e3429b0ca57 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb
@@ -23,7 +23,7 @@ describe Gitlab::Analytics::CycleAnalytics::RecordsFetcher do
describe '#serialized_records' do
shared_context 'when records are loaded by maintainer' do
before do
- project.add_user(user, Gitlab::Access::MAINTAINER)
+ project.add_user(user, Gitlab::Access::DEVELOPER)
end
it 'returns all records' do
@@ -103,6 +103,8 @@ describe Gitlab::Analytics::CycleAnalytics::RecordsFetcher do
latest_build_finished_at: 7.days.ago,
pipeline: ci_build2.pipeline
})
+
+ project.add_user(user, Gitlab::Access::MAINTAINER)
end
context 'returns build records' do
diff --git a/spec/lib/gitlab/auth/auth_finders_spec.rb b/spec/lib/gitlab/auth/auth_finders_spec.rb
index 774a87752b9..2aef206c7fd 100644
--- a/spec/lib/gitlab/auth/auth_finders_spec.rb
+++ b/spec/lib/gitlab/auth/auth_finders_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
describe Gitlab::Auth::AuthFinders do
include described_class
+ include HttpBasicAuthHelpers
let(:user) { create(:user) }
let(:env) do
@@ -22,10 +23,7 @@ describe Gitlab::Auth::AuthFinders do
end
def set_basic_auth_header(username, password)
- set_header(
- 'HTTP_AUTHORIZATION',
- ActionController::HttpAuthentication::Basic.encode_credentials(username, password)
- )
+ env.merge!(basic_auth_header(username, password))
end
describe '#find_user_from_warden' do
@@ -653,6 +651,24 @@ describe Gitlab::Auth::AuthFinders do
it_behaves_like 'job token params', described_class::JOB_TOKEN_PARAM
it_behaves_like 'job token params', described_class::RUNNER_JOB_TOKEN_PARAM
end
+
+ context 'when the job token is provided via basic auth' do
+ let(:route_authentication_setting) { { job_token_allowed: :basic_auth } }
+ let(:username) { Ci::Build::CI_REGISTRY_USER }
+ let(:token) { job.token }
+
+ before do
+ set_basic_auth_header(username, token)
+ end
+
+ it { is_expected.to eq(user) }
+
+ context 'credentials are provided but route setting is incorrect' do
+ let(:route_authentication_setting) { { job_token_allowed: :unknown } }
+
+ it { is_expected.to be_nil }
+ end
+ end
end
describe '#find_runner_from_token' do
diff --git a/spec/lib/gitlab/auth/ldap/person_spec.rb b/spec/lib/gitlab/auth/ldap/person_spec.rb
index e90917cfce1..403a48d40ef 100644
--- a/spec/lib/gitlab/auth/ldap/person_spec.rb
+++ b/spec/lib/gitlab/auth/ldap/person_spec.rb
@@ -57,14 +57,17 @@ describe Gitlab::Auth::Ldap::Person do
'attributes' => {
'name' => 'cn',
'email' => 'mail',
- 'username' => %w(uid mail memberof)
+ 'username' => %w(uid mail),
+ 'first_name' => ''
}
}
)
config = Gitlab::Auth::Ldap::Config.new('ldapmain')
ldap_attributes = described_class.ldap_attributes(config)
- expect(ldap_attributes).to match_array(%w(dn uid cn mail memberof))
+ expect(ldap_attributes).to include('dn', 'uid', 'cn', 'mail')
+ expect(ldap_attributes).to be_present
+ expect(ldap_attributes.uniq!).to eq(nil)
end
end
diff --git a/spec/lib/gitlab/background_migration/merge_request_assignees_migration_progress_check_spec.rb b/spec/lib/gitlab/background_migration/merge_request_assignees_migration_progress_check_spec.rb
index eecd290e3ca..9dd97b58014 100644
--- a/spec/lib/gitlab/background_migration/merge_request_assignees_migration_progress_check_spec.rb
+++ b/spec/lib/gitlab/background_migration/merge_request_assignees_migration_progress_check_spec.rb
@@ -54,6 +54,10 @@ describe Gitlab::BackgroundMigration::MergeRequestAssigneesMigrationProgressChec
end
context 'when there are no scheduled, or retrying or dead' do
+ before do
+ stub_feature_flags(multiple_merge_request_assignees: false)
+ end
+
it 'enables feature' do
allow(Gitlab::BackgroundMigration).to receive(:exists?)
.with('PopulateMergeRequestAssigneesTable')
@@ -67,9 +71,9 @@ describe Gitlab::BackgroundMigration::MergeRequestAssigneesMigrationProgressChec
.with('PopulateMergeRequestAssigneesTable')
.and_return(false)
- expect(Feature).to receive(:enable).with(:multiple_merge_request_assignees)
-
described_class.new.perform
+
+ expect(Feature.enabled?(:multiple_merge_request_assignees)).to eq(true)
end
end
diff --git a/spec/lib/gitlab/background_migration/reset_merge_status_spec.rb b/spec/lib/gitlab/background_migration/reset_merge_status_spec.rb
index 40340f89448..e057aea6bb3 100644
--- a/spec/lib/gitlab/background_migration/reset_merge_status_spec.rb
+++ b/spec/lib/gitlab/background_migration/reset_merge_status_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::ResetMergeStatus, schema: 20190528180441 do
+describe Gitlab::BackgroundMigration::ResetMergeStatus do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:namespace) { namespaces.create(name: 'gitlab', path: 'gitlab-org') }
@@ -23,24 +23,24 @@ describe Gitlab::BackgroundMigration::ResetMergeStatus, schema: 20190528180441 d
end
it 'correctly updates opened mergeable MRs to unchecked' do
- create_merge_request(1, state: 'opened', merge_status: 'can_be_merged')
- create_merge_request(2, state: 'opened', merge_status: 'can_be_merged')
- create_merge_request(3, state: 'opened', merge_status: 'can_be_merged')
- create_merge_request(4, state: 'merged', merge_status: 'can_be_merged')
- create_merge_request(5, state: 'opened', merge_status: 'cannot_be_merged')
+ create_merge_request(1, state_id: MergeRequest.available_states[:opened], merge_status: 'can_be_merged')
+ create_merge_request(2, state_id: MergeRequest.available_states[:opened], merge_status: 'can_be_merged')
+ create_merge_request(3, state_id: MergeRequest.available_states[:opened], merge_status: 'can_be_merged')
+ create_merge_request(4, state_id: MergeRequest.available_states[:merged], merge_status: 'can_be_merged')
+ create_merge_request(5, state_id: MergeRequest.available_states[:opened], merge_status: 'cannot_be_merged')
subject.perform(1, 5)
expected_rows = [
- { id: 1, state: 'opened', merge_status: 'unchecked' },
- { id: 2, state: 'opened', merge_status: 'unchecked' },
- { id: 3, state: 'opened', merge_status: 'unchecked' },
- { id: 4, state: 'merged', merge_status: 'can_be_merged' },
- { id: 5, state: 'opened', merge_status: 'cannot_be_merged' }
+ { id: 1, state_id: MergeRequest.available_states[:opened], merge_status: 'unchecked' },
+ { id: 2, state_id: MergeRequest.available_states[:opened], merge_status: 'unchecked' },
+ { id: 3, state_id: MergeRequest.available_states[:opened], merge_status: 'unchecked' },
+ { id: 4, state_id: MergeRequest.available_states[:merged], merge_status: 'can_be_merged' },
+ { id: 5, state_id: MergeRequest.available_states[:opened], merge_status: 'cannot_be_merged' }
]
rows = merge_requests.order(:id).map do |row|
- row.attributes.slice('id', 'state', 'merge_status').symbolize_keys
+ row.attributes.slice('id', 'state_id', 'merge_status').symbolize_keys
end
expect(rows).to eq(expected_rows)
diff --git a/spec/lib/gitlab/badge/coverage/report_spec.rb b/spec/lib/gitlab/badge/coverage/report_spec.rb
index 560072a3d83..284ca53a996 100644
--- a/spec/lib/gitlab/badge/coverage/report_spec.rb
+++ b/spec/lib/gitlab/badge/coverage/report_spec.rb
@@ -7,7 +7,7 @@ describe Gitlab::Badge::Coverage::Report do
let(:job_name) { nil }
let(:badge) do
- described_class.new(project, 'master', job_name)
+ described_class.new(project, 'master', opts: { job: job_name })
end
describe '#entity' do
diff --git a/spec/lib/gitlab/badge/coverage/template_spec.rb b/spec/lib/gitlab/badge/coverage/template_spec.rb
index b51d707a61d..3940b37830e 100644
--- a/spec/lib/gitlab/badge/coverage/template_spec.rb
+++ b/spec/lib/gitlab/badge/coverage/template_spec.rb
@@ -3,13 +3,33 @@
require 'spec_helper'
describe Gitlab::Badge::Coverage::Template do
- let(:badge) { double(entity: 'coverage', status: 90.00) }
+ let(:badge) { double(entity: 'coverage', status: 90.00, customization: {}) }
let(:template) { described_class.new(badge) }
describe '#key_text' do
- it 'is always says coverage' do
+ it 'says coverage by default' do
expect(template.key_text).to eq 'coverage'
end
+
+ context 'when custom key_text is defined' do
+ before do
+ allow(badge).to receive(:customization).and_return({ key_text: "custom text" })
+ end
+
+ it 'returns custom value' do
+ expect(template.key_text).to eq "custom text"
+ end
+
+ context 'when its size is larger than the max allowed value' do
+ before do
+ allow(badge).to receive(:customization).and_return({ key_text: 't' * 129 })
+ end
+
+ it 'returns default value' do
+ expect(template.key_text).to eq 'coverage'
+ end
+ end
+ end
end
describe '#value_text' do
@@ -41,9 +61,29 @@ describe Gitlab::Badge::Coverage::Template do
end
describe '#key_width' do
- it 'has a fixed key width' do
+ it 'is fixed by default' do
expect(template.key_width).to eq 62
end
+
+ context 'when custom key_width is defined' do
+ before do
+ allow(badge).to receive(:customization).and_return({ key_width: 101 })
+ end
+
+ it 'returns custom value' do
+ expect(template.key_width).to eq 101
+ end
+
+ context 'when it is larger than the max allowed value' do
+ before do
+ allow(badge).to receive(:customization).and_return({ key_width: 129 })
+ end
+
+ it 'returns default value' do
+ expect(template.key_width).to eq 62
+ end
+ end
+ end
end
describe '#value_width' do
diff --git a/spec/lib/gitlab/badge/pipeline/template_spec.rb b/spec/lib/gitlab/badge/pipeline/template_spec.rb
index da95c7219a4..751a5d6645e 100644
--- a/spec/lib/gitlab/badge/pipeline/template_spec.rb
+++ b/spec/lib/gitlab/badge/pipeline/template_spec.rb
@@ -3,13 +3,33 @@
require 'spec_helper'
describe Gitlab::Badge::Pipeline::Template do
- let(:badge) { double(entity: 'pipeline', status: 'success') }
+ let(:badge) { double(entity: 'pipeline', status: 'success', customization: {}) }
let(:template) { described_class.new(badge) }
describe '#key_text' do
- it 'is always says pipeline' do
+ it 'says pipeline by default' do
expect(template.key_text).to eq 'pipeline'
end
+
+ context 'when custom key_text is defined' do
+ before do
+ allow(badge).to receive(:customization).and_return({ key_text: 'custom text' })
+ end
+
+ it 'returns custom value' do
+ expect(template.key_text).to eq 'custom text'
+ end
+
+ context 'when its size is larger than the max allowed value' do
+ before do
+ allow(badge).to receive(:customization).and_return({ key_text: 't' * 129 })
+ end
+
+ it 'returns default value' do
+ expect(template.key_text).to eq 'pipeline'
+ end
+ end
+ end
end
describe '#value_text' do
@@ -18,6 +38,32 @@ describe Gitlab::Badge::Pipeline::Template do
end
end
+ describe '#key_width' do
+ it 'is fixed by default' do
+ expect(template.key_width).to eq 62
+ end
+
+ context 'when custom key_width is defined' do
+ before do
+ allow(badge).to receive(:customization).and_return({ key_width: 101 })
+ end
+
+ it 'returns custom value' do
+ expect(template.key_width).to eq 101
+ end
+
+ context 'when it is larger than the max allowed value' do
+ before do
+ allow(badge).to receive(:customization).and_return({ key_width: 129 })
+ end
+
+ it 'returns default value' do
+ expect(template.key_width).to eq 62
+ end
+ end
+ end
+ end
+
describe 'widths and text anchors' do
it 'has fixed width and text anchors' do
expect(template.width).to eq 116
diff --git a/spec/lib/gitlab/bitbucket_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
index b3c1f86c5ee..137d0fd4f9e 100644
--- a/spec/lib/gitlab/bitbucket_import/importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
@@ -190,11 +190,14 @@ describe Gitlab::BitbucketImport::Importer do
context 'when importing a pull request throws an exception' do
before do
- allow(pull_request).to receive(:raw).and_return('hello world')
+ allow(pull_request).to receive(:raw).and_return({ error: "broken" })
allow(subject.client).to receive(:pull_request_comments).and_raise(Gitlab::HTTP::Error)
end
it 'logs an error without the backtrace' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception)
+ .with(instance_of(Gitlab::HTTP::Error), hash_including(raw_response: '{"error":"broken"}'))
+
subject.execute
expect(subject.errors.count).to eq(1)
diff --git a/spec/lib/gitlab/cache/import/caching_spec.rb b/spec/lib/gitlab/cache/import/caching_spec.rb
index e4aec0f4dec..7b4308d32ae 100644
--- a/spec/lib/gitlab/cache/import/caching_spec.rb
+++ b/spec/lib/gitlab/cache/import/caching_spec.rb
@@ -89,7 +89,7 @@ describe Gitlab::Cache::Import::Caching, :clean_gitlab_redis_cache do
end
describe '.write_multiple' do
- it 'sets multiple keys' do
+ it 'sets multiple keys when key_prefix not set' do
mapping = { 'foo' => 10, 'bar' => 20 }
described_class.write_multiple(mapping)
@@ -101,6 +101,19 @@ describe Gitlab::Cache::Import::Caching, :clean_gitlab_redis_cache do
expect(found).to eq(value.to_s)
end
end
+
+ it 'sets multiple keys with correct prefix' do
+ mapping = { 'foo' => 10, 'bar' => 20 }
+
+ described_class.write_multiple(mapping, key_prefix: 'pref/')
+
+ mapping.each do |key, value|
+ full_key = described_class.cache_key_for("pref/#{key}")
+ found = Gitlab::Redis::Cache.with { |r| r.get(full_key) }
+
+ expect(found).to eq(value.to_s)
+ end
+ end
end
describe '.expire' do
diff --git a/spec/lib/gitlab/chat_spec.rb b/spec/lib/gitlab/chat_spec.rb
index 08cc16314c5..be606fe6db1 100644
--- a/spec/lib/gitlab/chat_spec.rb
+++ b/spec/lib/gitlab/chat_spec.rb
@@ -5,19 +5,13 @@ require 'spec_helper'
describe Gitlab::Chat, :use_clean_rails_memory_store_caching do
describe '.available?' do
it 'returns true when the chatops feature is available' do
- allow(Feature)
- .to receive(:enabled?)
- .with(:chatops, default_enabled: true)
- .and_return(true)
+ stub_feature_flags(chatops: true)
expect(described_class).to be_available
end
it 'returns false when the chatops feature is not available' do
- allow(Feature)
- .to receive(:enabled?)
- .with(:chatops, default_enabled: true)
- .and_return(false)
+ stub_feature_flags(chatops: false)
expect(described_class).not_to be_available
end
diff --git a/spec/lib/gitlab/ci/build/credentials/factory_spec.rb b/spec/lib/gitlab/ci/build/credentials/factory_spec.rb
index 848adb2e6e5..159f89f4985 100644
--- a/spec/lib/gitlab/ci/build/credentials/factory_spec.rb
+++ b/spec/lib/gitlab/ci/build/credentials/factory_spec.rb
@@ -7,11 +7,13 @@ describe Gitlab::Ci::Build::Credentials::Factory do
subject { described_class.new(build).create! }
- class TestProvider
- def initialize(build); end
- end
-
before do
+ stub_const('TestProvider', Class.new)
+
+ TestProvider.class_eval do
+ def initialize(build); end
+ end
+
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:providers).and_return([TestProvider])
end
diff --git a/spec/lib/gitlab/ci/build/releaser_spec.rb b/spec/lib/gitlab/ci/build/releaser_spec.rb
new file mode 100644
index 00000000000..2f7bca777dd
--- /dev/null
+++ b/spec/lib/gitlab/ci/build/releaser_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Ci::Build::Releaser do
+ subject { described_class.new(config: config[:release]).script }
+
+ describe '#script' do
+ context 'all nodes' do
+ let(:config) do
+ {
+ release: {
+ name: 'Release $CI_COMMIT_SHA',
+ description: 'Created using the release-cli $EXTRA_DESCRIPTION',
+ tag_name: 'release-$CI_COMMIT_SHA',
+ ref: '$CI_COMMIT_SHA'
+ }
+ }
+ end
+
+ it 'generates the script' do
+ expect(subject).to eq('release-cli create --name "Release $CI_COMMIT_SHA" --description "Created using the release-cli $EXTRA_DESCRIPTION" --tag-name "release-$CI_COMMIT_SHA" --ref "$CI_COMMIT_SHA"')
+ end
+ end
+
+ context 'individual nodes' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:node_name, :node_value, :result) do
+ 'name' | 'Release $CI_COMMIT_SHA' | 'release-cli create --name "Release $CI_COMMIT_SHA"'
+ 'description' | 'Release-cli $EXTRA_DESCRIPTION' | 'release-cli create --description "Release-cli $EXTRA_DESCRIPTION"'
+ 'tag_name' | 'release-$CI_COMMIT_SHA' | 'release-cli create --tag-name "release-$CI_COMMIT_SHA"'
+ 'ref' | '$CI_COMMIT_SHA' | 'release-cli create --ref "$CI_COMMIT_SHA"'
+ end
+
+ with_them do
+ let(:config) do
+ {
+ release: {
+ node_name => node_value
+ }
+ }
+ end
+
+ it 'generates the script' do
+ expect(subject).to eq(result)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/build/step_spec.rb b/spec/lib/gitlab/ci/build/step_spec.rb
index 9c1a8cf5e91..1cebda2cc7e 100644
--- a/spec/lib/gitlab/ci/build/step_spec.rb
+++ b/spec/lib/gitlab/ci/build/step_spec.rb
@@ -51,6 +51,30 @@ describe Gitlab::Ci::Build::Step do
end
end
+ describe '#from_release' do
+ subject { described_class.from_release(job) }
+
+ before do
+ job.run!
+ end
+
+ context 'with release' do
+ let(:job) { create(:ci_build, :release_options) }
+
+ it 'returns the release-cli command line' do
+ expect(subject.script).to eq("release-cli create --name \"Release $CI_COMMIT_SHA\" --description \"Created using the release-cli $EXTRA_DESCRIPTION\" --tag-name \"release-$CI_COMMIT_SHA\" --ref \"$CI_COMMIT_SHA\"")
+ end
+ end
+
+ context 'when release is empty' do
+ let(:job) { create(:ci_build) }
+
+ it 'does not fabricate an object' do
+ is_expected.to be_nil
+ end
+ end
+ end
+
describe '#from_after_script' do
let(:job) { create(:ci_build) }
@@ -61,7 +85,7 @@ describe Gitlab::Ci::Build::Step do
end
context 'when after_script is empty' do
- it 'doesn not fabricate an object' do
+ it 'does not fabricate an object' do
is_expected.to be_nil
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/reports_spec.rb b/spec/lib/gitlab/ci/config/entry/reports_spec.rb
index 8c6c91d919e..2c12a88dedb 100644
--- a/spec/lib/gitlab/ci/config/entry/reports_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/reports_spec.rb
@@ -37,6 +37,7 @@ describe Gitlab::Ci::Config::Entry::Reports do
:junit | 'junit.xml'
:codequality | 'gl-code-quality-report.json'
:sast | 'gl-sast-report.json'
+ :secret_detection | 'gl-secret-detection-report.json'
:dependency_scanning | 'gl-dependency-scanning-report.json'
:container_scanning | 'gl-container-scanning-report.json'
:dast | 'gl-dast-report.json'
diff --git a/spec/lib/gitlab/ci/config/entry/retry_spec.rb b/spec/lib/gitlab/ci/config/entry/retry_spec.rb
index bb3c0b0004d..67253c71f6b 100644
--- a/spec/lib/gitlab/ci/config/entry/retry_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/retry_spec.rb
@@ -94,7 +94,7 @@ describe Gitlab::Ci::Config::Entry::Retry do
 # sure this is caught, check explicitly that all of the documented
# values are valid. If they are not it means the documentation and this
# array must be updated.
- RETRY_WHEN_IN_DOCUMENTATION = %w[
+ retry_when_in_documentation = %w[
always
unknown_failure
script_failure
@@ -111,7 +111,7 @@ describe Gitlab::Ci::Config::Entry::Retry do
data_integrity_failure
].freeze
- RETRY_WHEN_IN_DOCUMENTATION.each do |reason|
+ retry_when_in_documentation.each do |reason|
context "with when from documentation `#{reason}`" do
let(:when) { reason }
diff --git a/spec/lib/gitlab/ci/parsers/terraform/tfplan_spec.rb b/spec/lib/gitlab/ci/parsers/terraform/tfplan_spec.rb
index 19cd75e586c..fec27c0f31a 100644
--- a/spec/lib/gitlab/ci/parsers/terraform/tfplan_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/terraform/tfplan_spec.rb
@@ -8,7 +8,7 @@ describe Gitlab::Ci::Parsers::Terraform::Tfplan do
let(:reports) { Gitlab::Ci::Reports::TerraformReports.new }
- context 'when data is tfplan.json' do
+ context 'when data is invalid' do
context 'when there is no data' do
it 'raises an error' do
plan = '{}'
@@ -19,31 +19,67 @@ describe Gitlab::Ci::Parsers::Terraform::Tfplan do
end
end
- context 'when there is data' do
- it 'parses JSON and returns a report' do
- plan = '{ "create": 0, "update": 1, "delete": 0 }'
+ context 'when data is not a JSON file' do
+ it 'raises an error' do
+ plan = { 'create' => 0, 'update' => 1, 'delete' => 0 }.to_s
- expect { subject.parse!(plan, reports, artifact: artifact) }.not_to raise_error
+ expect { subject.parse!(plan, reports, artifact: artifact) }.to raise_error(
+ described_class::TfplanParserError
+ )
+ end
+ end
- expect(reports.plans).to match(
- a_hash_including(
- 'tfplan.json' => a_hash_including(
- 'create' => 0,
- 'update' => 1,
- 'delete' => 0
- )
- )
+ context 'when JSON is missing a required key' do
+ it 'raises an error' do
+ plan = '{ "wrong_key": 1 }'
+
+ expect { subject.parse!(plan, reports, artifact: artifact) }.to raise_error(
+ described_class::TfplanParserError
)
end
end
end
- context 'when data is not tfplan.json' do
- it 'raises an error' do
- plan = { 'create' => 0, 'update' => 1, 'delete' => 0 }.to_s
+ context 'when data is valid' do
+ it 'parses JSON and returns a report' do
+ plan = '{ "create": 0, "update": 1, "delete": 0 }'
+
+ expect { subject.parse!(plan, reports, artifact: artifact) }.not_to raise_error
- expect { subject.parse!(plan, reports, artifact: artifact) }.to raise_error(
- described_class::TfplanParserError
+ reports.plans.each do |key, hash_value|
+ expect(hash_value.keys).to match_array(%w[create delete job_name job_path update])
+ end
+
+ expect(reports.plans).to match(
+ a_hash_including(
+ artifact.job.id.to_s => a_hash_including(
+ 'create' => 0,
+ 'update' => 1,
+ 'delete' => 0,
+ 'job_name' => artifact.job.options.dig(:artifacts, :name).to_s
+ )
+ )
+ )
+ end
+
+ it 'parses JSON when extra keys are present' do
+ plan = '{ "create": 0, "update": 1, "delete": 0, "extra_key": 4 }'
+
+ expect { subject.parse!(plan, reports, artifact: artifact) }.not_to raise_error
+
+ reports.plans.each do |key, hash_value|
+ expect(hash_value.keys).to match_array(%w[create delete job_name job_path update])
+ end
+
+ expect(reports.plans).to match(
+ a_hash_including(
+ artifact.job.id.to_s => a_hash_including(
+ 'create' => 0,
+ 'update' => 1,
+ 'delete' => 0,
+ 'job_name' => artifact.job.options.dig(:artifacts, :name).to_s
+ )
+ )
)
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb
index aa54f19b26c..1e1d5c2a724 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb
@@ -39,6 +39,10 @@ describe Gitlab::Ci::Pipeline::Chain::Seed do
expect(pipeline.iid).to be_present
end
+ it 'ensures ci_ref' do
+ expect(pipeline.ci_ref).to be_present
+ end
+
it 'sets the seeds in the command object' do
expect(command.stage_seeds).to all(be_a Gitlab::Ci::Pipeline::Seed::Base)
expect(command.stage_seeds.count).to eq 1
diff --git a/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb
index f82e49f9323..ea04862ed74 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb
@@ -56,11 +56,24 @@ describe Gitlab::Ci::Pipeline::Chain::Sequence do
end
it 'adds sequence duration to duration histogram' do
- allow(command).to receive(:duration_histogram).and_return(histogram)
+ allow(command.metrics)
+ .to receive(:pipeline_creation_duration_histogram)
+ .and_return(histogram)
subject.build!
expect(histogram).to have_received(:observe)
end
+
+ it 'records pipeline size by pipeline source in a histogram' do
+ allow(command.metrics)
+ .to receive(:pipeline_size_histogram)
+ .and_return(histogram)
+
+ subject.build!
+
+ expect(histogram).to have_received(:observe)
+ .with({ source: 'push' }, 0)
+ end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb
index fe19244659f..f5b43b5aeab 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb
@@ -134,7 +134,7 @@ describe Gitlab::Ci::Pipeline::Seed::Build::Cache do
it_behaves_like 'foo/bar directory key'
end
- context 'with directories ending in slash star' do
+ context 'with directories ending in slash star', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/222356' do
let(:files) { ['foo/bar/*'] }
it_behaves_like 'foo/bar directory key'
diff --git a/spec/lib/gitlab/ci/reports/terraform_reports_spec.rb b/spec/lib/gitlab/ci/reports/terraform_reports_spec.rb
index 061029299ac..bfab30543ed 100644
--- a/spec/lib/gitlab/ci/reports/terraform_reports_spec.rb
+++ b/spec/lib/gitlab/ci/reports/terraform_reports_spec.rb
@@ -10,23 +10,23 @@ describe Gitlab::Ci::Reports::TerraformReports do
describe '#add_plan' do
context 'when providing two unique plans' do
it 'returns two plans' do
- subject.add_plan('a/tfplan.json', { 'create' => 0, 'update' => 1, 'delete' => 0 })
- subject.add_plan('b/tfplan.json', { 'create' => 0, 'update' => 1, 'delete' => 0 })
+ subject.add_plan('123', { 'create' => 1, 'update' => 2, 'delete' => 3 })
+ subject.add_plan('456', { 'create' => 4, 'update' => 5, 'delete' => 6 })
expect(subject.plans).to eq({
- 'a/tfplan.json' => { 'create' => 0, 'update' => 1, 'delete' => 0 },
- 'b/tfplan.json' => { 'create' => 0, 'update' => 1, 'delete' => 0 }
+ '123' => { 'create' => 1, 'update' => 2, 'delete' => 3 },
+ '456' => { 'create' => 4, 'update' => 5, 'delete' => 6 }
})
end
end
context 'when providing the same plan twice' do
it 'returns the last added plan' do
- subject.add_plan('tfplan.json', { 'create' => 0, 'update' => 0, 'delete' => 0 })
- subject.add_plan('tfplan.json', { 'create' => 0, 'update' => 1, 'delete' => 0 })
+ subject.add_plan('123', { 'create' => 0, 'update' => 0, 'delete' => 0 })
+ subject.add_plan('123', { 'create' => 1, 'update' => 2, 'delete' => 3 })
expect(subject.plans).to eq({
- 'tfplan.json' => { 'create' => 0, 'update' => 1, 'delete' => 0 }
+ '123' => { 'create' => 1, 'update' => 2, 'delete' => 3 }
})
end
end
diff --git a/spec/lib/gitlab/ci/status/bridge/factory_spec.rb b/spec/lib/gitlab/ci/status/bridge/factory_spec.rb
index 1f417781988..6c67864855d 100644
--- a/spec/lib/gitlab/ci/status/bridge/factory_spec.rb
+++ b/spec/lib/gitlab/ci/status/bridge/factory_spec.rb
@@ -59,12 +59,13 @@ describe Gitlab::Ci::Status::Bridge::Factory do
context 'failed with downstream_pipeline_creation_failed' do
before do
+ bridge.options = { downstream_errors: ['No stages / jobs for this pipeline.', 'other error'] }
bridge.failure_reason = 'downstream_pipeline_creation_failed'
end
it 'fabricates correct status_tooltip' do
expect(status.status_tooltip).to eq(
- "#{s_('CiStatusText|failed')} - (downstream pipeline can not be created)"
+ "#{s_('CiStatusText|failed')} - (downstream pipeline can not be created, No stages / jobs for this pipeline., other error)"
)
end
end
diff --git a/spec/lib/gitlab/ci/status/stage/play_manual_spec.rb b/spec/lib/gitlab/ci/status/stage/play_manual_spec.rb
index b0113b00ef0..bdcbfed918f 100644
--- a/spec/lib/gitlab/ci/status/stage/play_manual_spec.rb
+++ b/spec/lib/gitlab/ci/status/stage/play_manual_spec.rb
@@ -31,7 +31,7 @@ describe Gitlab::Ci::Status::Stage::PlayManual do
subject { play_manual.action_path }
- it { is_expected.to eq("/#{pipeline.project.full_path}/pipelines/#{pipeline.id}/stages/#{stage.name}/play_manual") }
+ it { is_expected.to eq("/#{pipeline.project.full_path}/-/pipelines/#{pipeline.id}/stages/#{stage.name}/play_manual") }
end
describe '#action_method' do
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index c93bb901981..1668149d8f5 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -1388,7 +1388,7 @@ module Gitlab
let(:processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
let(:config) do
{
- stages: ["build", "test", "release"], # rubocop:disable Style/WordArray
+ stages: %w[build test release],
release: {
stage: "release",
only: ["tags"],
diff --git a/spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb b/spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb
new file mode 100644
index 00000000000..01cc0b30784
--- /dev/null
+++ b/spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Cleanup::OrphanLfsFileReferences do
+ let(:null_logger) { Logger.new('/dev/null') }
+ let(:project) { create(:project, :repository, lfs_enabled: true) }
+ let(:lfs_object) { create(:lfs_object) }
+
+ let!(:invalid_reference) { create(:lfs_objects_project, project: project, lfs_object: lfs_object) }
+
+ before do
+ allow(null_logger).to receive(:info)
+
+ allow(Gitlab.config.lfs).to receive(:enabled).and_return(true)
+
+ # Create a valid reference
+ oid = project.repository.gitaly_blob_client.get_all_lfs_pointers.first.lfs_oid
+ lfs_object2 = create(:lfs_object, oid: oid)
+ create(:lfs_objects_project, project: project, lfs_object: lfs_object2)
+ end
+
+ context 'dry run' do
+ it 'prints messages and does not delete references' do
+ expect(null_logger).to receive(:info).with("[DRY RUN] Looking for orphan LFS files for project #{project.name_with_namespace}")
+ expect(null_logger).to receive(:info).with("[DRY RUN] Found invalid references: 1")
+
+ expect { described_class.new(project, logger: null_logger).run! }
+ .not_to change { project.lfs_objects.count }
+ end
+ end
+
+ context 'regular run' do
+ it 'prints messages and deletes invalid reference' do
+ expect(null_logger).to receive(:info).with("Looking for orphan LFS files for project #{project.name_with_namespace}")
+ expect(null_logger).to receive(:info).with("Removed invalid references: 1")
+ expect(ProjectCacheWorker).to receive(:perform_async).with(project.id, [], [:lfs_objects_size])
+
+ expect { described_class.new(project, logger: null_logger, dry_run: false).run! }
+ .to change { project.lfs_objects.count }.from(2).to(1)
+
+ expect(LfsObjectsProject.exists?(invalid_reference.id)).to be_falsey
+ end
+ end
+end
diff --git a/spec/lib/gitlab/cluster/mixins/puma_cluster_spec.rb b/spec/lib/gitlab/cluster/mixins/puma_cluster_spec.rb
index b8ac8c5b95c..864529a6bf6 100644
--- a/spec/lib/gitlab/cluster/mixins/puma_cluster_spec.rb
+++ b/spec/lib/gitlab/cluster/mixins/puma_cluster_spec.rb
@@ -5,7 +5,9 @@ require 'spec_helper'
# For easier debugging set `PUMA_DEBUG=1`
describe Gitlab::Cluster::Mixins::PumaCluster do
- PUMA_STARTUP_TIMEOUT = 30
+ before do
+ stub_const('PUMA_STARTUP_TIMEOUT', 30)
+ end
context 'when running Puma in Cluster-mode' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/lib/gitlab/cluster/mixins/unicorn_http_server_spec.rb b/spec/lib/gitlab/cluster/mixins/unicorn_http_server_spec.rb
index ebe019924d5..3965eb722a0 100644
--- a/spec/lib/gitlab/cluster/mixins/unicorn_http_server_spec.rb
+++ b/spec/lib/gitlab/cluster/mixins/unicorn_http_server_spec.rb
@@ -5,7 +5,9 @@ require 'spec_helper'
# For easier debugging set `UNICORN_DEBUG=1`
describe Gitlab::Cluster::Mixins::UnicornHttpServer do
- UNICORN_STARTUP_TIMEOUT = 30
+ before do
+ stub_const('UNICORN_STARTUP_TIMEOUT', 30)
+ end
context 'when running Unicorn' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/lib/gitlab/code_navigation_path_spec.rb b/spec/lib/gitlab/code_navigation_path_spec.rb
index 938a2f821fd..07d4dfba622 100644
--- a/spec/lib/gitlab/code_navigation_path_spec.rb
+++ b/spec/lib/gitlab/code_navigation_path_spec.rb
@@ -12,12 +12,25 @@ describe Gitlab::CodeNavigationPath do
let(:commit_sha) { sha }
let(:path) { 'lib/app.rb' }
+ let(:lsif_path) { "/#{project.full_path}/-/jobs/#{job.id}/artifacts/raw/lsif/#{path}.json?file_type=lsif" }
subject { described_class.new(project, commit_sha).full_json_path_for(path) }
+ before do
+ stub_feature_flags(code_navigation: project)
+ end
+
context 'when a pipeline exist for a sha' do
it 'returns path to a file in the artifact' do
- expect(subject).to eq("/#{project.full_path}/-/jobs/#{job.id}/artifacts/raw/lsif/#{path}.json?file_type=lsif")
+ expect(subject).to eq(lsif_path)
+ end
+
+ context 'when passed commit sha is nil' do
+ let(:commit_sha) { nil }
+
+ it 'returns path to a file in the artifact' do
+ expect(subject).to eq(lsif_path)
+ end
end
end
@@ -25,7 +38,7 @@ describe Gitlab::CodeNavigationPath do
let(:commit_sha) { project.commit.id }
it 'returns path to a file in the artifact' do
- expect(subject).to eq("/#{project.full_path}/-/jobs/#{job.id}/artifacts/raw/lsif/#{path}.json?file_type=lsif")
+ expect(subject).to eq(lsif_path)
end
end
diff --git a/spec/lib/gitlab/config/entry/factory_spec.rb b/spec/lib/gitlab/config/entry/factory_spec.rb
index a614ef56a78..81ca5f2cba1 100644
--- a/spec/lib/gitlab/config/entry/factory_spec.rb
+++ b/spec/lib/gitlab/config/entry/factory_spec.rb
@@ -4,11 +4,14 @@ require 'spec_helper'
describe Gitlab::Config::Entry::Factory do
describe '#create!' do
- class Script < Gitlab::Config::Entry::Node
- include Gitlab::Config::Entry::Validatable
+ before do
+ stub_const('Script', Class.new(Gitlab::Config::Entry::Node))
+ Script.class_eval do
+ include Gitlab::Config::Entry::Validatable
- validations do
- validates :config, array_of_strings: true
+ validations do
+ validates :config, array_of_strings: true
+ end
end
end
diff --git a/spec/lib/gitlab/config/loader/yaml_spec.rb b/spec/lib/gitlab/config/loader/yaml_spec.rb
index a52c1c362e1..623fe927233 100644
--- a/spec/lib/gitlab/config/loader/yaml_spec.rb
+++ b/spec/lib/gitlab/config/loader/yaml_spec.rb
@@ -5,6 +5,16 @@ require 'spec_helper'
describe Gitlab::Config::Loader::Yaml do
let(:loader) { described_class.new(yml) }
+ let(:yml) do
+ <<~YAML
+ image: 'ruby:2.7'
+ texts:
+ nested_key: 'value1'
+ more_text:
+ more_nested_key: 'value2'
+ YAML
+ end
+
context 'when yaml syntax is correct' do
let(:yml) { 'image: ruby:2.7' }
@@ -61,6 +71,15 @@ describe Gitlab::Config::Loader::Yaml do
expect(loader).not_to be_valid
end
end
+
+ describe '#load_raw!' do
+ it 'raises error' do
+ expect { loader.load_raw! }.to raise_error(
+ Gitlab::Config::Loader::FormatError,
+ 'Invalid configuration format'
+ )
+ end
+ end
end
# Prevent Billion Laughs attack: https://gitlab.com/gitlab-org/gitlab-foss/issues/56018
@@ -123,4 +142,32 @@ describe Gitlab::Config::Loader::Yaml do
end
end
end
+
+ describe '#load_raw!' do
+ it 'loads keys as strings' do
+ expect(loader.load_raw!).to eq(
+ 'image' => 'ruby:2.7',
+ 'texts' => {
+ 'nested_key' => 'value1',
+ 'more_text' => {
+ 'more_nested_key' => 'value2'
+ }
+ }
+ )
+ end
+ end
+
+ describe '#load!' do
+ it 'symbolizes keys' do
+ expect(loader.load!).to eq(
+ image: 'ruby:2.7',
+ texts: {
+ nested_key: 'value1',
+ more_text: {
+ more_nested_key: 'value2'
+ }
+ }
+ )
+ end
+ end
end
diff --git a/spec/lib/gitlab/contributions_calendar_spec.rb b/spec/lib/gitlab/contributions_calendar_spec.rb
index 1154f029a8d..97742a3e815 100644
--- a/spec/lib/gitlab/contributions_calendar_spec.rb
+++ b/spec/lib/gitlab/contributions_calendar_spec.rb
@@ -42,7 +42,7 @@ describe Gitlab::ContributionsCalendar do
described_class.new(contributor, current_user)
end
- def create_event(project, day, hour = 0, action = Event::CREATED, target_symbol = :issue)
+ def create_event(project, day, hour = 0, action = :created, target_symbol = :issue)
@targets ||= {}
@targets[project] ||= create(target_symbol, project: project, author: contributor)
@@ -77,14 +77,14 @@ describe Gitlab::ContributionsCalendar do
end
it "counts the diff notes on merge request" do
- create_event(public_project, today, 0, Event::COMMENTED, :diff_note_on_merge_request)
+ create_event(public_project, today, 0, :commented, :diff_note_on_merge_request)
expect(calendar(contributor).activity_dates[today]).to eq(1)
end
it "counts the discussions on merge requests and issues" do
- create_event(public_project, today, 0, Event::COMMENTED, :discussion_note_on_merge_request)
- create_event(public_project, today, 2, Event::COMMENTED, :discussion_note_on_issue)
+ create_event(public_project, today, 0, :commented, :discussion_note_on_merge_request)
+ create_event(public_project, today, 2, :commented, :discussion_note_on_issue)
expect(calendar(contributor).activity_dates[today]).to eq(2)
end
diff --git a/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb b/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb
index a86278871ff..ccc99017e37 100644
--- a/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb
+++ b/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb
@@ -14,19 +14,29 @@ describe Gitlab::CycleAnalytics::StageSummary do
let(:stage_summary) { described_class.new(project, options).data }
describe "#new_issues" do
- subject { stage_summary.first[:value] }
+ subject { stage_summary.first }
- it "finds the number of issues created after the 'from date'" do
- Timecop.freeze(5.days.ago) { create(:issue, project: project) }
- Timecop.freeze(5.days.from_now) { create(:issue, project: project) }
+ context 'when from date is given' do
+ before do
+ Timecop.freeze(5.days.ago) { create(:issue, project: project) }
+ Timecop.freeze(5.days.from_now) { create(:issue, project: project) }
+ end
- expect(subject).to eq('1')
+ it "finds the number of issues created after the 'from date'" do
+ expect(subject[:value]).to eq('1')
+ end
+
+ it 'returns the localized title' do
+ Gitlab::I18n.with_locale(:ru) do
+ expect(subject[:title]).to eq(n_('New Issue', 'New Issues', 1))
+ end
+ end
end
it "doesn't find issues from other projects" do
Timecop.freeze(5.days.from_now) { create(:issue, project: create(:project)) }
- expect(subject).to eq('-')
+ expect(subject[:value]).to eq('-')
end
context 'when `to` parameter is given' do
@@ -38,38 +48,48 @@ describe Gitlab::CycleAnalytics::StageSummary do
it "doesn't find any record" do
options[:to] = Time.now
- expect(subject).to eq('-')
+ expect(subject[:value]).to eq('-')
end
it "finds records created between `from` and `to` range" do
options[:from] = 10.days.ago
options[:to] = 10.days.from_now
- expect(subject).to eq('2')
+ expect(subject[:value]).to eq('2')
end
end
end
describe "#commits" do
- subject { stage_summary.second[:value] }
+ subject { stage_summary.second }
+
+ context 'when from date is given' do
+ before do
+ Timecop.freeze(5.days.ago) { create_commit("Test message", project, user, 'master') }
+ Timecop.freeze(5.days.from_now) { create_commit("Test message", project, user, 'master') }
+ end
- it "finds the number of commits created after the 'from date'" do
- Timecop.freeze(5.days.ago) { create_commit("Test message", project, user, 'master') }
- Timecop.freeze(5.days.from_now) { create_commit("Test message", project, user, 'master') }
+ it "finds the number of commits created after the 'from date'" do
+ expect(subject[:value]).to eq('1')
+ end
- expect(subject).to eq('1')
+ it 'returns the localized title' do
+ Gitlab::I18n.with_locale(:ru) do
+ expect(subject[:title]).to eq(n_('Commit', 'Commits', 1))
+ end
+ end
end
it "doesn't find commits from other projects" do
Timecop.freeze(5.days.from_now) { create_commit("Test message", create(:project, :repository), user, 'master') }
- expect(subject).to eq('-')
+ expect(subject[:value]).to eq('-')
end
it "finds a large (> 100) number of commits if present" do
Timecop.freeze(5.days.from_now) { create_commit("Test message", project, user, 'master', count: 100) }
- expect(subject).to eq('100')
+ expect(subject[:value]).to eq('100')
end
context 'when `to` parameter is given' do
@@ -81,14 +101,14 @@ describe Gitlab::CycleAnalytics::StageSummary do
it "doesn't find any record" do
options[:to] = Time.now
- expect(subject).to eq('-')
+ expect(subject[:value]).to eq('-')
end
it "finds records created between `from` and `to` range" do
options[:from] = 10.days.ago
options[:to] = 10.days.from_now
- expect(subject).to eq('2')
+ expect(subject[:value]).to eq('2')
end
end
@@ -112,13 +132,23 @@ describe Gitlab::CycleAnalytics::StageSummary do
end
describe "#deploys" do
- subject { stage_summary.third[:value] }
+ subject { stage_summary.third }
- it "finds the number of deploys made created after the 'from date'" do
- Timecop.freeze(5.days.ago) { create(:deployment, :success, project: project) }
- Timecop.freeze(5.days.from_now) { create(:deployment, :success, project: project) }
+ context 'when from date is given' do
+ before do
+ Timecop.freeze(5.days.ago) { create(:deployment, :success, project: project) }
+ Timecop.freeze(5.days.from_now) { create(:deployment, :success, project: project) }
+ end
+
+ it "finds the number of deploys made created after the 'from date'" do
+ expect(subject[:value]).to eq('1')
+ end
- expect(subject).to eq('1')
+ it 'returns the localized title' do
+ Gitlab::I18n.with_locale(:ru) do
+ expect(subject[:title]).to eq(n_('Deploy', 'Deploys', 1))
+ end
+ end
end
it "doesn't find commits from other projects" do
@@ -126,7 +156,7 @@ describe Gitlab::CycleAnalytics::StageSummary do
create(:deployment, :success, project: create(:project, :repository))
end
- expect(subject).to eq('-')
+ expect(subject[:value]).to eq('-')
end
context 'when `to` parameter is given' do
@@ -138,14 +168,14 @@ describe Gitlab::CycleAnalytics::StageSummary do
it "doesn't find any record" do
options[:to] = Time.now
- expect(subject).to eq('-')
+ expect(subject[:value]).to eq('-')
end
it "finds records created between `from` and `to` range" do
options[:from] = 10.days.ago
options[:to] = 10.days.from_now
- expect(subject).to eq('2')
+ expect(subject[:value]).to eq('2')
end
end
end
diff --git a/spec/lib/gitlab/danger/changelog_spec.rb b/spec/lib/gitlab/danger/changelog_spec.rb
index 8929374fb87..130a4708cec 100644
--- a/spec/lib/gitlab/danger/changelog_spec.rb
+++ b/spec/lib/gitlab/danger/changelog_spec.rb
@@ -65,9 +65,7 @@ describe Gitlab::Danger::Changelog do
context 'added files contain a changelog' do
[
'changelogs/unreleased/entry.yml',
- 'ee/changelogs/unreleased/entry.yml',
- 'changelogs/unreleased-ee/entry.yml',
- 'ee/changelogs/unreleased-ee/entry.yml'
+ 'ee/changelogs/unreleased/entry.yml'
].each do |file_path|
let(:added_files) { [file_path] }
diff --git a/spec/lib/gitlab/danger/helper_spec.rb b/spec/lib/gitlab/danger/helper_spec.rb
index c2c881fd589..809064a540c 100644
--- a/spec/lib/gitlab/danger/helper_spec.rb
+++ b/spec/lib/gitlab/danger/helper_spec.rb
@@ -213,6 +213,7 @@ describe Gitlab::Danger::Helper do
'generator_templates/foo' | :backend
'vendor/languages.yml' | :backend
'vendor/licenses.csv' | :backend
+ 'file_hooks/examples/' | :backend
'Gemfile' | :backend
'Gemfile.lock' | :backend
@@ -233,6 +234,7 @@ describe Gitlab::Danger::Helper do
'.overcommit.yml.example' | :engineering_productivity
'.editorconfig' | :engineering_productivity
'tooling/overcommit/foo' | :engineering_productivity
+ '.codeclimate.yml' | :engineering_productivity
'lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml' | :backend
diff --git a/spec/lib/gitlab/danger/roulette_spec.rb b/spec/lib/gitlab/danger/roulette_spec.rb
index 4d41e2c45aa..b6148cd1407 100644
--- a/spec/lib/gitlab/danger/roulette_spec.rb
+++ b/spec/lib/gitlab/danger/roulette_spec.rb
@@ -6,40 +6,149 @@ require 'webmock/rspec'
require 'gitlab/danger/roulette'
describe Gitlab::Danger::Roulette do
+ let(:backend_maintainer) do
+ {
+ username: 'backend-maintainer',
+ name: 'Backend maintainer',
+ role: 'Backend engineer',
+ projects: { 'gitlab' => 'maintainer backend' }
+ }
+ end
+ let(:frontend_reviewer) do
+ {
+ username: 'frontend-reviewer',
+ name: 'Frontend reviewer',
+ role: 'Frontend engineer',
+ projects: { 'gitlab' => 'reviewer frontend' }
+ }
+ end
+ let(:frontend_maintainer) do
+ {
+ username: 'frontend-maintainer',
+ name: 'Frontend maintainer',
+ role: 'Frontend engineer',
+ projects: { 'gitlab' => "maintainer frontend" }
+ }
+ end
+ let(:software_engineer_in_test) do
+ {
+ username: 'software-engineer-in-test',
+ name: 'Software Engineer in Test',
+ role: 'Software Engineer in Test, Create:Source Code',
+ projects: {
+ 'gitlab' => 'reviewer qa',
+ 'gitlab-qa' => 'maintainer'
+ }
+ }
+ end
+ let(:engineering_productivity_reviewer) do
+ {
+ username: 'eng-prod-reviewer',
+ name: 'EP engineer',
+ role: 'Engineering Productivity',
+ projects: { 'gitlab' => 'reviewer backend' }
+ }
+ end
+
let(:teammate_json) do
- <<~JSON
[
- {
- "username": "in-gitlab-ce",
- "name": "CE maintainer",
- "projects":{ "gitlab-ce": "maintainer backend" }
- },
- {
- "username": "in-gitlab-ee",
- "name": "EE reviewer",
- "projects":{ "gitlab-ee": "reviewer frontend" }
- }
- ]
- JSON
+ backend_maintainer,
+ frontend_maintainer,
+ frontend_reviewer,
+ software_engineer_in_test,
+ engineering_productivity_reviewer
+ ].to_json
end
- let(:ce_teammate_matcher) do
+ subject(:roulette) { Object.new.extend(described_class) }
+
+ def matching_teammate(person)
satisfy do |teammate|
- teammate.username == 'in-gitlab-ce' &&
- teammate.name == 'CE maintainer' &&
- teammate.projects == { 'gitlab-ce' => 'maintainer backend' }
+ teammate.username == person[:username] &&
+ teammate.name == person[:name] &&
+ teammate.role == person[:role] &&
+ teammate.projects == person[:projects]
end
end
- let(:ee_teammate_matcher) do
- satisfy do |teammate|
- teammate.username == 'in-gitlab-ee' &&
- teammate.name == 'EE reviewer' &&
- teammate.projects == { 'gitlab-ee' => 'reviewer frontend' }
+ def matching_spin(category, reviewer: { username: nil }, maintainer: { username: nil }, optional: nil)
+ satisfy do |spin|
+ spin.category == category &&
+ spin.reviewer&.username == reviewer[:username] &&
+ spin.maintainer&.username == maintainer[:username] &&
+ spin.optional_role == optional
end
end
- subject(:roulette) { Object.new.extend(described_class) }
+ describe '#spin' do
+ let!(:project) { 'gitlab' }
+ let!(:branch_name) { 'a-branch' }
+ let!(:mr_labels) { ['backend', 'devops::create'] }
+ let!(:author) { Gitlab::Danger::Teammate.new('username' => 'filipa') }
+
+ before do
+ [
+ backend_maintainer,
+ frontend_reviewer,
+ frontend_maintainer,
+ software_engineer_in_test,
+ engineering_productivity_reviewer
+ ].each do |person|
+ stub_person_status(instance_double(Gitlab::Danger::Teammate, username: person[:username]), message: 'making GitLab magic')
+ end
+
+ WebMock
+ .stub_request(:get, described_class::ROULETTE_DATA_URL)
+ .to_return(body: teammate_json)
+ allow(subject).to receive_message_chain(:gitlab, :mr_author).and_return(author.username)
+ allow(subject).to receive_message_chain(:gitlab, :mr_labels).and_return(mr_labels)
+ end
+
+ context 'when change contains backend category' do
+ it 'assigns backend reviewer and maintainer' do
+ categories = [:backend]
+ spins = subject.spin(project, categories, branch_name)
+
+ expect(spins).to contain_exactly(matching_spin(:backend, reviewer: engineering_productivity_reviewer, maintainer: backend_maintainer))
+ end
+ end
+
+ context 'when change contains frontend category' do
+ it 'assigns frontend reviewer and maintainer' do
+ categories = [:frontend]
+ spins = subject.spin(project, categories, branch_name)
+
+ expect(spins).to contain_exactly(matching_spin(:frontend, reviewer: frontend_reviewer, maintainer: frontend_maintainer))
+ end
+ end
+
+ context 'when change contains QA category' do
+ it 'assigns QA reviewer and sets optional QA maintainer' do
+ categories = [:qa]
+ spins = subject.spin(project, categories, branch_name)
+
+ expect(spins).to contain_exactly(matching_spin(:qa, reviewer: software_engineer_in_test, optional: :maintainer))
+ end
+ end
+
+ context 'when change contains Engineering Productivity category' do
+ it 'assigns Engineering Productivity reviewer and fallback to backend maintainer' do
+ categories = [:engineering_productivity]
+ spins = subject.spin(project, categories, branch_name)
+
+ expect(spins).to contain_exactly(matching_spin(:engineering_productivity, reviewer: engineering_productivity_reviewer, maintainer: backend_maintainer))
+ end
+ end
+
+ context 'when change contains test category' do
+ it 'assigns corresponding SET and sets optional test maintainer' do
+ categories = [:test]
+ spins = subject.spin(project, categories, branch_name)
+
+ expect(spins).to contain_exactly(matching_spin(:test, reviewer: software_engineer_in_test, optional: :maintainer))
+ end
+ end
+ end
describe '#team' do
subject(:team) { roulette.team }
@@ -76,7 +185,15 @@ describe Gitlab::Danger::Roulette do
end
it 'returns an array of teammates' do
- is_expected.to contain_exactly(ce_teammate_matcher, ee_teammate_matcher)
+ expected_teammates = [
+ matching_teammate(backend_maintainer),
+ matching_teammate(frontend_reviewer),
+ matching_teammate(frontend_maintainer),
+ matching_teammate(software_engineer_in_test),
+ matching_teammate(engineering_productivity_reviewer)
+ ]
+
+ is_expected.to contain_exactly(*expected_teammates)
end
it 'memoizes the result' do
@@ -86,7 +203,7 @@ describe Gitlab::Danger::Roulette do
end
describe '#project_team' do
- subject { roulette.project_team('gitlab-ce') }
+ subject { roulette.project_team('gitlab-qa') }
before do
WebMock
@@ -95,7 +212,7 @@ describe Gitlab::Danger::Roulette do
end
it 'filters team by project_name' do
- is_expected.to contain_exactly(ce_teammate_matcher)
+ is_expected.to contain_exactly(matching_teammate(software_engineer_in_test))
end
end
@@ -136,15 +253,15 @@ describe Gitlab::Danger::Roulette do
it 'excludes person with no capacity' do
expect(subject.spin_for_person([no_capacity], random: Random.new)).to be_nil
end
+ end
- private
+ private
- def stub_person_status(person, message: 'dummy message', emoji: 'unicorn')
- body = { message: message, emoji: emoji }.to_json
+ def stub_person_status(person, message: 'dummy message', emoji: 'unicorn')
+ body = { message: message, emoji: emoji }.to_json
- WebMock
- .stub_request(:get, "https://gitlab.com/api/v4/users/#{person.username}/status")
- .to_return(body: body)
- end
+ WebMock
+ .stub_request(:get, "https://gitlab.com/api/v4/users/#{person.username}/status")
+ .to_return(body: body)
end
end
diff --git a/spec/lib/gitlab/data_builder/alert_spec.rb b/spec/lib/gitlab/data_builder/alert_spec.rb
new file mode 100644
index 00000000000..b881fb8139b
--- /dev/null
+++ b/spec/lib/gitlab/data_builder/alert_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::DataBuilder::Alert do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:alert) { create(:alert_management_alert, project: project) }
+
+ describe '.build' do
+ let_it_be(:data) { described_class.build(alert) }
+
+ it { expect(data).to be_a(Hash) }
+ it { expect(data[:object_kind]).to eq('alert') }
+
+ it 'contains the correct object attributes', :aggregate_failures do
+ object_attributes = data[:object_attributes]
+
+ expect(object_attributes[:title]).to eq(alert.title)
+ expect(object_attributes[:url]).to eq(Gitlab::Routing.url_helpers.details_project_alert_management_url(project, alert.iid))
+ expect(object_attributes[:severity]).to eq(alert.severity)
+ expect(object_attributes[:events]).to eq(alert.events)
+ expect(object_attributes[:status]).to eq(alert.status_name)
+ expect(object_attributes[:started_at]).to eq(alert.started_at)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/custom_structure_spec.rb b/spec/lib/gitlab/database/custom_structure_spec.rb
new file mode 100644
index 00000000000..f03b5ed0a7f
--- /dev/null
+++ b/spec/lib/gitlab/database/custom_structure_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Database::CustomStructure do
+ let_it_be(:structure) { described_class.new }
+ let_it_be(:filepath) { Rails.root.join(described_class::CUSTOM_DUMP_FILE) }
+ let_it_be(:file_header) do
+ <<~DATA
+ -- this file tracks custom GitLab data, such as foreign keys referencing partitioned tables
+ -- more details can be found in the issue: https://gitlab.com/gitlab-org/gitlab/-/issues/201872
+ SET search_path=public;
+ DATA
+ end
+
+ let(:io) { StringIO.new }
+
+ before do
+ allow(File).to receive(:open).with(filepath, anything).and_yield(io)
+ end
+
+ context 'when there are no partitioned_foreign_keys' do
+ it 'dumps a valid structure file' do
+ structure.dump
+
+ expect(io.string).to eq("#{file_header}\n")
+ end
+ end
+
+ context 'when there are partitioned_foreign_keys' do
+ let!(:first_fk) do
+ Gitlab::Database::PartitioningMigrationHelpers::PartitionedForeignKey.create(
+ cascade_delete: true, from_table: 'issues', from_column: 'project_id', to_table: 'projects', to_column: 'id')
+ end
+ let!(:second_fk) do
+ Gitlab::Database::PartitioningMigrationHelpers::PartitionedForeignKey.create(
+ cascade_delete: false, from_table: 'issues', from_column: 'moved_to_id', to_table: 'issues', to_column: 'id')
+ end
+
+ it 'dumps a file with the command to restore the current keys' do
+ structure.dump
+
+ expect(io.string).to eq(<<~DATA)
+ #{file_header}
+ COPY partitioned_foreign_keys (id, cascade_delete, from_table, from_column, to_table, to_column) FROM STDIN;
+ #{first_fk.id}\ttrue\tissues\tproject_id\tprojects\tid
+ #{second_fk.id}\tfalse\tissues\tmoved_to_id\tissues\tid
+ \\.
+ DATA
+
+ first_fk.destroy
+ io.truncate(0)
+ io.rewind
+
+ structure.dump
+
+ expect(io.string).to eq(<<~DATA)
+ #{file_header}
+ COPY partitioned_foreign_keys (id, cascade_delete, from_table, from_column, to_table, to_column) FROM STDIN;
+ #{second_fk.id}\tfalse\tissues\tmoved_to_id\tissues\tid
+ \\.
+ DATA
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 203d39be22b..bed444ee7c7 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -1539,12 +1539,17 @@ describe Gitlab::Database::MigrationHelpers do
end
describe '#create_or_update_plan_limit' do
- class self::Plan < ActiveRecord::Base
- self.table_name = 'plans'
- end
+ before do
+ stub_const('Plan', Class.new(ActiveRecord::Base))
+ stub_const('PlanLimits', Class.new(ActiveRecord::Base))
+
+ Plan.class_eval do
+ self.table_name = 'plans'
+ end
- class self::PlanLimits < ActiveRecord::Base
- self.table_name = 'plan_limits'
+ PlanLimits.class_eval do
+ self.table_name = 'plan_limits'
+ end
end
it 'properly escapes names' do
@@ -1560,28 +1565,28 @@ describe Gitlab::Database::MigrationHelpers do
context 'when plan does not exist' do
it 'does not create any plan limits' do
expect { model.create_or_update_plan_limit('project_hooks', 'plan_name', 10) }
- .not_to change { self.class::PlanLimits.count }
+ .not_to change { PlanLimits.count }
end
end
context 'when plan does exist' do
- let!(:plan) { self.class::Plan.create!(name: 'plan_name') }
+ let!(:plan) { Plan.create!(name: 'plan_name') }
context 'when limit does not exist' do
it 'inserts a new plan limits' do
expect { model.create_or_update_plan_limit('project_hooks', 'plan_name', 10) }
- .to change { self.class::PlanLimits.count }.by(1)
+ .to change { PlanLimits.count }.by(1)
- expect(self.class::PlanLimits.pluck(:project_hooks)).to contain_exactly(10)
+ expect(PlanLimits.pluck(:project_hooks)).to contain_exactly(10)
end
end
context 'when limit does exist' do
- let!(:plan_limit) { self.class::PlanLimits.create!(plan_id: plan.id) }
+ let!(:plan_limit) { PlanLimits.create!(plan_id: plan.id) }
it 'updates an existing plan limits' do
expect { model.create_or_update_plan_limit('project_hooks', 'plan_name', 999) }
- .not_to change { self.class::PlanLimits.count }
+ .not_to change { PlanLimits.count }
expect(plan_limit.reload.project_hooks).to eq(999)
end
@@ -1605,19 +1610,23 @@ describe Gitlab::Database::MigrationHelpers do
describe '#backfill_iids' do
include MigrationsHelpers
- class self::Issue < ActiveRecord::Base
- include AtomicInternalId
+ before do
+ stub_const('Issue', Class.new(ActiveRecord::Base))
+
+ Issue.class_eval do
+ include AtomicInternalId
- self.table_name = 'issues'
- self.inheritance_column = :_type_disabled
+ self.table_name = 'issues'
+ self.inheritance_column = :_type_disabled
- belongs_to :project, class_name: "::Project"
+ belongs_to :project, class_name: "::Project"
- has_internal_id :iid,
- scope: :project,
- init: ->(s) { s&.project&.issues&.maximum(:iid) },
- backfill: true,
- presence: false
+ has_internal_id :iid,
+ scope: :project,
+ init: ->(s) { s&.project&.issues&.maximum(:iid) },
+ backfill: true,
+ presence: false
+ end
end
let(:namespaces) { table(:namespaces) }
@@ -1636,7 +1645,7 @@ describe Gitlab::Database::MigrationHelpers do
model.backfill_iids('issues')
- issue = self.class::Issue.create!(project_id: project.id)
+ issue = Issue.create!(project_id: project.id)
expect(issue.iid).to eq(1)
end
@@ -1647,7 +1656,7 @@ describe Gitlab::Database::MigrationHelpers do
model.backfill_iids('issues')
- issue_b = self.class::Issue.create!(project_id: project.id)
+ issue_b = Issue.create!(project_id: project.id)
expect(issue_a.reload.iid).to eq(1)
expect(issue_b.iid).to eq(2)
@@ -1662,8 +1671,8 @@ describe Gitlab::Database::MigrationHelpers do
model.backfill_iids('issues')
- issue_a = self.class::Issue.create!(project_id: project_a.id)
- issue_b = self.class::Issue.create!(project_id: project_b.id)
+ issue_a = Issue.create!(project_id: project_a.id)
+ issue_b = Issue.create!(project_id: project_b.id)
expect(issue_a.iid).to eq(2)
expect(issue_b.iid).to eq(3)
@@ -1672,7 +1681,7 @@ describe Gitlab::Database::MigrationHelpers do
context 'when the new code creates a row post deploy but before the migration runs' do
it 'does not change the row iid' do
project = setup
- issue = self.class::Issue.create!(project_id: project.id)
+ issue = Issue.create!(project_id: project.id)
model.backfill_iids('issues')
@@ -1683,7 +1692,7 @@ describe Gitlab::Database::MigrationHelpers do
project = setup
issue_a = issues.create!(project_id: project.id)
issue_b = issues.create!(project_id: project.id)
- issue_c = self.class::Issue.create!(project_id: project.id)
+ issue_c = Issue.create!(project_id: project.id)
model.backfill_iids('issues')
@@ -1697,8 +1706,8 @@ describe Gitlab::Database::MigrationHelpers do
project_b = setup
issue_a = issues.create!(project_id: project_a.id)
issue_b = issues.create!(project_id: project_b.id)
- issue_c = self.class::Issue.create!(project_id: project_a.id)
- issue_d = self.class::Issue.create!(project_id: project_b.id)
+ issue_c = Issue.create!(project_id: project_a.id)
+ issue_d = Issue.create!(project_id: project_b.id)
model.backfill_iids('issues')
@@ -1712,12 +1721,12 @@ describe Gitlab::Database::MigrationHelpers do
project = setup
issue_a = issues.create!(project_id: project.id)
issue_b = issues.create!(project_id: project.id)
- issue_c = self.class::Issue.create!(project_id: project.id)
+ issue_c = Issue.create!(project_id: project.id)
model.backfill_iids('issues')
- issue_d = self.class::Issue.create!(project_id: project.id)
- issue_e = self.class::Issue.create!(project_id: project.id)
+ issue_d = Issue.create!(project_id: project.id)
+ issue_e = Issue.create!(project_id: project.id)
expect(issue_a.reload.iid).to eq(1)
expect(issue_b.reload.iid).to eq(2)
@@ -1731,14 +1740,14 @@ describe Gitlab::Database::MigrationHelpers do
project_b = setup
issue_a = issues.create!(project_id: project_a.id)
issue_b = issues.create!(project_id: project_b.id)
- issue_c = self.class::Issue.create!(project_id: project_a.id)
- issue_d = self.class::Issue.create!(project_id: project_b.id)
+ issue_c = Issue.create!(project_id: project_a.id)
+ issue_d = Issue.create!(project_id: project_b.id)
model.backfill_iids('issues')
- issue_e = self.class::Issue.create!(project_id: project_a.id)
- issue_f = self.class::Issue.create!(project_id: project_b.id)
- issue_g = self.class::Issue.create!(project_id: project_a.id)
+ issue_e = Issue.create!(project_id: project_a.id)
+ issue_f = Issue.create!(project_id: project_b.id)
+ issue_g = Issue.create!(project_id: project_a.id)
expect(issue_a.reload.iid).to eq(1)
expect(issue_b.reload.iid).to eq(1)
@@ -1754,7 +1763,7 @@ describe Gitlab::Database::MigrationHelpers do
it 'backfills iids' do
project = setup
issue_a = issues.create!(project_id: project.id)
- issue_b = self.class::Issue.create!(project_id: project.id)
+ issue_b = Issue.create!(project_id: project.id)
issue_c = issues.create!(project_id: project.id)
model.backfill_iids('issues')
@@ -1768,12 +1777,12 @@ describe Gitlab::Database::MigrationHelpers do
project = setup
issue_a = issues.create!(project_id: project.id)
issue_b = issues.create!(project_id: project.id)
- issue_c = self.class::Issue.create!(project_id: project.id)
+ issue_c = Issue.create!(project_id: project.id)
issue_d = issues.create!(project_id: project.id)
model.backfill_iids('issues')
- issue_e = self.class::Issue.create!(project_id: project.id)
+ issue_e = Issue.create!(project_id: project.id)
expect(issue_a.reload.iid).to eq(1)
expect(issue_b.reload.iid).to eq(2)
@@ -1787,9 +1796,9 @@ describe Gitlab::Database::MigrationHelpers do
it 'backfills iids' do
project = setup
issue_a = issues.create!(project_id: project.id)
- issue_b = self.class::Issue.create!(project_id: project.id)
+ issue_b = Issue.create!(project_id: project.id)
issue_c = issues.create!(project_id: project.id)
- issue_d = self.class::Issue.create!(project_id: project.id)
+ issue_d = Issue.create!(project_id: project.id)
model.backfill_iids('issues')
@@ -1803,13 +1812,13 @@ describe Gitlab::Database::MigrationHelpers do
project = setup
issue_a = issues.create!(project_id: project.id)
issue_b = issues.create!(project_id: project.id)
- issue_c = self.class::Issue.create!(project_id: project.id)
+ issue_c = Issue.create!(project_id: project.id)
issue_d = issues.create!(project_id: project.id)
- issue_e = self.class::Issue.create!(project_id: project.id)
+ issue_e = Issue.create!(project_id: project.id)
model.backfill_iids('issues')
- issue_f = self.class::Issue.create!(project_id: project.id)
+ issue_f = Issue.create!(project_id: project.id)
expect(issue_a.reload.iid).to eq(1)
expect(issue_b.reload.iid).to eq(2)
@@ -1825,7 +1834,7 @@ describe Gitlab::Database::MigrationHelpers do
project = setup
issue_a = issues.create!(project_id: project.id)
issue_b = issues.create!(project_id: project.id)
- issue_c = self.class::Issue.create!(project_id: project.id)
+ issue_c = Issue.create!(project_id: project.id)
issue_c.delete
model.backfill_iids('issues')
@@ -1838,12 +1847,12 @@ describe Gitlab::Database::MigrationHelpers do
project = setup
issue_a = issues.create!(project_id: project.id)
issue_b = issues.create!(project_id: project.id)
- issue_c = self.class::Issue.create!(project_id: project.id)
+ issue_c = Issue.create!(project_id: project.id)
issue_c.delete
model.backfill_iids('issues')
- issue_d = self.class::Issue.create!(project_id: project.id)
+ issue_d = Issue.create!(project_id: project.id)
expect(issue_a.reload.iid).to eq(1)
expect(issue_b.reload.iid).to eq(2)
@@ -1856,7 +1865,7 @@ describe Gitlab::Database::MigrationHelpers do
project = setup
issue_a = issues.create!(project_id: project.id)
issue_b = issues.create!(project_id: project.id)
- issue_c = self.class::Issue.create!(project_id: project.id)
+ issue_c = Issue.create!(project_id: project.id)
issue_c.delete
issue_d = issues.create!(project_id: project.id)
@@ -1871,13 +1880,13 @@ describe Gitlab::Database::MigrationHelpers do
project = setup
issue_a = issues.create!(project_id: project.id)
issue_b = issues.create!(project_id: project.id)
- issue_c = self.class::Issue.create!(project_id: project.id)
+ issue_c = Issue.create!(project_id: project.id)
issue_c.delete
issue_d = issues.create!(project_id: project.id)
model.backfill_iids('issues')
- issue_e = self.class::Issue.create!(project_id: project.id)
+ issue_e = Issue.create!(project_id: project.id)
expect(issue_a.reload.iid).to eq(1)
expect(issue_b.reload.iid).to eq(2)
@@ -1891,9 +1900,9 @@ describe Gitlab::Database::MigrationHelpers do
project = setup
issue_a = issues.create!(project_id: project.id)
issue_b = issues.create!(project_id: project.id)
- issue_c = self.class::Issue.create!(project_id: project.id)
+ issue_c = Issue.create!(project_id: project.id)
issue_c.delete
- issue_d = self.class::Issue.create!(project_id: project.id)
+ issue_d = Issue.create!(project_id: project.id)
model.backfill_iids('issues')
@@ -1906,13 +1915,13 @@ describe Gitlab::Database::MigrationHelpers do
project = setup
issue_a = issues.create!(project_id: project.id)
issue_b = issues.create!(project_id: project.id)
- issue_c = self.class::Issue.create!(project_id: project.id)
+ issue_c = Issue.create!(project_id: project.id)
issue_c.delete
- issue_d = self.class::Issue.create!(project_id: project.id)
+ issue_d = Issue.create!(project_id: project.id)
model.backfill_iids('issues')
- issue_e = self.class::Issue.create!(project_id: project.id)
+ issue_e = Issue.create!(project_id: project.id)
expect(issue_a.reload.iid).to eq(1)
expect(issue_b.reload.iid).to eq(2)
@@ -1929,7 +1938,7 @@ describe Gitlab::Database::MigrationHelpers do
model.backfill_iids('issues')
- issue_b = self.class::Issue.create!(project_id: project_b.id)
+ issue_b = Issue.create!(project_id: project_b.id)
expect(issue_a.reload.iid).to eq(1)
expect(issue_b.reload.iid).to eq(1)
@@ -2066,6 +2075,34 @@ describe Gitlab::Database::MigrationHelpers do
allow(model).to receive(:check_constraint_exists?).and_return(false)
end
+ context 'constraint name validation' do
+ it 'raises an error when too long' do
+ expect do
+ model.add_check_constraint(
+ :test_table,
+ 'name IS NOT NULL',
+ 'a' * (Gitlab::Database::MigrationHelpers::MAX_IDENTIFIER_NAME_LENGTH + 1)
+ )
+ end.to raise_error(RuntimeError)
+ end
+
+ it 'does not raise error when the length is acceptable' do
+ constraint_name = 'a' * Gitlab::Database::MigrationHelpers::MAX_IDENTIFIER_NAME_LENGTH
+
+ expect(model).to receive(:transaction_open?).and_return(false)
+ expect(model).to receive(:check_constraint_exists?).and_return(false)
+ expect(model).to receive(:with_lock_retries).and_call_original
+ expect(model).to receive(:execute).with(/ADD CONSTRAINT/)
+
+ model.add_check_constraint(
+ :test_table,
+ 'name IS NOT NULL',
+ constraint_name,
+ validate: false
+ )
+ end
+ end
+
context 'inside a transaction' do
it 'raises an error' do
expect(model).to receive(:transaction_open?).and_return(true)
diff --git a/spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb b/spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb
index 0f68201a153..dee1d7df1a9 100644
--- a/spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb
+++ b/spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb
@@ -3,39 +3,48 @@
require 'spec_helper'
describe Gitlab::Database::ObsoleteIgnoredColumns do
- module Testing
+ before do
+ stub_const('Testing', Module.new)
+ stub_const('Testing::MyBase', Class.new(ActiveRecord::Base))
+ stub_const('SomeAbstract', Class.new(Testing::MyBase))
+ stub_const('Testing::B', Class.new(Testing::MyBase))
+ stub_const('Testing::A', Class.new(SomeAbstract))
+ stub_const('Testing::C', Class.new(Testing::MyBase))
+
# Used a fixed date to prevent tests failing across date boundaries
- REMOVE_DATE = Date.new(2019, 12, 16)
+ stub_const('REMOVE_DATE', Date.new(2019, 12, 16))
- class MyBase < ApplicationRecord
- end
+ Testing.module_eval do
+ Testing::MyBase.class_eval do
+ end
- class SomeAbstract < MyBase
- include IgnorableColumns
+ SomeAbstract.class_eval do
+ include IgnorableColumns
- self.abstract_class = true
+ self.abstract_class = true
- self.table_name = 'projects'
+ self.table_name = 'projects'
- ignore_column :unused, remove_after: '2019-01-01', remove_with: '12.0'
- end
+ ignore_column :unused, remove_after: '2019-01-01', remove_with: '12.0'
+ end
- class B < MyBase
- include IgnorableColumns
+ Testing::B.class_eval do
+ include IgnorableColumns
- self.table_name = 'issues'
+ self.table_name = 'issues'
- ignore_column :id, :other, remove_after: '2019-01-01', remove_with: '12.0'
- ignore_column :not_used_but_still_ignored, remove_after: REMOVE_DATE.to_s, remove_with: '12.1'
- end
+ ignore_column :id, :other, remove_after: '2019-01-01', remove_with: '12.0'
+ ignore_column :not_used_but_still_ignored, remove_after: REMOVE_DATE.to_s, remove_with: '12.1'
+ end
- class A < SomeAbstract
- ignore_column :also_unused, remove_after: '2019-02-01', remove_with: '12.1'
- ignore_column :not_used_but_still_ignored, remove_after: REMOVE_DATE.to_s, remove_with: '12.1'
- end
+ Testing::A.class_eval do
+ ignore_column :also_unused, remove_after: '2019-02-01', remove_with: '12.1'
+ ignore_column :not_used_but_still_ignored, remove_after: REMOVE_DATE.to_s, remove_with: '12.1'
+ end
- class C < MyBase
- self.table_name = 'users'
+ Testing::C.class_eval do
+ self.table_name = 'users'
+ end
end
end
@@ -43,7 +52,7 @@ describe Gitlab::Database::ObsoleteIgnoredColumns do
describe '#execute' do
it 'returns a list of class names and columns pairs' do
- Timecop.freeze(Testing::REMOVE_DATE) do
+ Timecop.freeze(REMOVE_DATE) do
expect(subject.execute).to eq([
['Testing::A', {
'unused' => IgnorableColumns::ColumnIgnore.new(Date.parse('2019-01-01'), '12.0'),
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb
index 0e2fb047469..9cec77b434d 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb
@@ -2,17 +2,21 @@
require 'spec_helper'
-describe Gitlab::Database::PartitioningMigrationHelpers do
+describe Gitlab::Database::PartitioningMigrationHelpers::ForeignKeyHelpers do
+ include TriggerHelpers
+
let(:model) do
ActiveRecord::Migration.new.extend(described_class)
end
let_it_be(:connection) { ActiveRecord::Base.connection }
let(:referenced_table) { :issues }
- let(:function_name) { model.fk_function_name(referenced_table) }
- let(:trigger_name) { model.fk_trigger_name(referenced_table) }
+ let(:function_name) { '_test_partitioned_foreign_keys_function' }
+ let(:trigger_name) { '_test_partitioned_foreign_keys_trigger' }
before do
allow(model).to receive(:puts)
+ allow(model).to receive(:fk_function_name).and_return(function_name)
+ allow(model).to receive(:fk_trigger_name).and_return(trigger_name)
end
describe 'adding a foreign key' do
@@ -25,7 +29,7 @@ describe Gitlab::Database::PartitioningMigrationHelpers do
model.add_partitioned_foreign_key :issue_assignees, referenced_table
expect_function_to_contain(function_name, 'delete from issue_assignees where issue_id = old.id')
- expect_valid_function_trigger(trigger_name, function_name)
+ expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
end
end
@@ -41,7 +45,7 @@ describe Gitlab::Database::PartitioningMigrationHelpers do
expect_function_to_contain(function_name,
'delete from issue_assignees where issue_id = old.id',
'delete from epic_issues where issue_id = old.id')
- expect_valid_function_trigger(trigger_name, function_name)
+ expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
end
end
@@ -57,7 +61,7 @@ describe Gitlab::Database::PartitioningMigrationHelpers do
expect_function_to_contain(function_name,
'delete from issues where moved_to_id = old.id',
'delete from issues where duplicated_to_id = old.id')
- expect_valid_function_trigger(trigger_name, function_name)
+ expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
end
end
@@ -66,7 +70,7 @@ describe Gitlab::Database::PartitioningMigrationHelpers do
model.add_partitioned_foreign_key :issues, referenced_table, column: :moved_to_id
expect_function_to_contain(function_name, 'delete from issues where moved_to_id = old.id')
- expect_valid_function_trigger(trigger_name, function_name)
+ expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
end
end
end
@@ -77,7 +81,7 @@ describe Gitlab::Database::PartitioningMigrationHelpers do
model.add_partitioned_foreign_key :issue_assignees, referenced_table, on_delete: :nullify
expect_function_to_contain(function_name, 'update issue_assignees set issue_id = null where issue_id = old.id')
- expect_valid_function_trigger(trigger_name, function_name)
+ expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
end
end
@@ -86,7 +90,7 @@ describe Gitlab::Database::PartitioningMigrationHelpers do
model.add_partitioned_foreign_key :issues, referenced_table, column: :duplicated_to_id
expect_function_to_contain(function_name, 'delete from issues where duplicated_to_id = old.id')
- expect_valid_function_trigger(trigger_name, function_name)
+ expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
end
end
@@ -97,7 +101,7 @@ describe Gitlab::Database::PartitioningMigrationHelpers do
model.add_partitioned_foreign_key :user_preferences, referenced_table, column: :user_id, primary_key: :user_id
expect_function_to_contain(function_name, 'delete from user_preferences where user_id = old.user_id')
- expect_valid_function_trigger(trigger_name, function_name)
+ expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
end
end
@@ -135,12 +139,12 @@ describe Gitlab::Database::PartitioningMigrationHelpers do
expect_function_to_contain(function_name,
'delete from issue_assignees where issue_id = old.id',
'delete from epic_issues where issue_id = old.id')
- expect_valid_function_trigger(trigger_name, function_name)
+ expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
model.remove_partitioned_foreign_key :issue_assignees, referenced_table
expect_function_to_contain(function_name, 'delete from epic_issues where issue_id = old.id')
- expect_valid_function_trigger(trigger_name, function_name)
+ expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
end
end
@@ -151,12 +155,12 @@ describe Gitlab::Database::PartitioningMigrationHelpers do
it 'removes the trigger function altogether' do
expect_function_to_contain(function_name, 'delete from issue_assignees where issue_id = old.id')
- expect_valid_function_trigger(trigger_name, function_name)
+ expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
model.remove_partitioned_foreign_key :issue_assignees, referenced_table
- expect(find_function_def(function_name)).to be_nil
- expect(find_trigger_def(trigger_name)).to be_nil
+ expect_function_not_to_exist(function_name)
+ expect_trigger_not_to_exist(referenced_table, trigger_name)
end
end
@@ -167,12 +171,12 @@ describe Gitlab::Database::PartitioningMigrationHelpers do
it 'ignores the invalid key and properly recreates the trigger function' do
expect_function_to_contain(function_name, 'delete from issue_assignees where issue_id = old.id')
- expect_valid_function_trigger(trigger_name, function_name)
+ expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
model.remove_partitioned_foreign_key :issues, referenced_table, column: :moved_to_id
expect_function_to_contain(function_name, 'delete from issue_assignees where issue_id = old.id')
- expect_valid_function_trigger(trigger_name, function_name)
+ expect_valid_function_trigger(referenced_table, trigger_name, function_name, after: 'delete')
end
end
@@ -186,45 +190,4 @@ describe Gitlab::Database::PartitioningMigrationHelpers do
end
end
end
-
- def expect_function_to_contain(name, *statements)
- return_stmt, *body_stmts = parsed_function_statements(name).reverse
-
- expect(return_stmt).to eq('return old')
- expect(body_stmts).to contain_exactly(*statements)
- end
-
- def expect_valid_function_trigger(name, fn_name)
- event, activation, definition = cleaned_trigger_def(name)
-
- expect(event).to eq('delete')
- expect(activation).to eq('after')
- expect(definition).to eq("execute procedure #{fn_name}()")
- end
-
- def parsed_function_statements(name)
- cleaned_definition = find_function_def(name)['fn_body'].downcase.gsub(/\s+/, ' ')
- statements = cleaned_definition.sub(/\A\s*begin\s*(.*)\s*end\s*\Z/, "\\1")
- statements.split(';').map! { |stmt| stmt.strip.presence }.compact!
- end
-
- def find_function_def(name)
- connection.execute("select prosrc as fn_body from pg_proc where proname = '#{name}';").first
- end
-
- def cleaned_trigger_def(name)
- find_trigger_def(name).values_at('event', 'activation', 'definition').map!(&:downcase)
- end
-
- def find_trigger_def(name)
- connection.execute(<<~SQL).first
- select
- string_agg(event_manipulation, ',') as event,
- action_timing as activation,
- action_statement as definition
- from information_schema.triggers
- where trigger_name = '#{name}'
- group by 2, 3
- SQL
- end
end
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
new file mode 100644
index 00000000000..586b57d2002
--- /dev/null
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
@@ -0,0 +1,289 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHelpers do
+ include PartitioningHelpers
+ include TriggerHelpers
+
+ let(:migration) do
+ ActiveRecord::Migration.new.extend(described_class)
+ end
+
+ let_it_be(:connection) { ActiveRecord::Base.connection }
+ let(:template_table) { :audit_events }
+ let(:partitioned_table) { '_test_migration_partitioned_table' }
+ let(:function_name) { '_test_migration_function_name' }
+ let(:trigger_name) { '_test_migration_trigger_name' }
+ let(:partition_column) { 'created_at' }
+ let(:min_date) { Date.new(2019, 12) }
+ let(:max_date) { Date.new(2020, 3) }
+
+ before do
+ allow(migration).to receive(:puts)
+ allow(migration).to receive(:transaction_open?).and_return(false)
+ allow(migration).to receive(:partitioned_table_name).and_return(partitioned_table)
+ allow(migration).to receive(:sync_function_name).and_return(function_name)
+ allow(migration).to receive(:sync_trigger_name).and_return(trigger_name)
+ allow(migration).to receive(:assert_table_is_whitelisted)
+ end
+
+ describe '#partition_table_by_date' do
+ let(:partition_column) { 'created_at' }
+ let(:old_primary_key) { 'id' }
+ let(:new_primary_key) { [old_primary_key, partition_column] }
+
+ context 'when the table is not whitelisted' do
+ let(:template_table) { :this_table_is_not_whitelisted }
+
+ it 'raises an error' do
+ expect(migration).to receive(:assert_table_is_whitelisted).with(template_table).and_call_original
+
+ expect do
+ migration.partition_table_by_date template_table, partition_column, min_date: min_date, max_date: max_date
+ end.to raise_error(/#{template_table} is not whitelisted for use/)
+ end
+ end
+
+ context 'when run inside a transaction block' do
+ it 'raises an error' do
+ expect(migration).to receive(:transaction_open?).and_return(true)
+
+ expect do
+ migration.partition_table_by_date template_table, partition_column, min_date: min_date, max_date: max_date
+ end.to raise_error(/can not be run inside a transaction/)
+ end
+ end
+
+ context 'when the max_date is less than the min_date' do
+ let(:max_date) { Time.utc(2019, 6) }
+
+ it 'raises an error' do
+ expect do
+ migration.partition_table_by_date template_table, partition_column, min_date: min_date, max_date: max_date
+ end.to raise_error(/max_date #{max_date} must be greater than min_date #{min_date}/)
+ end
+ end
+
+ context 'when the max_date is equal to the min_date' do
+ let(:max_date) { min_date }
+
+ it 'raises an error' do
+ expect do
+ migration.partition_table_by_date template_table, partition_column, min_date: min_date, max_date: max_date
+ end.to raise_error(/max_date #{max_date} must be greater than min_date #{min_date}/)
+ end
+ end
+
+ context 'when the given table does not have a primary key' do
+ let(:template_table) { :_partitioning_migration_helper_test_table }
+ let(:partition_column) { :some_field }
+
+ it 'raises an error' do
+ migration.create_table template_table, id: false do |t|
+ t.integer :id
+ t.datetime partition_column
+ end
+
+ expect do
+ migration.partition_table_by_date template_table, partition_column, min_date: min_date, max_date: max_date
+ end.to raise_error(/primary key not defined for #{template_table}/)
+ end
+ end
+
+ context 'when an invalid partition column is given' do
+ let(:partition_column) { :_this_is_not_real }
+
+ it 'raises an error' do
+ expect do
+ migration.partition_table_by_date template_table, partition_column, min_date: min_date, max_date: max_date
+ end.to raise_error(/partition column #{partition_column} does not exist/)
+ end
+ end
+
+ describe 'constructing the partitioned table' do
+ it 'creates a table partitioned by the proper column' do
+ migration.partition_table_by_date template_table, partition_column, min_date: min_date, max_date: max_date
+
+ expect(connection.table_exists?(partitioned_table)).to be(true)
+ expect(connection.primary_key(partitioned_table)).to eq(new_primary_key)
+
+ expect_table_partitioned_by(partitioned_table, [partition_column])
+ end
+
+ it 'changes the primary key datatype to bigint' do
+ migration.partition_table_by_date template_table, partition_column, min_date: min_date, max_date: max_date
+
+ pk_column = connection.columns(partitioned_table).find { |c| c.name == old_primary_key }
+
+ expect(pk_column.sql_type).to eq('bigint')
+ end
+
+ context 'with a non-integer primary key datatype' do
+ before do
+ connection.create_table :another_example, id: false do |t|
+ t.string :identifier, primary_key: true
+ t.timestamp :created_at
+ end
+ end
+
+ let(:template_table) { :another_example }
+ let(:old_primary_key) { 'identifier' }
+
+ it 'does not change the primary key datatype' do
+ migration.partition_table_by_date template_table, partition_column, min_date: min_date, max_date: max_date
+
+ original_pk_column = connection.columns(template_table).find { |c| c.name == old_primary_key }
+ pk_column = connection.columns(partitioned_table).find { |c| c.name == old_primary_key }
+
+ expect(pk_column).not_to be_nil
+ expect(pk_column).to eq(original_pk_column)
+ end
+ end
+
+ it 'removes the default from the primary key column' do
+ migration.partition_table_by_date template_table, partition_column, min_date: min_date, max_date: max_date
+
+ pk_column = connection.columns(partitioned_table).find { |c| c.name == old_primary_key }
+
+ expect(pk_column.default_function).to be_nil
+ end
+
+ it 'creates the partitioned table with the same non-key columns' do
+ migration.partition_table_by_date template_table, partition_column, min_date: min_date, max_date: max_date
+
+ copied_columns = filter_columns_by_name(connection.columns(partitioned_table), new_primary_key)
+ original_columns = filter_columns_by_name(connection.columns(template_table), new_primary_key)
+
+ expect(copied_columns).to match_array(original_columns)
+ end
+
+ it 'creates a partition spanning over each month in the range given' do
+ migration.partition_table_by_date template_table, partition_column, min_date: min_date, max_date: max_date
+
+ expect_range_partition_of("#{partitioned_table}_000000", partitioned_table, 'MINVALUE', "'2019-12-01 00:00:00'")
+ expect_range_partition_of("#{partitioned_table}_201912", partitioned_table, "'2019-12-01 00:00:00'", "'2020-01-01 00:00:00'")
+ expect_range_partition_of("#{partitioned_table}_202001", partitioned_table, "'2020-01-01 00:00:00'", "'2020-02-01 00:00:00'")
+ expect_range_partition_of("#{partitioned_table}_202002", partitioned_table, "'2020-02-01 00:00:00'", "'2020-03-01 00:00:00'")
+ end
+ end
+
+ describe 'keeping data in sync with the partitioned table' do
+ let(:template_table) { :todos }
+ let(:model) { Class.new(ActiveRecord::Base) }
+ let(:timestamp) { Time.utc(2019, 12, 1, 12).round }
+
+ before do
+ model.primary_key = :id
+ model.table_name = partitioned_table
+ end
+
+ it 'creates a trigger function on the original table' do
+ expect_function_not_to_exist(function_name)
+ expect_trigger_not_to_exist(template_table, trigger_name)
+
+ migration.partition_table_by_date template_table, partition_column, min_date: min_date, max_date: max_date
+
+ expect_function_to_exist(function_name)
+ expect_valid_function_trigger(template_table, trigger_name, function_name, after: %w[delete insert update])
+ end
+
+ it 'syncs inserts to the partitioned tables' do
+ migration.partition_table_by_date template_table, partition_column, min_date: min_date, max_date: max_date
+
+ expect(model.count).to eq(0)
+
+ first_todo = create(:todo, created_at: timestamp, updated_at: timestamp)
+ second_todo = create(:todo, created_at: timestamp, updated_at: timestamp)
+
+ expect(model.count).to eq(2)
+ expect(model.find(first_todo.id).attributes).to eq(first_todo.attributes)
+ expect(model.find(second_todo.id).attributes).to eq(second_todo.attributes)
+ end
+
+ it 'syncs updates to the partitioned tables' do
+ migration.partition_table_by_date template_table, partition_column, min_date: min_date, max_date: max_date
+
+ first_todo = create(:todo, :pending, commit_id: nil, created_at: timestamp, updated_at: timestamp)
+ second_todo = create(:todo, created_at: timestamp, updated_at: timestamp)
+
+ expect(model.count).to eq(2)
+
+ first_copy = model.find(first_todo.id)
+ second_copy = model.find(second_todo.id)
+
+ expect(first_copy.attributes).to eq(first_todo.attributes)
+ expect(second_copy.attributes).to eq(second_todo.attributes)
+
+ first_todo.update(state_event: 'done', commit_id: 'abc123', updated_at: timestamp + 1.second)
+
+ expect(model.count).to eq(2)
+ expect(first_copy.reload.attributes).to eq(first_todo.attributes)
+ expect(second_copy.reload.attributes).to eq(second_todo.attributes)
+ end
+
+ it 'syncs deletes to the partitioned tables' do
+ migration.partition_table_by_date template_table, partition_column, min_date: min_date, max_date: max_date
+
+ first_todo = create(:todo, created_at: timestamp, updated_at: timestamp)
+ second_todo = create(:todo, created_at: timestamp, updated_at: timestamp)
+
+ expect(model.count).to eq(2)
+
+ first_todo.destroy
+
+ expect(model.count).to eq(1)
+ expect(model.find_by_id(first_todo.id)).to be_nil
+ expect(model.find(second_todo.id).attributes).to eq(second_todo.attributes)
+ end
+ end
+ end
+
+ describe '#drop_partitioned_table_for' do
+ let(:expected_tables) do
+ %w[000000 201912 202001 202002].map { |suffix| "#{partitioned_table}_#{suffix}" }.unshift(partitioned_table)
+ end
+
+ context 'when the table is not whitelisted' do
+ let(:template_table) { :this_table_is_not_whitelisted }
+
+ it 'raises an error' do
+ expect(migration).to receive(:assert_table_is_whitelisted).with(template_table).and_call_original
+
+ expect do
+ migration.drop_partitioned_table_for template_table
+ end.to raise_error(/#{template_table} is not whitelisted for use/)
+ end
+ end
+
+ it 'drops the trigger syncing to the partitioned table' do
+ migration.partition_table_by_date template_table, partition_column, min_date: min_date, max_date: max_date
+
+ expect_function_to_exist(function_name)
+ expect_valid_function_trigger(template_table, trigger_name, function_name, after: %w[delete insert update])
+
+ migration.drop_partitioned_table_for template_table
+
+ expect_function_not_to_exist(function_name)
+ expect_trigger_not_to_exist(template_table, trigger_name)
+ end
+
+ it 'drops the partitioned copy and all partitions' do
+ migration.partition_table_by_date template_table, partition_column, min_date: min_date, max_date: max_date
+
+ expected_tables.each do |table|
+ expect(connection.table_exists?(table)).to be(true)
+ end
+
+ migration.drop_partitioned_table_for template_table
+
+ expected_tables.each do |table|
+ expect(connection.table_exists?(table)).to be(false)
+ end
+ end
+ end
+
+ def filter_columns_by_name(columns, names)
+ columns.reject { |c| names.include?(c.name) }
+ end
+end
diff --git a/spec/lib/gitlab/database/schema_cleaner_spec.rb b/spec/lib/gitlab/database/schema_cleaner_spec.rb
index ee9477156fb..adaeb85d52d 100644
--- a/spec/lib/gitlab/database/schema_cleaner_spec.rb
+++ b/spec/lib/gitlab/database/schema_cleaner_spec.rb
@@ -15,10 +15,6 @@ describe Gitlab::Database::SchemaCleaner do
expect(subject).not_to include('COMMENT ON EXTENSION')
end
- it 'includes the plpgsql extension' do
- expect(subject).to include('CREATE EXTENSION IF NOT EXISTS plpgsql WITH SCHEMA pg_catalog;')
- end
-
it 'sets the search_path' do
expect(subject.split("\n").first).to eq('SET search_path=public;')
end
diff --git a/spec/lib/gitlab/database/with_lock_retries_spec.rb b/spec/lib/gitlab/database/with_lock_retries_spec.rb
index 9c8c9749125..d7eee594631 100644
--- a/spec/lib/gitlab/database/with_lock_retries_spec.rb
+++ b/spec/lib/gitlab/database/with_lock_retries_spec.rb
@@ -35,9 +35,6 @@ describe Gitlab::Database::WithLockRetries do
end
context 'when lock retry is enabled' do
- class ActiveRecordSecond < ActiveRecord::Base
- end
-
let(:lock_fiber) do
Fiber.new do
# Initiating a second DB connection for the lock
@@ -52,6 +49,8 @@ describe Gitlab::Database::WithLockRetries do
end
before do
+ stub_const('ActiveRecordSecond', Class.new(ActiveRecord::Base))
+
lock_fiber.resume # start the transaction and lock the table
end
diff --git a/spec/lib/gitlab/dependency_linker/go_mod_linker_spec.rb b/spec/lib/gitlab/dependency_linker/go_mod_linker_spec.rb
new file mode 100644
index 00000000000..769daa0b3a6
--- /dev/null
+++ b/spec/lib/gitlab/dependency_linker/go_mod_linker_spec.rb
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::DependencyLinker::GoModLinker do
+ let(:file_name) { 'go.mod' }
+ let(:file_content) do
+ <<-CONTENT.strip_heredoc
+ module gitlab.com/gitlab-org/gitlab-workhorse
+
+ go 1.12
+
+ require (
+ github.com/BurntSushi/toml v0.3.1
+ github.com/FZambia/sentinel v1.0.0
+ github.com/alecthomas/chroma v0.7.3
+ github.com/dgrijalva/jwt-go v3.2.0+incompatible
+ github.com/getsentry/raven-go v0.1.2
+ github.com/golang/gddo v0.0.0-20190419222130-af0f2af80721
+ github.com/golang/protobuf v1.3.2
+ github.com/gomodule/redigo v2.0.0+incompatible
+ github.com/gorilla/websocket v1.4.0
+ github.com/grpc-ecosystem/go-grpc-middleware v1.0.0
+ github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0
+ github.com/jfbus/httprs v0.0.0-20190827093123-b0af8319bb15
+ github.com/jpillora/backoff v0.0.0-20170918002102-8eab2debe79d
+ github.com/prometheus/client_golang v1.0.0
+ github.com/rafaeljusto/redigomock v0.0.0-20190202135759-257e089e14a1
+ github.com/sebest/xff v0.0.0-20160910043805-6c115e0ffa35
+ github.com/sirupsen/logrus v1.3.0
+ github.com/stretchr/testify v1.5.1
+ gitlab.com/gitlab-org/gitaly v1.74.0
+ gitlab.com/gitlab-org/labkit v0.0.0-20200520155818-96e583c57891
+ golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f
+ golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa
+ golang.org/x/tools v0.0.0-20200117161641-43d50277825c
+ google.golang.org/grpc v1.24.0
+ gopkg.in/yaml.v2 v2.2.8 // indirect
+ honnef.co/go/tools v0.0.1-2019.2.3
+ )
+ CONTENT
+ end
+
+ describe '.support?' do
+ it 'supports go.mod' do
+ expect(described_class.support?('go.mod')).to be_truthy
+ end
+
+ it 'does not support other files' do
+ expect(described_class.support?('go.mod.example')).to be_falsey
+ end
+ end
+
+ describe '#link' do
+ subject { Gitlab::Highlight.highlight(file_name, file_content) }
+
+ def link(name, url)
+ %{<a href="#{url}" rel="nofollow noreferrer noopener" target="_blank">#{name}</a>}
+ end
+
+ it 'links the module name' do
+ expect(subject).to include(link('gitlab.com/gitlab-org/gitlab-workhorse', 'https://pkg.go.dev/gitlab.com/gitlab-org/gitlab-workhorse'))
+ end
+
+ it 'links dependencies' do
+ expect(subject).to include(link('github.com/BurntSushi/toml', 'https://pkg.go.dev/github.com/BurntSushi/toml@v0.3.1'))
+ expect(subject).to include(link('github.com/FZambia/sentinel', 'https://pkg.go.dev/github.com/FZambia/sentinel@v1.0.0'))
+ expect(subject).to include(link('github.com/alecthomas/chroma', 'https://pkg.go.dev/github.com/alecthomas/chroma@v0.7.3'))
+ expect(subject).to include(link('github.com/dgrijalva/jwt-go', 'https://pkg.go.dev/github.com/dgrijalva/jwt-go@v3.2.0+incompatible'))
+ expect(subject).to include(link('github.com/getsentry/raven-go', 'https://pkg.go.dev/github.com/getsentry/raven-go@v0.1.2'))
+ expect(subject).to include(link('github.com/golang/gddo', 'https://pkg.go.dev/github.com/golang/gddo@v0.0.0-20190419222130-af0f2af80721'))
+ expect(subject).to include(link('github.com/golang/protobuf', 'https://pkg.go.dev/github.com/golang/protobuf@v1.3.2'))
+ expect(subject).to include(link('github.com/gomodule/redigo', 'https://pkg.go.dev/github.com/gomodule/redigo@v2.0.0+incompatible'))
+ expect(subject).to include(link('github.com/gorilla/websocket', 'https://pkg.go.dev/github.com/gorilla/websocket@v1.4.0'))
+ expect(subject).to include(link('github.com/grpc-ecosystem/go-grpc-middleware', 'https://pkg.go.dev/github.com/grpc-ecosystem/go-grpc-middleware@v1.0.0'))
+ expect(subject).to include(link('github.com/grpc-ecosystem/go-grpc-prometheus', 'https://pkg.go.dev/github.com/grpc-ecosystem/go-grpc-prometheus@v1.2.0'))
+ expect(subject).to include(link('github.com/jfbus/httprs', 'https://pkg.go.dev/github.com/jfbus/httprs@v0.0.0-20190827093123-b0af8319bb15'))
+ expect(subject).to include(link('github.com/jpillora/backoff', 'https://pkg.go.dev/github.com/jpillora/backoff@v0.0.0-20170918002102-8eab2debe79d'))
+ expect(subject).to include(link('github.com/prometheus/client_golang', 'https://pkg.go.dev/github.com/prometheus/client_golang@v1.0.0'))
+ expect(subject).to include(link('github.com/rafaeljusto/redigomock', 'https://pkg.go.dev/github.com/rafaeljusto/redigomock@v0.0.0-20190202135759-257e089e14a1'))
+ expect(subject).to include(link('github.com/sebest/xff', 'https://pkg.go.dev/github.com/sebest/xff@v0.0.0-20160910043805-6c115e0ffa35'))
+ expect(subject).to include(link('github.com/sirupsen/logrus', 'https://pkg.go.dev/github.com/sirupsen/logrus@v1.3.0'))
+ expect(subject).to include(link('github.com/stretchr/testify', 'https://pkg.go.dev/github.com/stretchr/testify@v1.5.1'))
+ expect(subject).to include(link('gitlab.com/gitlab-org/gitaly', 'https://pkg.go.dev/gitlab.com/gitlab-org/gitaly@v1.74.0'))
+ expect(subject).to include(link('gitlab.com/gitlab-org/labkit', 'https://pkg.go.dev/gitlab.com/gitlab-org/labkit@v0.0.0-20200520155818-96e583c57891'))
+ expect(subject).to include(link('golang.org/x/lint', 'https://pkg.go.dev/golang.org/x/lint@v0.0.0-20191125180803-fdd1cda4f05f'))
+ expect(subject).to include(link('golang.org/x/net', 'https://pkg.go.dev/golang.org/x/net@v0.0.0-20200114155413-6afb5195e5aa'))
+ expect(subject).to include(link('golang.org/x/tools', 'https://pkg.go.dev/golang.org/x/tools@v0.0.0-20200117161641-43d50277825c'))
+ expect(subject).to include(link('google.golang.org/grpc', 'https://pkg.go.dev/google.golang.org/grpc@v1.24.0'))
+ expect(subject).to include(link('gopkg.in/yaml.v2', 'https://pkg.go.dev/gopkg.in/yaml.v2@v2.2.8'))
+ expect(subject).to include(link('honnef.co/go/tools', 'https://pkg.go.dev/honnef.co/go/tools@v0.0.1-2019.2.3'))
+ end
+ end
+end
diff --git a/spec/lib/gitlab/dependency_linker/go_sum_linker_spec.rb b/spec/lib/gitlab/dependency_linker/go_sum_linker_spec.rb
new file mode 100644
index 00000000000..f5cb7809ad3
--- /dev/null
+++ b/spec/lib/gitlab/dependency_linker/go_sum_linker_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::DependencyLinker::GoSumLinker do
+ let(:file_name) { 'go.sum' }
+ let(:file_content) do
+ <<-CONTENT.strip_heredoc
+ github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
+ github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+ github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+ github.com/stretchr/objx v0.1.0 h1:4G4v2dO3VZwixGIRoQ5Lfboy6nUhCyYzaqnIAPPhYs4=
+ github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+ github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
+ github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
+ gitlab.com/go-utils/io v0.0.0-20190408212915-156add3f8f97 h1:9EKx8vX3kJzyj977yiWB8iIOXHyvbg8SmfOScw7OcN0=
+ gitlab.com/go-utils/io v0.0.0-20190408212915-156add3f8f97/go.mod h1:cF4ez5kIKPWU1BB1Z4qgu6dQkT3pvknXff8PSlGaNo8=
+ golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7 h1:9zdDQZ7Thm29KFXgAX/+yaf3eVbP7djjWp/dXAppNCc=
+ golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+ CONTENT
+ end
+
+ describe '.support?' do
+ it 'supports go.sum' do
+ expect(described_class.support?('go.sum')).to be_truthy
+ end
+
+ it 'does not support other files' do
+ expect(described_class.support?('go.sum.example')).to be_falsey
+ end
+ end
+
+ describe '#link' do
+ subject { Gitlab::Highlight.highlight(file_name, file_content) }
+
+ def link(name, url)
+ %{<a href="#{url}" rel="nofollow noreferrer noopener" target="_blank">#{name}</a>}
+ end
+
+ it 'links modules' do
+ expect(subject).to include(link('github.com/davecgh/go-spew', 'https://pkg.go.dev/github.com/davecgh/go-spew@v1.1.0'))
+ expect(subject).to include(link('github.com/pmezard/go-difflib', 'https://pkg.go.dev/github.com/pmezard/go-difflib@v1.0.0'))
+ expect(subject).to include(link('github.com/stretchr/objx', 'https://pkg.go.dev/github.com/stretchr/objx@v0.1.0'))
+ expect(subject).to include(link('github.com/stretchr/testify', 'https://pkg.go.dev/github.com/stretchr/testify@v1.3.0'))
+ expect(subject).to include(link('gitlab.com/go-utils/io', 'https://pkg.go.dev/gitlab.com/go-utils/io@v0.0.0-20190408212915-156add3f8f97'))
+ expect(subject).to include(link('golang.org/x/xerrors', 'https://pkg.go.dev/golang.org/x/xerrors@v0.0.0-20190717185122-a985d3407aa7'))
+ end
+
+ it 'links checksums' do
+ expect(subject).to include(link('ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=', 'https://sum.golang.org/lookup/github.com/davecgh/go-spew@v1.1.0'))
+ expect(subject).to include(link('J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=', 'https://sum.golang.org/lookup/github.com/davecgh/go-spew@v1.1.0'))
+ expect(subject).to include(link('4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=', 'https://sum.golang.org/lookup/github.com/pmezard/go-difflib@v1.0.0'))
+ expect(subject).to include(link('iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=', 'https://sum.golang.org/lookup/github.com/pmezard/go-difflib@v1.0.0'))
+ expect(subject).to include(link('4G4v2dO3VZwixGIRoQ5Lfboy6nUhCyYzaqnIAPPhYs4=', 'https://sum.golang.org/lookup/github.com/stretchr/objx@v0.1.0'))
+ expect(subject).to include(link('HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=', 'https://sum.golang.org/lookup/github.com/stretchr/objx@v0.1.0'))
+ expect(subject).to include(link('TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=', 'https://sum.golang.org/lookup/github.com/stretchr/testify@v1.3.0'))
+ expect(subject).to include(link('M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=', 'https://sum.golang.org/lookup/github.com/stretchr/testify@v1.3.0'))
+ expect(subject).to include(link('9EKx8vX3kJzyj977yiWB8iIOXHyvbg8SmfOScw7OcN0=', 'https://sum.golang.org/lookup/gitlab.com/go-utils/io@v0.0.0-20190408212915-156add3f8f97'))
+ expect(subject).to include(link('cF4ez5kIKPWU1BB1Z4qgu6dQkT3pvknXff8PSlGaNo8=', 'https://sum.golang.org/lookup/gitlab.com/go-utils/io@v0.0.0-20190408212915-156add3f8f97'))
+ expect(subject).to include(link('9zdDQZ7Thm29KFXgAX/+yaf3eVbP7djjWp/dXAppNCc=', 'https://sum.golang.org/lookup/golang.org/x/xerrors@v0.0.0-20190717185122-a985d3407aa7'))
+ expect(subject).to include(link('I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=', 'https://sum.golang.org/lookup/golang.org/x/xerrors@v0.0.0-20190717185122-a985d3407aa7'))
+ end
+ end
+end
diff --git a/spec/lib/gitlab/dependency_linker_spec.rb b/spec/lib/gitlab/dependency_linker_spec.rb
index 570a994f520..acd4376615c 100644
--- a/spec/lib/gitlab/dependency_linker_spec.rb
+++ b/spec/lib/gitlab/dependency_linker_spec.rb
@@ -91,5 +91,21 @@ describe Gitlab::DependencyLinker do
described_class.link(blob_name, nil, nil)
end
+
+ it 'links using GoModLinker' do
+ blob_name = 'go.mod'
+
+ expect(described_class::GoModLinker).to receive(:link)
+
+ described_class.link(blob_name, nil, nil)
+ end
+
+ it 'links using GoSumLinker' do
+ blob_name = 'go.sum'
+
+ expect(described_class::GoSumLinker).to receive(:link)
+
+ described_class.link(blob_name, nil, nil)
+ end
end
end
diff --git a/spec/lib/gitlab/diff/file_spec.rb b/spec/lib/gitlab/diff/file_spec.rb
index d1592e60d3d..8dbedcf26b9 100644
--- a/spec/lib/gitlab/diff/file_spec.rb
+++ b/spec/lib/gitlab/diff/file_spec.rb
@@ -282,6 +282,18 @@ describe Gitlab::Diff::File do
end
end
+ describe '#file_hash' do
+ it 'returns a hash of file_path' do
+ expect(diff_file.file_hash).to eq(Digest::SHA1.hexdigest(diff_file.file_path))
+ end
+ end
+
+ describe '#file_identifier_hash' do
+ it 'returns a hash of file_identifier' do
+ expect(diff_file.file_identifier_hash).to eq(Digest::SHA1.hexdigest(diff_file.file_identifier))
+ end
+ end
+
context 'diff file stats' do
let(:diff_file) do
described_class.new(diff,
diff --git a/spec/lib/gitlab/diff/formatters/image_formatter_spec.rb b/spec/lib/gitlab/diff/formatters/image_formatter_spec.rb
index edf30ffc56f..3f88f39ba92 100644
--- a/spec/lib/gitlab/diff/formatters/image_formatter_spec.rb
+++ b/spec/lib/gitlab/diff/formatters/image_formatter_spec.rb
@@ -10,6 +10,7 @@ describe Gitlab::Diff::Formatters::ImageFormatter do
head_sha: 789,
old_path: 'old_image.png',
new_path: 'new_image.png',
+ file_identifier_hash: '777',
position_type: 'image'
}
end
diff --git a/spec/lib/gitlab/diff/formatters/text_formatter_spec.rb b/spec/lib/gitlab/diff/formatters/text_formatter_spec.rb
index fa129a20e58..50dd597c5a7 100644
--- a/spec/lib/gitlab/diff/formatters/text_formatter_spec.rb
+++ b/spec/lib/gitlab/diff/formatters/text_formatter_spec.rb
@@ -10,6 +10,7 @@ describe Gitlab::Diff::Formatters::TextFormatter do
head_sha: 789,
old_path: 'old_path.txt',
new_path: 'new_path.txt',
+ file_identifier_hash: '777',
line_range: nil
}
end
diff --git a/spec/lib/gitlab/diff/position_spec.rb b/spec/lib/gitlab/diff/position_spec.rb
index 10749ec024d..b32a2c59bb9 100644
--- a/spec/lib/gitlab/diff/position_spec.rb
+++ b/spec/lib/gitlab/diff/position_spec.rb
@@ -574,6 +574,86 @@ describe Gitlab::Diff::Position do
end
end
+ describe '#find_diff_file_from' do
+ context "position for a diff file that has changed from symlink to regular file" do
+ let(:commit) { project.commit("81e6355ce4e1544a3524b230952c12455de0777b") }
+
+ let(:old_symlink_file_identifier_hash) { "bfa430463f33619872d52a6b85ced59c973e42dc" }
+ let(:new_regular_file_identifier_hash) { "e25b60c2e5ffb977d2b1431b96c6f7800c3c3529" }
+ let(:file_identifier_hash) { new_regular_file_identifier_hash }
+
+ let(:args) do
+ {
+ file_identifier_hash: file_identifier_hash,
+ old_path: "symlink",
+ new_path: "symlink",
+ old_line: nil,
+ new_line: 1,
+ diff_refs: commit.diff_refs
+ }
+ end
+
+ let(:diffable) { commit.diff_refs.compare_in(project) }
+
+ subject(:diff_file) { described_class.new(args).find_diff_file_from(diffable) }
+
+ context 'when file_identifier_hash is disabled' do
+ before do
+ stub_feature_flags(file_identifier_hash: false)
+ end
+
+ it "returns the first diff file" do
+ expect(diff_file.file_identifier_hash).to eq(old_symlink_file_identifier_hash)
+ end
+ end
+
+ context 'when file_identifier_hash is enabled' do
+ before do
+ stub_feature_flags(file_identifier_hash: true)
+ end
+
+ context 'for new regular file' do
+ it "returns the correct diff file" do
+ expect(diff_file.file_identifier_hash).to eq(new_regular_file_identifier_hash)
+ end
+ end
+
+ context 'for old symlink file' do
+ let(:args) do
+ {
+ file_identifier_hash: old_symlink_file_identifier_hash,
+ old_path: "symlink",
+ new_path: "symlink",
+ old_line: 1,
+ new_line: nil,
+ diff_refs: commit.diff_refs
+ }
+ end
+
+ it "returns the correct diff file" do
+ expect(diff_file.file_identifier_hash).to eq(old_symlink_file_identifier_hash)
+ end
+ end
+
+ context 'when file_identifier_hash is missing' do
+ let(:file_identifier_hash) { nil }
+
+ it "returns the first diff file" do
+ expect(diff_file.file_identifier_hash).to eq(old_symlink_file_identifier_hash)
+ end
+ end
+
+ context 'when file_identifier_hash cannot be found' do
+ let(:file_identifier_hash) { "missingidentifier" }
+
+ it "returns nil" do
+ expect(diff_file).to be_nil
+ end
+ end
+ end
+ end
+ end
+
describe '#==' do
let(:commit) { project.commit("570e7b2abdd848b95f2f578043fc23bd6f6fd24d") }
diff --git a/spec/lib/gitlab/doctor/secrets_spec.rb b/spec/lib/gitlab/doctor/secrets_spec.rb
new file mode 100644
index 00000000000..f118519fd9f
--- /dev/null
+++ b/spec/lib/gitlab/doctor/secrets_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Doctor::Secrets do
+ let!(:user) { create(:user, otp_secret: "test") }
+ let!(:group) { create(:group, runners_token: "test") }
+ let(:logger) { double(:logger).as_null_object }
+
+ subject { described_class.new(logger).run! }
+
+ context 'when encrypted attributes are properly set' do
+ it 'detects decryptable secrets' do
+ expect(logger).to receive(:info).with(/User failures: 0/)
+ expect(logger).to receive(:info).with(/Group failures: 0/)
+
+ subject
+ end
+ end
+
+ context 'when attr_encrypted values are not decrypting' do
+ it 'marks undecryptable values as bad' do
+ user.encrypted_otp_secret = "invalid"
+ user.save!
+
+ expect(logger).to receive(:info).with(/User failures: 1/)
+
+ subject
+ end
+ end
+
+ context 'when TokenAuthenticatable values are not decrypting' do
+ it 'marks undecryptable values as bad' do
+ group.runners_token_encrypted = "invalid"
+ group.save!
+
+ expect(logger).to receive(:info).with(/Group failures: 1/)
+
+ subject
+ end
+ end
+end
diff --git a/spec/lib/gitlab/email/handler/create_note_handler_spec.rb b/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
index 909a7618df4..af963e1b695 100644
--- a/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
@@ -65,16 +65,24 @@ describe Gitlab::Email::Handler::CreateNoteHandler do
end
end
- context 'and current user can update noteable' do
- before do
- project.add_developer(user)
- end
-
- it 'does not raise an error' do
- # One system note is created for the 'close' event
- expect { receiver.execute }.to change { noteable.notes.count }.by(1)
-
- expect(noteable.reload).to be_closed
+ [true, false].each do |state_tracking_enabled|
+ context "and current user can update noteable with state tracking #{state_tracking_enabled ? 'enabled' : 'disabled'}" do
+ before do
+ stub_feature_flags(track_resource_state_change_events: state_tracking_enabled)
+
+ project.add_developer(user)
+ end
+
+ it 'does not raise an error' do
+ if state_tracking_enabled
+ expect { receiver.execute }.to change { noteable.resource_state_events.count }.by(1)
+ else
+ # One system note is created for the 'close' event
+ expect { receiver.execute }.to change { noteable.notes.count }.by(1)
+ end
+
+ expect(noteable.reload).to be_closed
+ end
end
end
end
diff --git a/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb b/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb
new file mode 100644
index 00000000000..da7205c7f4f
--- /dev/null
+++ b/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb
@@ -0,0 +1,165 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require 'rspec-parameterized'
+
+RSpec.describe Gitlab::ErrorTracking::Processor::SidekiqProcessor do
+ after do
+ if described_class.instance_variable_defined?(:@permitted_arguments_for_worker)
+ described_class.remove_instance_variable(:@permitted_arguments_for_worker)
+ end
+ end
+
+ describe '.filter_arguments' do
+ it 'returns a lazy enumerator' do
+ filtered = described_class.filter_arguments([1, 'string'], 'TestWorker')
+
+ expect(filtered).to be_a(Enumerator::Lazy)
+ expect(filtered.to_a).to eq([1, described_class::FILTERED_STRING])
+ end
+
+ context 'arguments filtering' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:klass, :expected) do
+ 'UnknownWorker' | [1, described_class::FILTERED_STRING, described_class::FILTERED_STRING, described_class::FILTERED_STRING]
+ 'NoPermittedArguments' | [1, described_class::FILTERED_STRING, described_class::FILTERED_STRING, described_class::FILTERED_STRING]
+ 'OnePermittedArgument' | [1, 'string', described_class::FILTERED_STRING, described_class::FILTERED_STRING]
+ 'AllPermittedArguments' | [1, 'string', [1, 2], { a: 1 }]
+ end
+
+ with_them do
+ before do
+ stub_const('NoPermittedArguments', double(loggable_arguments: []))
+ stub_const('OnePermittedArgument', double(loggable_arguments: [1]))
+ stub_const('AllPermittedArguments', double(loggable_arguments: [0, 1, 2, 3]))
+ end
+
+ it do
+ expect(described_class.filter_arguments([1, 'string', [1, 2], { a: 1 }], klass).to_a)
+ .to eq(expected)
+ end
+ end
+ end
+ end
+
+ describe '.permitted_arguments_for_worker' do
+ it 'returns the loggable_arguments for a worker class as a set' do
+ stub_const('TestWorker', double(loggable_arguments: [1, 1]))
+
+ expect(described_class.permitted_arguments_for_worker('TestWorker'))
+ .to eq([1].to_set)
+ end
+
+ it 'returns an empty set when the worker class does not exist' do
+ expect(described_class.permitted_arguments_for_worker('TestWorker'))
+ .to eq(Set.new)
+ end
+
+ it 'returns an empty set when the worker class does not respond to loggable_arguments' do
+ stub_const('TestWorker', 1)
+
+ expect(described_class.permitted_arguments_for_worker('TestWorker'))
+ .to eq(Set.new)
+ end
+
+ it 'returns an empty set when loggable_arguments cannot be converted to a set' do
+ stub_const('TestWorker', double(loggable_arguments: 1))
+
+ expect(described_class.permitted_arguments_for_worker('TestWorker'))
+ .to eq(Set.new)
+ end
+
+ it 'memoizes the results' do
+ worker_class = double
+
+ stub_const('TestWorker', worker_class)
+
+ expect(worker_class).to receive(:loggable_arguments).once.and_return([])
+
+ described_class.permitted_arguments_for_worker('TestWorker')
+ described_class.permitted_arguments_for_worker('TestWorker')
+ end
+ end
+
+ describe '.loggable_arguments' do
+ it 'filters and limits the arguments, then converts to strings' do
+ half_limit = Gitlab::Utils::LogLimitedArray::MAXIMUM_ARRAY_LENGTH / 2
+ args = [[1, 2], 'a' * half_limit, 'b' * half_limit, 'c' * half_limit, 'd']
+
+ stub_const('LoggableArguments', double(loggable_arguments: [0, 1, 3, 4]))
+
+ expect(described_class.loggable_arguments(args, 'LoggableArguments'))
+ .to eq(['[1, 2]', 'a' * half_limit, '[FILTERED]', '...'])
+ end
+ end
+
+ describe '#process' do
+ context 'when there is Sidekiq data' do
+ shared_examples 'Sidekiq arguments' do |args_in_job_hash: true|
+ let(:path) { [:extra, :sidekiq, args_in_job_hash ? :job : nil, 'args'].compact }
+ let(:args) { [1, 'string', { a: 1 }, [1, 2]] }
+
+ it 'only allows numeric arguments for an unknown worker' do
+ value = { 'args' => args, 'class' => 'UnknownWorker' }
+
+ value = { job: value } if args_in_job_hash
+
+ expect(subject.process(extra_sidekiq(value)).dig(*path))
+ .to eq([1, described_class::FILTERED_STRING, described_class::FILTERED_STRING, described_class::FILTERED_STRING])
+ end
+
+ it 'allows all argument types for a permitted worker' do
+ value = { 'args' => args, 'class' => 'PostReceive' }
+
+ value = { job: value } if args_in_job_hash
+
+ expect(subject.process(extra_sidekiq(value)).dig(*path))
+ .to eq(args)
+ end
+ end
+
+ context 'when processing via the default error handler' do
+ include_examples 'Sidekiq arguments', args_in_job_hash: true
+ end
+
+ context 'when processing via Gitlab::ErrorTracking' do
+ include_examples 'Sidekiq arguments', args_in_job_hash: false
+ end
+
+ it 'removes a jobstr field if present' do
+ value = {
+ job: { 'args' => [1] },
+ jobstr: { 'args' => [1] }.to_json
+ }
+
+ expect(subject.process(extra_sidekiq(value)))
+ .to eq(extra_sidekiq(value.except(:jobstr)))
+ end
+
+ it 'does nothing with no jobstr' do
+ value = { job: { 'args' => [1] } }
+
+ expect(subject.process(extra_sidekiq(value)))
+ .to eq(extra_sidekiq(value))
+ end
+ end
+
+ context 'when there is no Sidekiq data' do
+ it 'does nothing' do
+ value = {
+ request: {
+ method: 'POST',
+ data: { 'key' => 'value' }
+ }
+ }
+
+ expect(subject.process(value)).to eq(value)
+ end
+ end
+
+ def extra_sidekiq(hash)
+ { extra: { sidekiq: hash } }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/error_tracking_spec.rb b/spec/lib/gitlab/error_tracking_spec.rb
index 6764d48d14b..c40369f5965 100644
--- a/spec/lib/gitlab/error_tracking_spec.rb
+++ b/spec/lib/gitlab/error_tracking_spec.rb
@@ -2,6 +2,8 @@
require 'spec_helper'
+require 'raven/transports/dummy'
+
describe Gitlab::ErrorTracking do
let(:exception) { RuntimeError.new('boom') }
let(:issue_url) { 'http://gitlab.com/gitlab-org/gitlab-foss/issues/1' }
@@ -22,7 +24,9 @@ describe Gitlab::ErrorTracking do
allow(described_class).to receive(:sentry_dsn).and_return(Gitlab.config.sentry.dsn)
allow(Labkit::Correlation::CorrelationId).to receive(:current_id).and_return('cid')
- described_class.configure
+ described_class.configure do |config|
+ config.encoding = 'json'
+ end
end
describe '.with_context' do
@@ -179,5 +183,29 @@ describe Gitlab::ErrorTracking do
described_class.track_exception(exception, extra_info)
end
end
+
+ context 'with sidekiq args' do
+ it 'ensures extra.sidekiq.args is a string' do
+ extra = { sidekiq: { 'class' => 'PostReceive', 'args' => [1, { 'id' => 2, 'name' => 'hello' }, 'some-value', 'another-value'] } }
+
+ expect(Gitlab::ErrorTracking::Logger).to receive(:error).with(
+ hash_including({ 'extra.sidekiq' => { 'class' => 'PostReceive', 'args' => ['1', '{"id"=>2, "name"=>"hello"}', 'some-value', 'another-value'] } }))
+
+ described_class.track_exception(exception, extra)
+ end
+
+ it 'filters sensitive arguments before sending' do
+ extra = { sidekiq: { 'class' => 'UnknownWorker', 'args' => ['sensitive string', 1, 2] } }
+
+ expect(Gitlab::ErrorTracking::Logger).to receive(:error).with(
+ hash_including('extra.sidekiq' => { 'class' => 'UnknownWorker', 'args' => ['[FILTERED]', '1', '2'] }))
+
+ described_class.track_exception(exception, extra)
+
+ sentry_event = Gitlab::Json.parse(Raven.client.transport.events.last[1])
+
+ expect(sentry_event.dig('extra', 'sidekiq', 'args')).to eq(['[FILTERED]', 1, 2])
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/etag_caching/router_spec.rb b/spec/lib/gitlab/etag_caching/router_spec.rb
index b69e4668d61..d9eeb5b9a2b 100644
--- a/spec/lib/gitlab/etag_caching/router_spec.rb
+++ b/spec/lib/gitlab/etag_caching/router_spec.rb
@@ -23,7 +23,7 @@ describe Gitlab::EtagCaching::Router do
it 'matches issue title endpoint' do
result = described_class.match(
- '/my-group/my-project/issues/123/realtime_changes'
+ '/my-group/my-project/-/issues/123/realtime_changes'
)
expect(result).to be_present
@@ -32,7 +32,7 @@ describe Gitlab::EtagCaching::Router do
it 'matches with a project name that includes a suffix of create' do
result = described_class.match(
- '/group/test-create/issues/123/realtime_changes'
+ '/group/test-create/-/issues/123/realtime_changes'
)
expect(result).to be_present
@@ -41,7 +41,7 @@ describe Gitlab::EtagCaching::Router do
it 'matches with a project name that includes a prefix of create' do
result = described_class.match(
- '/group/create-test/issues/123/realtime_changes'
+ '/group/create-test/-/issues/123/realtime_changes'
)
expect(result).to be_present
@@ -50,7 +50,7 @@ describe Gitlab::EtagCaching::Router do
it 'matches project pipelines endpoint' do
result = described_class.match(
- '/my-group/my-project/pipelines.json'
+ '/my-group/my-project/-/pipelines.json'
)
expect(result).to be_present
@@ -95,7 +95,7 @@ describe Gitlab::EtagCaching::Router do
it 'does not match blob with confusing name' do
result = described_class.match(
- '/my-group/my-project/blob/master/pipelines.json'
+ '/my-group/my-project/-/blob/master/pipelines.json'
)
expect(result).to be_blank
@@ -121,7 +121,7 @@ describe Gitlab::EtagCaching::Router do
it 'matches pipeline#show endpoint' do
result = described_class.match(
- '/my-group/my-project/pipelines/2.json'
+ '/my-group/my-project/-/pipelines/2.json'
)
expect(result).to be_present
diff --git a/spec/lib/gitlab/experimentation_spec.rb b/spec/lib/gitlab/experimentation_spec.rb
index 99442cb0ca6..f6e6c031624 100644
--- a/spec/lib/gitlab/experimentation_spec.rb
+++ b/spec/lib/gitlab/experimentation_spec.rb
@@ -11,7 +11,7 @@ describe Gitlab::Experimentation do
}
})
- allow(Feature).to receive(:get).with(:test_experiment_experiment_percentage).and_return double(percentage_of_time_value: enabled_percentage)
+ Feature.enable_percentage_of_time(:test_experiment_experiment_percentage, enabled_percentage)
end
let(:environment) { Rails.env.test? }
diff --git a/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb b/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb
index 0bf46217d60..7279399d1b8 100644
--- a/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb
+++ b/spec/lib/gitlab/gfm/uploads_rewriter_spec.rb
@@ -54,6 +54,14 @@ describe Gitlab::Gfm::UploadsRewriter do
expect(new_paths).not_to include image_uploader.secret
expect(new_paths).not_to include zip_uploader.secret
end
+
+ it 'skips nil files' do
+ allow_next_instance_of(UploaderFinder) do |finder|
+ allow(finder).to receive(:execute).and_return(nil)
+ end
+
+ expect(new_files).to be_empty
+ end
end
end
@@ -68,16 +76,6 @@ describe Gitlab::Gfm::UploadsRewriter do
expect(moved_text.scan(/\A\[.*?\]/).count).to eq(1)
end
- context 'path traversal in file name' do
- let(:text) do
- "![a](/uploads/11111111111111111111111111111111/../../../../../../../../../../../../../../etc/passwd)"
- end
-
- it 'throw an error' do
- expect { rewriter.rewrite(new_project) }.to raise_error(an_instance_of(StandardError).and(having_attributes(message: "Invalid path")))
- end
- end
-
context "file are stored locally" do
include_examples "files are accessible"
end
diff --git a/spec/lib/gitlab/git/diff_collection_spec.rb b/spec/lib/gitlab/git/diff_collection_spec.rb
index 0d19d35bc52..6aa4f884d20 100644
--- a/spec/lib/gitlab/git/diff_collection_spec.rb
+++ b/spec/lib/gitlab/git/diff_collection_spec.rb
@@ -3,6 +3,31 @@
require 'spec_helper'
describe Gitlab::Git::DiffCollection, :seed_helper do
+ before do
+ stub_const('MutatingConstantIterator', Class.new)
+
+ MutatingConstantIterator.class_eval do
+ include Enumerable
+
+ def initialize(count, value)
+ @count = count
+ @value = value
+ end
+
+ def each
+ return enum_for(:each) unless block_given?
+
+ loop do
+ break if @count.zero?
+
+ # It is critical to decrement before yielding. We may never reach the lines after 'yield'.
+ @count -= 1
+ yield @value
+ end
+ end
+ end
+ end
+
subject do
Gitlab::Git::DiffCollection.new(
iterator,
@@ -659,25 +684,4 @@ describe Gitlab::Git::DiffCollection, :seed_helper do
def fake_diff(line_length, line_count)
{ 'diff' => "#{'a' * line_length}\n" * line_count }
end
-
- class MutatingConstantIterator
- include Enumerable
-
- def initialize(count, value)
- @count = count
- @value = value
- end
-
- def each
- return enum_for(:each) unless block_given?
-
- loop do
- break if @count.zero?
-
- # It is critical to decrement before yielding. We may never reach the lines after 'yield'.
- @count -= 1
- yield @value
- end
- end
- end
end
diff --git a/spec/lib/gitlab/git/diff_spec.rb b/spec/lib/gitlab/git/diff_spec.rb
index ac606da5cc1..ff54d7fbcd3 100644
--- a/spec/lib/gitlab/git/diff_spec.rb
+++ b/spec/lib/gitlab/git/diff_spec.rb
@@ -122,6 +122,36 @@ EOT
end
end
end
+
+ context 'using a Gitaly::CommitDelta' do
+ let(:commit_delta) do
+ Gitaly::CommitDelta.new(
+ to_path: ".gitmodules",
+ from_path: ".gitmodules",
+ old_mode: 0100644,
+ new_mode: 0100644,
+ from_id: '357406f3075a57708d0163752905cc1576fceacc',
+ to_id: '8e5177d718c561d36efde08bad36b43687ee6bf0'
+ )
+ end
+ let(:diff) { described_class.new(commit_delta) }
+
+ it 'initializes the diff' do
+ expect(diff.to_hash).to eq(@raw_diff_hash.merge(diff: ''))
+ end
+
+ it 'is not too large' do
+ expect(diff).not_to be_too_large
+ end
+
+ it 'has an empty diff' do
+ expect(diff.diff).to be_empty
+ end
+
+ it 'is not a binary' do
+ expect(diff).not_to have_binary_notice
+ end
+ end
end
describe 'straight diffs' do
diff --git a/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb b/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
index b396e5d22c3..8339006fe9f 100644
--- a/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
+++ b/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
@@ -8,7 +8,6 @@ describe Gitlab::Git::RuggedImpl::UseRugged, :seed_helper do
let(:project) { create(:project, :repository) }
let(:repository) { project.repository }
let(:feature_flag_name) { 'feature-flag-name' }
- let(:feature_flag) { Feature.get(feature_flag_name) }
let(:temp_gitaly_metadata_file) { create_temporary_gitaly_metadata_file }
before(:all) do
@@ -49,10 +48,6 @@ describe Gitlab::Git::RuggedImpl::UseRugged, :seed_helper do
end
context 'when feature flag is not persisted' do
- before do
- allow(Feature).to receive(:persisted?).with(feature_flag).and_return(false)
- end
-
context 'when running puma with multiple threads' do
before do
allow(subject).to receive(:running_puma_with_multiple_threads?).and_return(true)
@@ -97,18 +92,15 @@ describe Gitlab::Git::RuggedImpl::UseRugged, :seed_helper do
end
context 'when feature flag is persisted' do
- before do
- allow(Feature).to receive(:persisted?).with(feature_flag).and_return(true)
- end
-
it 'returns false when the feature flag is off' do
- allow(feature_flag).to receive(:enabled?).and_return(false)
+ Feature.disable(feature_flag_name)
expect(subject.use_rugged?(repository, feature_flag_name)).to be_falsey
end
it "returns true when feature flag is on" do
- allow(feature_flag).to receive(:enabled?).and_return(true)
+ Feature.enable(feature_flag_name)
+
allow(Gitlab::GitalyClient).to receive(:can_use_disk?).and_return(false)
expect(subject.use_rugged?(repository, feature_flag_name)).to be true
diff --git a/spec/lib/gitlab/git_access_project_spec.rb b/spec/lib/gitlab/git_access_project_spec.rb
new file mode 100644
index 00000000000..f7f7976ccb8
--- /dev/null
+++ b/spec/lib/gitlab/git_access_project_spec.rb
@@ -0,0 +1,166 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::GitAccessProject do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let(:actor) { user }
+ let(:project_path) { project.path }
+ let(:namespace_path) { project&.namespace&.path }
+ let(:protocol) { 'ssh' }
+ let(:authentication_abilities) { %i[read_project download_code push_code] }
+ let(:changes) { Gitlab::GitAccess::ANY }
+ let(:push_access_check) { access.check('git-receive-pack', changes) }
+ let(:pull_access_check) { access.check('git-upload-pack', changes) }
+
+ describe '#check_project_accessibility!' do
+ context 'when the project is nil' do
+ let(:project) { nil }
+ let(:project_path) { "new-project" }
+
+ context 'when user is allowed to create project in namespace' do
+ let(:namespace_path) { user.namespace.path }
+ let(:access) do
+ described_class.new(actor, nil,
+ protocol, authentication_abilities: authentication_abilities,
+ repository_path: project_path, namespace_path: namespace_path)
+ end
+
+ it 'blocks pull access with "not found"' do
+ expect { pull_access_check }.to raise_not_found
+ end
+
+ it 'allows push access' do
+ expect { push_access_check }.not_to raise_error
+ end
+ end
+
+ context 'when user is not allowed to create project in namespace' do
+ let(:user2) { create(:user) }
+ let(:namespace_path) { user2.namespace.path }
+ let(:access) do
+ described_class.new(actor, nil,
+ protocol, authentication_abilities: authentication_abilities,
+ repository_path: project_path, namespace_path: namespace_path)
+ end
+
+ it 'blocks push and pull with "not found"' do
+ aggregate_failures do
+ expect { pull_access_check }.to raise_not_found
+ expect { push_access_check }.to raise_not_found
+ end
+ end
+ end
+ end
+ end
+
+ describe '#ensure_project_on_push!' do
+ let(:access) do
+ described_class.new(actor, project,
+ protocol, authentication_abilities: authentication_abilities,
+ repository_path: project_path, namespace_path: namespace_path)
+ end
+
+ before do
+ allow(access).to receive(:changes).and_return(changes)
+ end
+
+ context 'when push' do
+ let(:cmd) { 'git-receive-pack' }
+
+ context 'when project does not exist' do
+ let(:project_path) { "nonexistent" }
+ let(:project) { nil }
+
+ context 'when changes is _any' do
+ let(:changes) { Gitlab::GitAccess::ANY }
+
+ context 'when authentication abilities include push code' do
+ let(:authentication_abilities) { [:push_code] }
+
+ context 'when user can create project in namespace' do
+ let(:namespace_path) { user.namespace.path }
+
+ it 'creates a new project' do
+ expect { access.send(:ensure_project_on_push!, cmd) }
+ .to change { Project.count }.by(1)
+ .and change { Project.where(namespace: user.namespace, name: project_path).count }.by(1)
+ end
+ end
+
+ context 'when user cannot create project in namespace' do
+ let(:user2) { create(:user) }
+ let(:namespace_path) { user2.namespace.path }
+
+ it 'does not create a new project' do
+ expect { access.send(:ensure_project_on_push!, cmd) }.not_to change { Project.count }
+ end
+ end
+ end
+
+ context 'when authentication abilities do not include push code' do
+ let(:authentication_abilities) { [] }
+
+ context 'when user can create project in namespace' do
+ let(:namespace_path) { user.namespace.path }
+
+ it 'does not create a new project' do
+ expect { access.send(:ensure_project_on_push!, cmd) }.not_to change { Project.count }
+ end
+ end
+ end
+ end
+
+ context 'when check contains actual changes' do
+ let(:changes) { "#{Gitlab::Git::BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/new_branch" }
+
+ it 'does not create a new project' do
+ expect { access.send(:ensure_project_on_push!, cmd) }.not_to change { Project.count }
+ end
+ end
+ end
+
+ context 'when project exists' do
+ let(:changes) { Gitlab::GitAccess::ANY }
+ let!(:project) { create(:project) }
+
+ it 'does not create a new project' do
+ expect { access.send(:ensure_project_on_push!, cmd) }.not_to change { Project.count }
+ end
+ end
+
+ context 'when deploy key is used' do
+ let(:key) { create(:deploy_key, user: user) }
+ let(:actor) { key }
+ let(:project_path) { "nonexistent" }
+ let(:project) { nil }
+ let(:namespace_path) { user.namespace.path }
+ let(:changes) { Gitlab::GitAccess::ANY }
+
+ it 'does not create a new project' do
+ expect { access.send(:ensure_project_on_push!, cmd) }.not_to change { Project.count }
+ end
+ end
+ end
+
+ context 'when pull' do
+ let(:cmd) { 'git-upload-pack' }
+ let(:changes) { Gitlab::GitAccess::ANY }
+
+ context 'when project does not exist' do
+ let(:project_path) { "new-project" }
+ let(:namespace_path) { user.namespace.path }
+ let(:project) { nil }
+
+ it 'does not create a new project' do
+ expect { access.send(:ensure_project_on_push!, cmd) }.not_to change { Project.count }
+ end
+ end
+ end
+ end
+
+ def raise_not_found
+ raise_error(Gitlab::GitAccess::NotFoundError, Gitlab::GitAccess::ERROR_MESSAGES[:project_not_found])
+ end
+end
diff --git a/spec/lib/gitlab/git_access_spec.rb b/spec/lib/gitlab/git_access_spec.rb
index a29c56c598f..7c09fc5cc79 100644
--- a/spec/lib/gitlab/git_access_spec.rb
+++ b/spec/lib/gitlab/git_access_spec.rb
@@ -10,7 +10,7 @@ describe Gitlab::GitAccess do
let(:actor) { user }
let(:project) { create(:project, :repository) }
- let(:project_path) { project.path }
+ let(:project_path) { project&.path }
let(:namespace_path) { project&.namespace&.path }
let(:protocol) { 'ssh' }
let(:authentication_abilities) { %i[read_project download_code push_code] }
@@ -89,13 +89,14 @@ describe Gitlab::GitAccess do
end
end
- context 'when namespace does not exist' do
+ context 'when namespace and project are nil' do
+ let(:project) { nil }
let(:namespace_path) { nil }
it 'does not allow push and pull access' do
aggregate_failures do
- expect { push_access_check }.to raise_not_found
- expect { pull_access_check }.to raise_not_found
+ expect { push_access_check }.to raise_namespace_not_found
+ expect { pull_access_check }.to raise_namespace_not_found
end
end
end
@@ -227,6 +228,7 @@ describe Gitlab::GitAccess do
context 'when the project is nil' do
let(:project) { nil }
let(:project_path) { "new-project" }
+ let(:namespace_path) { user.namespace.path }
it 'blocks push and pull with "not found"' do
aggregate_failures do
@@ -234,42 +236,6 @@ describe Gitlab::GitAccess do
expect { push_access_check }.to raise_not_found
end
end
-
- context 'when user is allowed to create project in namespace' do
- let(:namespace_path) { user.namespace.path }
- let(:access) do
- described_class.new(actor, nil,
- protocol, authentication_abilities: authentication_abilities,
- repository_path: project_path, namespace_path: namespace_path,
- redirected_path: redirected_path)
- end
-
- it 'blocks pull access with "not found"' do
- expect { pull_access_check }.to raise_not_found
- end
-
- it 'allows push access' do
- expect { push_access_check }.not_to raise_error
- end
- end
-
- context 'when user is not allowed to create project in namespace' do
- let(:user2) { create(:user) }
- let(:namespace_path) { user2.namespace.path }
- let(:access) do
- described_class.new(actor, nil,
- protocol, authentication_abilities: authentication_abilities,
- repository_path: project_path, namespace_path: namespace_path,
- redirected_path: redirected_path)
- end
-
- it 'blocks push and pull with "not found"' do
- aggregate_failures do
- expect { pull_access_check }.to raise_not_found
- expect { push_access_check }.to raise_not_found
- end
- end
- end
end
end
@@ -449,106 +415,6 @@ describe Gitlab::GitAccess do
end
end
- describe '#ensure_project_on_push!' do
- let(:access) do
- described_class.new(actor, project,
- protocol, authentication_abilities: authentication_abilities,
- repository_path: project_path, namespace_path: namespace_path,
- redirected_path: redirected_path)
- end
-
- context 'when push' do
- let(:cmd) { 'git-receive-pack' }
-
- context 'when project does not exist' do
- let(:project_path) { "nonexistent" }
- let(:project) { nil }
-
- context 'when changes is _any' do
- let(:changes) { Gitlab::GitAccess::ANY }
-
- context 'when authentication abilities include push code' do
- let(:authentication_abilities) { [:push_code] }
-
- context 'when user can create project in namespace' do
- let(:namespace_path) { user.namespace.path }
-
- it 'creates a new project' do
- expect { access.send(:ensure_project_on_push!, cmd, changes) }.to change { Project.count }.by(1)
- end
- end
-
- context 'when user cannot create project in namespace' do
- let(:user2) { create(:user) }
- let(:namespace_path) { user2.namespace.path }
-
- it 'does not create a new project' do
- expect { access.send(:ensure_project_on_push!, cmd, changes) }.not_to change { Project.count }
- end
- end
- end
-
- context 'when authentication abilities do not include push code' do
- let(:authentication_abilities) { [] }
-
- context 'when user can create project in namespace' do
- let(:namespace_path) { user.namespace.path }
-
- it 'does not create a new project' do
- expect { access.send(:ensure_project_on_push!, cmd, changes) }.not_to change { Project.count }
- end
- end
- end
- end
-
- context 'when check contains actual changes' do
- let(:changes) { "#{Gitlab::Git::BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/new_branch" }
-
- it 'does not create a new project' do
- expect { access.send(:ensure_project_on_push!, cmd, changes) }.not_to change { Project.count }
- end
- end
- end
-
- context 'when project exists' do
- let(:changes) { Gitlab::GitAccess::ANY }
- let!(:project) { create(:project) }
-
- it 'does not create a new project' do
- expect { access.send(:ensure_project_on_push!, cmd, changes) }.not_to change { Project.count }
- end
- end
-
- context 'when deploy key is used' do
- let(:key) { create(:deploy_key, user: user) }
- let(:actor) { key }
- let(:project_path) { "nonexistent" }
- let(:project) { nil }
- let(:namespace_path) { user.namespace.path }
- let(:changes) { Gitlab::GitAccess::ANY }
-
- it 'does not create a new project' do
- expect { access.send(:ensure_project_on_push!, cmd, changes) }.not_to change { Project.count }
- end
- end
- end
-
- context 'when pull' do
- let(:cmd) { 'git-upload-pack' }
- let(:changes) { Gitlab::GitAccess::ANY }
-
- context 'when project does not exist' do
- let(:project_path) { "new-project" }
- let(:namespace_path) { user.namespace.path }
- let(:project) { nil }
-
- it 'does not create a new project' do
- expect { access.send(:ensure_project_on_push!, cmd, changes) }.not_to change { Project.count }
- end
- end
- end
- end
-
describe '#check_download_access!' do
it 'allows maintainers to pull' do
project.add_maintainer(user)
@@ -1219,6 +1085,10 @@ describe Gitlab::GitAccess do
raise_error(Gitlab::GitAccess::NotFoundError, Gitlab::GitAccess::ERROR_MESSAGES[:project_not_found])
end
+ def raise_namespace_not_found
+ raise_error(Gitlab::GitAccess::NotFoundError, Gitlab::GitAccess::ERROR_MESSAGES[:namespace_not_found])
+ end
+
def build_authentication_abilities
[
:read_project,
diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
index 00182983418..29a5ef0d2fc 100644
--- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
@@ -124,15 +124,20 @@ describe Gitlab::GitalyClient::CommitService do
let(:left_commit_id) { 'master' }
let(:right_commit_id) { 'cfe32cf61b73a0d5e9f13e774abde7ff789b1660' }
- it 'sends an RPC request' do
+ it 'sends an RPC request and returns the stats' do
request = Gitaly::DiffStatsRequest.new(repository: repository_message,
left_commit_id: left_commit_id,
right_commit_id: right_commit_id)
+ diff_stat_response = Gitaly::DiffStatsResponse.new(
+ stats: [{ additions: 1, deletions: 2, path: 'test' }])
+
expect_any_instance_of(Gitaly::DiffService::Stub).to receive(:diff_stats)
- .with(request, kind_of(Hash)).and_return([])
+ .with(request, kind_of(Hash)).and_return([diff_stat_response])
+
+ returned_value = described_class.new(repository).diff_stats(left_commit_id, right_commit_id)
- described_class.new(repository).diff_stats(left_commit_id, right_commit_id)
+ expect(returned_value).to eq(diff_stat_response.stats)
end
end
@@ -321,4 +326,59 @@ describe Gitlab::GitalyClient::CommitService do
client.find_commits(order: 'default', author: "Billy Baggins <bilbo@shire.com>")
end
end
+
+ describe '#commits_by_message' do
+ shared_examples 'a CommitsByMessageRequest' do
+ let(:commits) { create_list(:gitaly_commit, 2) }
+
+ before do
+ request = Gitaly::CommitsByMessageRequest.new(
+ repository: repository_message,
+ query: query,
+ revision: (options[:revision] || '').dup.force_encoding(Encoding::ASCII_8BIT),
+ path: (options[:path] || '').dup.force_encoding(Encoding::ASCII_8BIT),
+ limit: (options[:limit] || 1000).to_i,
+ offset: (options[:offset] || 0).to_i
+ )
+
+ allow_any_instance_of(Gitaly::CommitService::Stub)
+ .to receive(:commits_by_message)
+ .with(request, kind_of(Hash))
+ .and_return([Gitaly::CommitsByMessageResponse.new(commits: commits)])
+ end
+
+ it 'sends an RPC request with the correct payload' do
+ expect(client.commits_by_message(query, options)).to match_array(wrap_commits(commits))
+ end
+ end
+
+ let(:query) { 'Add a feature' }
+ let(:options) { {} }
+
+ context 'when only the query is provided' do
+ include_examples 'a CommitsByMessageRequest'
+ end
+
+ context 'when all arguments are provided' do
+ let(:options) { { revision: 'feature-branch', path: 'foo.txt', limit: 10, offset: 20 } }
+
+ include_examples 'a CommitsByMessageRequest'
+ end
+
+ context 'when limit and offset are not integers' do
+ let(:options) { { limit: '10', offset: '60' } }
+
+ include_examples 'a CommitsByMessageRequest'
+ end
+
+ context 'when revision and path contain non-ASCII characters' do
+ let(:options) { { revision: "branch\u011F", path: "foo/\u011F.txt" } }
+
+ include_examples 'a CommitsByMessageRequest'
+ end
+
+ def wrap_commits(commits)
+ commits.map { |commit| Gitlab::Git::Commit.new(repository, commit) }
+ end
+ end
end
diff --git a/spec/lib/gitlab/gitaly_client_spec.rb b/spec/lib/gitlab/gitaly_client_spec.rb
index 2c6aee58326..c2b989c2fdc 100644
--- a/spec/lib/gitlab/gitaly_client_spec.rb
+++ b/spec/lib/gitlab/gitaly_client_spec.rb
@@ -21,10 +21,10 @@ describe Gitlab::GitalyClient do
describe '.query_time', :request_store do
it 'increments query times' do
- subject.query_time += 0.451
- subject.query_time += 0.322
+ subject.add_query_time(0.4510004)
+ subject.add_query_time(0.3220004)
- expect(subject.query_time).to eq(0.773)
+ expect(subject.query_time).to eq(0.773001)
end
end
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
index e2d810d5ddc..526a5589743 100644
--- a/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
@@ -154,9 +154,11 @@ describe Gitlab::GithubImport::Importer::PullRequestsImporter do
.to receive(:fetch_remote)
.with('github', forced: false)
- expect(Rails.logger)
- .to receive(:info)
- .with(an_instance_of(String))
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger)
+ .to receive(:info)
+ .with(an_instance_of(Hash))
+ end
expect(importer.repository_updates_counter)
.to receive(:increment)
diff --git a/spec/lib/gitlab/gl_repository/identifier_spec.rb b/spec/lib/gitlab/gl_repository/identifier_spec.rb
new file mode 100644
index 00000000000..c36f296702e
--- /dev/null
+++ b/spec/lib/gitlab/gl_repository/identifier_spec.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::GlRepository::Identifier do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:personal_snippet) { create(:personal_snippet, author: project.owner) }
+ let_it_be(:project_snippet) { create(:project_snippet, project: project, author: project.owner) }
+
+ describe 'project repository' do
+ it_behaves_like 'parsing gl_repository identifier' do
+ let(:record_id) { project.id }
+ let(:identifier) { "project-#{record_id}" }
+ let(:expected_container) { project }
+ let(:expected_type) { Gitlab::GlRepository::PROJECT }
+ end
+ end
+
+ describe 'wiki' do
+ it_behaves_like 'parsing gl_repository identifier' do
+ let(:record_id) { project.id }
+ let(:identifier) { "wiki-#{record_id}" }
+ let(:expected_container) { project }
+ let(:expected_type) { Gitlab::GlRepository::WIKI }
+ end
+ end
+
+ describe 'snippet' do
+ context 'when PersonalSnippet' do
+ it_behaves_like 'parsing gl_repository identifier' do
+ let(:record_id) { personal_snippet.id }
+ let(:identifier) { "snippet-#{record_id}" }
+ let(:expected_container) { personal_snippet }
+ let(:expected_type) { Gitlab::GlRepository::SNIPPET }
+ end
+ end
+
+ context 'when ProjectSnippet' do
+ it_behaves_like 'parsing gl_repository identifier' do
+ let(:record_id) { project_snippet.id }
+ let(:identifier) { "snippet-#{record_id}" }
+ let(:expected_container) { project_snippet }
+ let(:expected_type) { Gitlab::GlRepository::SNIPPET }
+ end
+ end
+ end
+
+ describe 'design' do
+ it_behaves_like 'parsing gl_repository identifier' do
+ let(:record_id) { project.id }
+ let(:identifier) { "design-#{project.id}" }
+ let(:expected_container) { project }
+ let(:expected_type) { Gitlab::GlRepository::DESIGN }
+ end
+ end
+
+ describe 'incorrect format' do
+ def expect_error_raised_for(identifier)
+ expect { described_class.new(identifier) }.to raise_error(ArgumentError)
+ end
+
+ it 'raises error for incorrect id' do
+ expect_error_raised_for('wiki-noid')
+ end
+
+ it 'raises error for incorrect type' do
+ expect_error_raised_for('foo-2')
+ end
+
+ it 'raises error for incorrect three-segment container' do
+ expect_error_raised_for('snippet-2-wiki')
+ end
+
+ it 'raises error for one segment' do
+ expect_error_raised_for('snippet')
+ end
+
+ it 'raises error for more than three segments' do
+ expect_error_raised_for('project-1-wiki-bar')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/gl_repository/repo_type_spec.rb b/spec/lib/gitlab/gl_repository/repo_type_spec.rb
index bf6df55b71e..f5270104d2f 100644
--- a/spec/lib/gitlab/gl_repository/repo_type_spec.rb
+++ b/spec/lib/gitlab/gl_repository/repo_type_spec.rb
@@ -13,7 +13,7 @@ describe Gitlab::GlRepository::RepoType do
describe Gitlab::GlRepository::PROJECT do
it_behaves_like 'a repo type' do
- let(:expected_id) { project.id.to_s }
+ let(:expected_id) { project.id }
let(:expected_identifier) { "project-#{expected_id}" }
let(:expected_suffix) { '' }
let(:expected_container) { project }
@@ -42,7 +42,7 @@ describe Gitlab::GlRepository::RepoType do
describe Gitlab::GlRepository::WIKI do
it_behaves_like 'a repo type' do
- let(:expected_id) { project.id.to_s }
+ let(:expected_id) { project.id }
let(:expected_identifier) { "wiki-#{expected_id}" }
let(:expected_suffix) { '.wiki' }
let(:expected_container) { project }
@@ -72,7 +72,7 @@ describe Gitlab::GlRepository::RepoType do
describe Gitlab::GlRepository::SNIPPET do
context 'when PersonalSnippet' do
it_behaves_like 'a repo type' do
- let(:expected_id) { personal_snippet.id.to_s }
+ let(:expected_id) { personal_snippet.id }
let(:expected_identifier) { "snippet-#{expected_id}" }
let(:expected_suffix) { '' }
let(:expected_repository) { personal_snippet.repository }
@@ -101,7 +101,7 @@ describe Gitlab::GlRepository::RepoType do
context 'when ProjectSnippet' do
it_behaves_like 'a repo type' do
- let(:expected_id) { project_snippet.id.to_s }
+ let(:expected_id) { project_snippet.id }
let(:expected_identifier) { "snippet-#{expected_id}" }
let(:expected_suffix) { '' }
let(:expected_repository) { project_snippet.repository }
@@ -131,7 +131,7 @@ describe Gitlab::GlRepository::RepoType do
describe Gitlab::GlRepository::DESIGN do
it_behaves_like 'a repo type' do
let(:expected_identifier) { "design-#{project.id}" }
- let(:expected_id) { project.id.to_s }
+ let(:expected_id) { project.id }
let(:expected_suffix) { '.design' }
let(:expected_repository) { project.design_repository }
let(:expected_container) { project }
diff --git a/spec/lib/gitlab/gl_repository_spec.rb b/spec/lib/gitlab/gl_repository_spec.rb
index 5f5244b7116..413540b4db8 100644
--- a/spec/lib/gitlab/gl_repository_spec.rb
+++ b/spec/lib/gitlab/gl_repository_spec.rb
@@ -11,7 +11,7 @@ describe ::Gitlab::GlRepository do
expect(described_class.parse("project-#{project.id}")).to eq([project, project, Gitlab::GlRepository::PROJECT])
end
- it 'parses a wiki gl_repository' do
+ it 'parses a project wiki gl_repository' do
expect(described_class.parse("wiki-#{project.id}")).to eq([project, project, Gitlab::GlRepository::WIKI])
end
diff --git a/spec/lib/gitlab/gon_helper_spec.rb b/spec/lib/gitlab/gon_helper_spec.rb
index 1ff2334bacf..7b0e0d01257 100644
--- a/spec/lib/gitlab/gon_helper_spec.rb
+++ b/spec/lib/gitlab/gon_helper_spec.rb
@@ -12,21 +12,19 @@ describe Gitlab::GonHelper do
describe '#push_frontend_feature_flag' do
it 'pushes a feature flag to the frontend' do
gon = instance_double('gon')
+ thing = stub_feature_flag_gate('thing')
+
+ stub_feature_flags(my_feature_flag: thing)
allow(helper)
.to receive(:gon)
.and_return(gon)
- expect(Feature)
- .to receive(:enabled?)
- .with(:my_feature_flag, 10)
- .and_return(true)
-
expect(gon)
.to receive(:push)
.with({ features: { 'myFeatureFlag' => true } }, true)
- helper.push_frontend_feature_flag(:my_feature_flag, 10)
+ helper.push_frontend_feature_flag(:my_feature_flag, thing)
end
end
diff --git a/spec/lib/gitlab/graphql/pagination/filterable_array_connection_spec.rb b/spec/lib/gitlab/graphql/pagination/filterable_array_connection_spec.rb
deleted file mode 100644
index c82e3ad3019..00000000000
--- a/spec/lib/gitlab/graphql/pagination/filterable_array_connection_spec.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe Gitlab::Graphql::Pagination::FilterableArrayConnection do
- let(:callback) { proc { |nodes| nodes } }
- let(:all_nodes) { Gitlab::Graphql::FilterableArray.new(callback, 1, 2, 3, 4, 5) }
- let(:arguments) { {} }
-
- subject(:connection) do
- described_class.new(all_nodes, { max_page_size: 3 }.merge(arguments))
- end
-
- describe '#nodes' do
- let(:paged_nodes) { subject.nodes }
-
- it_behaves_like 'connection with paged nodes' do
- let(:paged_nodes_size) { 3 }
- end
-
- context 'when callback filters some nodes' do
- let(:callback) { proc { |nodes| nodes[1..-1] } }
-
- it 'does not return filtered elements' do
- expect(subject.nodes).to contain_exactly(all_nodes[1], all_nodes[2])
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
index ba77bc95bb5..ed728444b17 100644
--- a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
@@ -9,14 +9,6 @@ describe Gitlab::Graphql::Pagination::Keyset::Connection do
let(:schema) { GraphQL::Schema.define(query: query_type, mutation: nil)}
let(:context) { GraphQL::Query::Context.new(query: OpenStruct.new(schema: schema), values: nil, object: nil) }
- before do
- stub_const('NoPrimaryKey', Class.new(ActiveRecord::Base))
- NoPrimaryKey.class_eval do
- self.table_name = 'no_primary_key'
- self.primary_key = nil
- end
- end
-
subject(:connection) do
described_class.new(nodes, { context: context, max_page_size: 3 }.merge(arguments))
end
@@ -41,7 +33,7 @@ describe Gitlab::Graphql::Pagination::Keyset::Connection do
let(:nodes) { Project.order(:updated_at) }
it 'returns the encoded value of the order' do
- expect(decoded_cursor(cursor)).to include('updated_at' => project.updated_at.to_s)
+ expect(decoded_cursor(cursor)).to include('updated_at' => project.updated_at.strftime('%Y-%m-%d %H:%M:%S.%N %Z'))
end
it 'includes the :id even when not specified in the order' do
@@ -53,7 +45,7 @@ describe Gitlab::Graphql::Pagination::Keyset::Connection do
let(:nodes) { Project.order(:updated_at).order(:created_at) }
it 'returns the encoded value of the order' do
- expect(decoded_cursor(cursor)).to include('updated_at' => project.updated_at.to_s)
+ expect(decoded_cursor(cursor)).to include('updated_at' => project.updated_at.strftime('%Y-%m-%d %H:%M:%S.%N %Z'))
end
end
@@ -61,7 +53,7 @@ describe Gitlab::Graphql::Pagination::Keyset::Connection do
let(:nodes) { Project.order(Arel.sql('projects.updated_at IS NULL')).order(:updated_at).order(:id) }
it 'returns the encoded value of the order' do
- expect(decoded_cursor(cursor)).to include('updated_at' => project.updated_at.to_s)
+ expect(decoded_cursor(cursor)).to include('updated_at' => project.updated_at.strftime('%Y-%m-%d %H:%M:%S.%N %Z'))
end
end
end
@@ -303,6 +295,14 @@ describe Gitlab::Graphql::Pagination::Keyset::Connection do
end
context 'when there is no primary key' do
+ before do
+ stub_const('NoPrimaryKey', Class.new(ActiveRecord::Base))
+ NoPrimaryKey.class_eval do
+ self.table_name = 'no_primary_key'
+ self.primary_key = nil
+ end
+ end
+
let(:nodes) { NoPrimaryKey.all }
it 'raises an error' do
@@ -311,4 +311,96 @@ describe Gitlab::Graphql::Pagination::Keyset::Connection do
end
end
end
+
+ describe '#has_previous_page and #has_next_page' do
+ # using a list of 5 items with a max_page_size of 3
+ let_it_be(:project_list) { create_list(:project, 5) }
+ let_it_be(:nodes) { Project.order(:id) }
+
+ context 'when default query' do
+ let(:arguments) { {} }
+
+ it 'has no previous, but a next' do
+ expect(subject.has_previous_page).to be_falsey
+ expect(subject.has_next_page).to be_truthy
+ end
+ end
+
+ context 'when before is first item' do
+ let(:arguments) { { before: encoded_cursor(project_list.first) } }
+
+ it 'has no previous, but a next' do
+ expect(subject.has_previous_page).to be_falsey
+ expect(subject.has_next_page).to be_truthy
+ end
+ end
+
+ describe 'using `before`' do
+ context 'when before is the last item' do
+ let(:arguments) { { before: encoded_cursor(project_list.last) } }
+
+ it 'has no previous, but a next' do
+ expect(subject.has_previous_page).to be_falsey
+ expect(subject.has_next_page).to be_truthy
+ end
+ end
+
+ context 'when before and last specified' do
+ let(:arguments) { { before: encoded_cursor(project_list.last), last: 2 } }
+
+ it 'has a previous and a next' do
+ expect(subject.has_previous_page).to be_truthy
+ expect(subject.has_next_page).to be_truthy
+ end
+ end
+
+ context 'when before and last does not request all remaining nodes' do
+ let(:arguments) { { before: encoded_cursor(project_list.last), last: 2 } }
+
+ it 'has a previous and a next' do
+ expect(subject.has_previous_page).to be_truthy
+ expect(subject.has_next_page).to be_truthy
+ end
+ end
+
+ context 'when before and last does request all remaining nodes' do
+ let(:arguments) { { before: encoded_cursor(project_list[1]), last: 3 } }
+
+ it 'has a previous and a next' do
+ expect(subject.has_previous_page).to be_falsey
+ expect(subject.has_next_page).to be_truthy
+ expect(subject.nodes).to eq [project_list[0]]
+ end
+ end
+ end
+
+ describe 'using `after`' do
+ context 'when after is the first item' do
+ let(:arguments) { { after: encoded_cursor(project_list.first) } }
+
+ it 'has a previous, and a next' do
+ expect(subject.has_previous_page).to be_truthy
+ expect(subject.has_next_page).to be_truthy
+ end
+ end
+
+ context 'when after and first specified' do
+ let(:arguments) { { after: encoded_cursor(project_list.first), first: 2 } }
+
+ it 'has a previous and a next' do
+ expect(subject.has_previous_page).to be_truthy
+ expect(subject.has_next_page).to be_truthy
+ end
+ end
+
+ context 'when before and last does request all remaining nodes' do
+ let(:arguments) { { after: encoded_cursor(project_list[2]), last: 3 } }
+
+ it 'has a previous but no next' do
+ expect(subject.has_previous_page).to be_truthy
+ expect(subject.has_next_page).to be_falsey
+ end
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/import/merge_request_creator_spec.rb b/spec/lib/gitlab/import/merge_request_creator_spec.rb
index ff2c3032dbf..48a57f9b251 100644
--- a/spec/lib/gitlab/import/merge_request_creator_spec.rb
+++ b/spec/lib/gitlab/import/merge_request_creator_spec.rb
@@ -15,7 +15,7 @@ describe Gitlab::Import::MergeRequestCreator do
it 'updates the data' do
commits_count = commits.count
- merge_request.merge_request_diffs.destroy_all # rubocop: disable DestroyAll
+ merge_request.merge_request_diffs.destroy_all # rubocop: disable Cop/DestroyAll
expect(merge_request.merge_request_diffs.count).to eq(0)
diff --git a/spec/lib/gitlab/import/set_async_jid_spec.rb b/spec/lib/gitlab/import/set_async_jid_spec.rb
index f1ae23a4a6a..d2933cfd371 100644
--- a/spec/lib/gitlab/import/set_async_jid_spec.rb
+++ b/spec/lib/gitlab/import/set_async_jid_spec.rb
@@ -8,7 +8,7 @@ describe Gitlab::Import::SetAsyncJid do
it 'sets the JID in Redis' do
expect(Gitlab::SidekiqStatus)
.to receive(:set)
- .with("async-import/project-import-state/#{project.id}", StuckImportJobsWorker::IMPORT_JOBS_EXPIRATION)
+ .with("async-import/project-import-state/#{project.id}", Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION)
.and_call_original
described_class.set_jid(project.import_state)
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index c78b4501310..ef9321dc1fc 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -179,10 +179,12 @@ merge_request_context_commits:
ci_pipelines:
- project
- user
+- ci_ref
- stages
- statuses
- latest_statuses_ordered_by_stage
- builds
+- bridges
- processables
- trigger_requests
- variables
@@ -195,7 +197,7 @@ ci_pipelines:
- cancelable_statuses
- manual_actions
- scheduled_actions
-- artifacts
+- downloadable_artifacts
- pipeline_schedule
- merge_requests_as_head_pipeline
- merge_request
@@ -220,6 +222,11 @@ ci_pipelines:
- pipeline_config
- security_scans
- daily_build_group_report_results
+- latest_builds
+- daily_report_results
+ci_refs:
+- project
+- ci_pipelines
pipeline_variables:
- pipeline
stages:
@@ -236,6 +243,7 @@ statuses:
- stage
- user
- auto_canceled_by
+- needs
variables:
- project
triggers:
@@ -417,6 +425,7 @@ project:
- deploy_tokens
- settings
- ci_cd_settings
+- project_settings
- import_export_upload
- repository_languages
- pool_repository
@@ -479,6 +488,7 @@ project:
- upstream_project_subscriptions
- downstream_project_subscriptions
- service_desk_setting
+- security_setting
- import_failures
- container_expiration_policy
- resource_groups
@@ -494,6 +504,7 @@ project:
- repository_storage_moves
- freeze_periods
- webex_teams_service
+- build_report_results
award_emoji:
- awardable
- user
@@ -579,6 +590,7 @@ boards:
- board_assignee
- assignee
- labels
+- user_preferences
lists:
- user
- milestone
@@ -596,6 +608,7 @@ design: &design
- versions
- notes
- user_mentions
+- events
designs: *design
actions:
- design
diff --git a/spec/lib/gitlab/import_export/attributes_permitter_spec.rb b/spec/lib/gitlab/import_export/attributes_permitter_spec.rb
new file mode 100644
index 00000000000..d6217811b9c
--- /dev/null
+++ b/spec/lib/gitlab/import_export/attributes_permitter_spec.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ImportExport::AttributesPermitter do
+ let(:yml_config) do
+ <<-EOF
+ tree:
+ project:
+ - labels:
+ - :priorities
+ - milestones:
+ - events:
+ - :push_event_payload
+
+ included_attributes:
+ labels:
+ - :title
+ - :description
+
+ methods:
+ labels:
+ - :type
+ EOF
+ end
+
+ let(:file) { Tempfile.new(%w(import_export .yml)) }
+ let(:config_hash) { Gitlab::ImportExport::Config.new(config: file.path).to_h }
+
+ before do
+ file.write(yml_config)
+ file.rewind
+ end
+
+ after do
+ file.close
+ file.unlink
+ end
+
+ subject { described_class.new(config: config_hash) }
+
+ describe '#permitted_attributes' do
+ it 'builds permitted attributes hash' do
+ expect(subject.permitted_attributes).to match(
+ a_hash_including(
+ project: [:labels, :milestones],
+ labels: [:priorities, :title, :description, :type],
+ events: [:push_event_payload],
+ milestones: [:events],
+ priorities: [],
+ push_event_payload: []
+ )
+ )
+ end
+ end
+
+ describe '#permit' do
+ let(:unfiltered_hash) do
+ {
+ title: 'Title',
+ description: 'Description',
+ undesired_attribute: 'Undesired Attribute',
+ another_attribute: 'Another Attribute'
+ }
+ end
+
+ it 'only allows permitted attributes' do
+ expect(subject.permit(:labels, unfiltered_hash)).to eq(title: 'Title', description: 'Description')
+ end
+ end
+
+ describe '#permitted_attributes_for' do
+ it 'returns an array of permitted attributes for a relation' do
+ expect(subject.permitted_attributes_for(:labels)).to contain_exactly(:title, :description, :type, :priorities)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/import_test_coverage_spec.rb b/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
index 038b95809b4..c5a7327332e 100644
--- a/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
+++ b/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
@@ -10,61 +10,66 @@ require 'spec_helper'
describe 'Test coverage of the Project Import' do
include ConfigurationHelper
- # `MUTED_RELATIONS` is a technical debt.
+ # `muted_relations` is a technical debt.
# This list is expected to be empty or used as a workaround
# in case this spec blocks an important urgent MR.
# It is also expected that adding a relation in the list should lead to
# opening a follow-up issue to fix this.
- MUTED_RELATIONS = %w[
- project.milestones.events.push_event_payload
- project.issues.events
- project.issues.events.push_event_payload
- project.issues.notes.events
- project.issues.notes.events.push_event_payload
- project.issues.milestone.events.push_event_payload
- project.issues.issue_milestones
- project.issues.issue_milestones.milestone
- project.issues.resource_label_events.label.priorities
- project.issues.designs.notes
- project.issues.designs.notes.author
- project.issues.designs.notes.events
- project.issues.designs.notes.events.push_event_payload
- project.merge_requests.metrics
- project.merge_requests.notes.events.push_event_payload
- project.merge_requests.events.push_event_payload
- project.merge_requests.timelogs
- project.merge_requests.label_links
- project.merge_requests.label_links.label
- project.merge_requests.label_links.label.priorities
- project.merge_requests.milestone
- project.merge_requests.milestone.events
- project.merge_requests.milestone.events.push_event_payload
- project.merge_requests.merge_request_milestones
- project.merge_requests.merge_request_milestones.milestone
- project.merge_requests.resource_label_events.label
- project.merge_requests.resource_label_events.label.priorities
- project.ci_pipelines.notes.events
- project.ci_pipelines.notes.events.push_event_payload
- project.protected_branches.unprotect_access_levels
- project.prometheus_metrics
- project.metrics_setting
- project.boards.lists.label.priorities
- project.service_desk_setting
- ].freeze
+ let(:muted_relations) do
+ %w[
+ project.milestones.events.push_event_payload
+ project.issues.events
+ project.issues.events.push_event_payload
+ project.issues.notes.events
+ project.issues.notes.events.push_event_payload
+ project.issues.milestone.events.push_event_payload
+ project.issues.issue_milestones
+ project.issues.issue_milestones.milestone
+ project.issues.resource_label_events.label.priorities
+ project.issues.designs.notes
+ project.issues.designs.notes.author
+ project.issues.designs.notes.events
+ project.issues.designs.notes.events.push_event_payload
+ project.merge_requests.metrics
+ project.merge_requests.notes.events.push_event_payload
+ project.merge_requests.events.push_event_payload
+ project.merge_requests.timelogs
+ project.merge_requests.label_links
+ project.merge_requests.label_links.label
+ project.merge_requests.label_links.label.priorities
+ project.merge_requests.milestone
+ project.merge_requests.milestone.events
+ project.merge_requests.milestone.events.push_event_payload
+ project.merge_requests.merge_request_milestones
+ project.merge_requests.merge_request_milestones.milestone
+ project.merge_requests.resource_label_events.label
+ project.merge_requests.resource_label_events.label.priorities
+ project.ci_pipelines.notes.events
+ project.ci_pipelines.notes.events.push_event_payload
+ project.protected_branches.unprotect_access_levels
+ project.prometheus_metrics
+ project.metrics_setting
+ project.boards.lists.label.priorities
+ project.service_desk_setting
+ project.security_setting
+ ].freeze
+ end
# A list of JSON fixture files we use to test Import.
# Most of the relations are present in `complex/project.json`
# which is our main fixture.
- PROJECT_JSON_FIXTURES = [
- 'spec/fixtures/lib/gitlab/import_export/complex/project.json',
- 'spec/fixtures/lib/gitlab/import_export/group/project.json',
- 'spec/fixtures/lib/gitlab/import_export/light/project.json',
- 'spec/fixtures/lib/gitlab/import_export/milestone-iid/project.json',
- 'spec/fixtures/lib/gitlab/import_export/designs/project.json'
- ].freeze
+ let(:project_json_fixtures) do
+ [
+ 'spec/fixtures/lib/gitlab/import_export/complex/project.json',
+ 'spec/fixtures/lib/gitlab/import_export/group/project.json',
+ 'spec/fixtures/lib/gitlab/import_export/light/project.json',
+ 'spec/fixtures/lib/gitlab/import_export/milestone-iid/project.json',
+ 'spec/fixtures/lib/gitlab/import_export/designs/project.json'
+ ].freeze
+ end
it 'ensures that all imported/exported relations are present in test JSONs' do
- not_tested_relations = (relations_from_config - tested_relations) - MUTED_RELATIONS
+ not_tested_relations = (relations_from_config - tested_relations) - muted_relations
expect(not_tested_relations).to be_empty, failure_message(not_tested_relations)
end
@@ -76,7 +81,7 @@ describe 'Test coverage of the Project Import' do
end
def tested_relations
- PROJECT_JSON_FIXTURES.flat_map(&method(:relations_from_json)).to_set
+ project_json_fixtures.flat_map(&method(:relations_from_json)).to_set
end
def relations_from_json(json_file)
@@ -106,7 +111,7 @@ describe 'Test coverage of the Project Import' do
These relations seem to be added recently and
they are expected to be covered in our Import specs: #{not_tested_relations}.
- To do that, expand one of the files listed in `PROJECT_JSON_FIXTURES`
+ To do that, expand one of the files listed in `project_json_fixtures`
(or expand the list if you consider adding a new fixture file).
After that, add a new spec into
@@ -114,7 +119,7 @@ describe 'Test coverage of the Project Import' do
to check that the relation is being imported correctly.
In case the spec breaks the master or there is a sense of urgency,
- you could include the relations into the `MUTED_RELATIONS` list.
+ you could include the relations into the `muted_relations` list.
Muting relations is considered to be a temporary solution, so please
open a follow-up issue and try to fix that when it is possible.
diff --git a/spec/lib/gitlab/import_export/importer_spec.rb b/spec/lib/gitlab/import_export/importer_spec.rb
index 60179146416..494f7e3a00d 100644
--- a/spec/lib/gitlab/import_export/importer_spec.rb
+++ b/spec/lib/gitlab/import_export/importer_spec.rb
@@ -18,6 +18,7 @@ describe Gitlab::ImportExport::Importer do
FileUtils.mkdir_p(shared.export_path)
ImportExportUpload.create(project: project, import_file: import_file)
+ allow(FileUtils).to receive(:rm_rf).and_call_original
end
after do
@@ -78,6 +79,13 @@ describe Gitlab::ImportExport::Importer do
expect(project.import_export_upload.import_file&.file).to be_nil
end
+ it 'removes tmp files' do
+ importer.execute
+
+ expect(FileUtils).to have_received(:rm_rf).with(shared.base_path)
+ expect(Dir.exist?(shared.base_path)).to eq(false)
+ end
+
it 'sets the correct visibility_level when visibility level is a string' do
project.create_or_update_import_data(
data: { override_params: { visibility_level: Gitlab::VisibilityLevel::PRIVATE.to_s } }
@@ -89,6 +97,49 @@ describe Gitlab::ImportExport::Importer do
end
end
+ context 'when import fails' do
+ let(:error_message) { 'foo' }
+
+ shared_examples 'removes any non migrated snippet' do
+ specify do
+ create_list(:project_snippet, 2, project: project)
+ snippet_with_repo = create(:project_snippet, :repository, project: project)
+
+ expect { importer.execute }.to change(Snippet, :count).by(-2).and(raise_error(Projects::ImportService::Error))
+
+ expect(snippet_with_repo.reload).to be_present
+ end
+ end
+
+ context 'when there is a graceful error' do
+ before do
+ allow_next_instance_of(Gitlab::ImportExport::AvatarRestorer) do |instance|
+ allow(instance).to receive(:avatar_export_file).and_raise(StandardError, error_message)
+ end
+ end
+
+ it 'raises an exception' do
+ expect { importer.execute }.to raise_error(Projects::ImportService::Error, error_message)
+ end
+
+ it_behaves_like 'removes any non migrated snippet'
+ end
+
+ context 'when an unexpected exception is raised' do
+ before do
+ allow_next_instance_of(Gitlab::ImportExport::AvatarRestorer) do |instance|
+ allow(instance).to receive(:restore).and_raise(StandardError, error_message)
+ end
+ end
+
+ it 'captures it and raises the Projects::ImportService::Error exception' do
+ expect { importer.execute }.to raise_error(Projects::ImportService::Error, error_message)
+ end
+
+ it_behaves_like 'removes any non migrated snippet'
+ end
+ end
+
context 'when project successfully restored' do
context "with a project in a user's namespace" do
let!(:existing_project) { create(:project, namespace: user.namespace) }
diff --git a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
index 076f454895f..30f8280fda3 100644
--- a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
+++ b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
@@ -95,4 +95,26 @@ describe Gitlab::ImportExport::JSON::StreamingSerializer do
end
end
end
+
+ describe '.batch_size' do
+ context 'when export_reduce_relation_batch_size feature flag is enabled' do
+ before do
+ stub_feature_flags(export_reduce_relation_batch_size: true)
+ end
+
+ it 'returns 20' do
+ expect(described_class.batch_size(exportable)).to eq(described_class::SMALLER_BATCH_SIZE)
+ end
+ end
+
+ context 'when export_reduce_relation_batch_size feature flag is disabled' do
+ before do
+ stub_feature_flags(export_reduce_relation_batch_size: false)
+ end
+
+ it 'returns default batch size' do
+ expect(described_class.batch_size(exportable)).to eq(described_class::BATCH_SIZE)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/import_export/legacy_relation_tree_saver_spec.rb b/spec/lib/gitlab/import_export/legacy_relation_tree_saver_spec.rb
index 958865f52a0..6562aa5b8a6 100644
--- a/spec/lib/gitlab/import_export/legacy_relation_tree_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/legacy_relation_tree_saver_spec.rb
@@ -8,17 +8,35 @@ describe Gitlab::ImportExport::LegacyRelationTreeSaver do
let(:tree) { {} }
describe '#serialize' do
- let(:serializer) { instance_double(Gitlab::ImportExport::FastHashSerializer) }
+ shared_examples 'FastHashSerializer with batch size' do |batch_size|
+ let(:serializer) { instance_double(Gitlab::ImportExport::FastHashSerializer) }
- it 'uses FastHashSerializer' do
- expect(Gitlab::ImportExport::FastHashSerializer)
- .to receive(:new)
- .with(exportable, tree)
- .and_return(serializer)
+ it 'uses FastHashSerializer' do
+ expect(Gitlab::ImportExport::FastHashSerializer)
+ .to receive(:new)
+ .with(exportable, tree, batch_size: batch_size)
+ .and_return(serializer)
- expect(serializer).to receive(:execute)
+ expect(serializer).to receive(:execute)
- relation_tree_saver.serialize(exportable, tree)
+ relation_tree_saver.serialize(exportable, tree)
+ end
+ end
+
+ context 'when export_reduce_relation_batch_size feature flag is enabled' do
+ before do
+ stub_feature_flags(export_reduce_relation_batch_size: true)
+ end
+
+ include_examples 'FastHashSerializer with batch size', Gitlab::ImportExport::JSON::StreamingSerializer::SMALLER_BATCH_SIZE
+ end
+
+ context 'when export_reduce_relation_batch_size feature flag is disabled' do
+ before do
+ stub_feature_flags(export_reduce_relation_batch_size: false)
+ end
+
+ include_examples 'FastHashSerializer with batch size', Gitlab::ImportExport::JSON::StreamingSerializer::BATCH_SIZE
end
end
end
diff --git a/spec/lib/gitlab/import_export/project/relation_factory_spec.rb b/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
index 175da623c1b..3339129cb8f 100644
--- a/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
+++ b/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
@@ -18,6 +18,22 @@ describe Gitlab::ImportExport::Project::RelationFactory do
excluded_keys: excluded_keys)
end
+ before do
+ # Mocks an ActiveRecordish object with the dodgy columns
+ stub_const('FooModel', Class.new)
+ FooModel.class_eval do
+ include ActiveModel::Model
+
+ def initialize(params = {})
+ params.each { |key, value| send("#{key}=", value) }
+ end
+
+ def values
+ instance_variables.map { |ivar| instance_variable_get(ivar) }
+ end
+ end
+ end
+
context 'hook object' do
let(:relation_sym) { :hooks }
let(:id) { 999 }
@@ -83,19 +99,6 @@ describe Gitlab::ImportExport::Project::RelationFactory do
end
end
- # Mocks an ActiveRecordish object with the dodgy columns
- class FooModel
- include ActiveModel::Model
-
- def initialize(params = {})
- params.each { |key, value| send("#{key}=", value) }
- end
-
- def values
- instance_variables.map { |ivar| instance_variable_get(ivar) }
- end
- end
-
context 'merge_request object' do
let(:relation_sym) { :merge_requests }
@@ -208,11 +211,12 @@ describe Gitlab::ImportExport::Project::RelationFactory do
}
end
- class HazardousFooModel < FooModel
- attr_accessor :service_id, :moved_to_id, :namespace_id, :ci_id, :random_project_id, :random_id, :milestone_id, :project_id
- end
-
before do
+ stub_const('HazardousFooModel', Class.new(FooModel))
+ HazardousFooModel.class_eval do
+ attr_accessor :service_id, :moved_to_id, :namespace_id, :ci_id, :random_project_id, :random_id, :milestone_id, :project_id
+ end
+
allow(HazardousFooModel).to receive(:reflect_on_association).and_return(nil)
end
@@ -246,11 +250,12 @@ describe Gitlab::ImportExport::Project::RelationFactory do
Gitlab::ImportExport::Project::RelationFactory::PROJECT_REFERENCES.map { |ref| { ref => 99 } }.inject(:merge)
end
- class ProjectFooModel < FooModel
- attr_accessor(*Gitlab::ImportExport::Project::RelationFactory::PROJECT_REFERENCES)
- end
-
before do
+ stub_const('ProjectFooModel', Class.new(FooModel))
+ ProjectFooModel.class_eval do
+ attr_accessor(*Gitlab::ImportExport::Project::RelationFactory::PROJECT_REFERENCES)
+ end
+
allow(ProjectFooModel).to receive(:reflect_on_association).and_return(nil)
end
diff --git a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
index 58589a7bbbe..867dc37c5c5 100644
--- a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
@@ -26,7 +26,7 @@ describe Gitlab::ImportExport::Project::TreeRestorer do
@project = create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project')
@shared = @project.import_export_shared
- allow(Feature).to receive(:enabled?) { true }
+ stub_all_feature_flags
stub_feature_flags(project_import_ndjson: ndjson_enabled)
setup_import_export_config('complex')
diff --git a/spec/lib/gitlab/import_export/project/tree_saver_spec.rb b/spec/lib/gitlab/import_export/project/tree_saver_spec.rb
index b9bfe253f10..533d1097928 100644
--- a/spec/lib/gitlab/import_export/project/tree_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/project/tree_saver_spec.rb
@@ -29,7 +29,7 @@ describe Gitlab::ImportExport::Project::TreeSaver do
before_all do
RSpec::Mocks.with_temporary_scope do
- allow(Feature).to receive(:enabled?) { true }
+ stub_all_feature_flags
stub_feature_flags(project_export_as_ndjson: ndjson_enabled)
project.add_maintainer(user)
diff --git a/spec/lib/gitlab/import_export/repo_restorer_spec.rb b/spec/lib/gitlab/import_export/repo_restorer_spec.rb
index a61d966bdfa..d5839589633 100644
--- a/spec/lib/gitlab/import_export/repo_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/repo_restorer_spec.rb
@@ -13,11 +13,8 @@ describe Gitlab::ImportExport::RepoRestorer do
let(:shared) { project.import_export_shared }
let(:bundler) { Gitlab::ImportExport::RepoSaver.new(project: project_with_repo, shared: shared) }
let(:bundle_path) { File.join(shared.export_path, Gitlab::ImportExport.project_bundle_filename) }
- let(:restorer) do
- described_class.new(path_to_bundle: bundle_path,
- shared: shared,
- project: project)
- end
+
+ subject { described_class.new(path_to_bundle: bundle_path, shared: shared, project: project) }
before do
allow_next_instance_of(Gitlab::ImportExport) do |instance|
@@ -36,7 +33,25 @@ describe Gitlab::ImportExport::RepoRestorer do
end
it 'restores the repo successfully' do
- expect(restorer.restore).to be_truthy
+ expect(subject.restore).to be_truthy
+ end
+
+ context 'when the repository creation fails' do
+ before do
+ allow_next_instance_of(Repositories::DestroyService) do |instance|
+ expect(instance).to receive(:execute).and_call_original
+ end
+ end
+
+ it 'logs the error' do
+ allow(project.repository)
+ .to receive(:create_from_bundle)
+ .and_raise('9:CreateRepositoryFromBundle: target directory is non-empty')
+
+ expect(shared).to receive(:error).and_call_original
+
+ expect(subject.restore).to be_falsey
+ end
end
end
end
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index c29a85ce624..0d112bfdb2a 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -144,6 +144,7 @@ Releases::Link:
- url
- name
- filepath
+- link_type
- created_at
- updated_at
ProjectMember:
@@ -471,6 +472,7 @@ Service:
- properties
- template
- instance
+- alert_events
- push_events
- issues_events
- commit_events
@@ -701,6 +703,8 @@ Badge:
- type
ProjectCiCdSetting:
- group_runners_enabled
+ProjectSetting:
+- allow_merge_on_skipped_pipeline
ProtectedEnvironment:
- id
- project_id
@@ -749,6 +753,7 @@ ProjectMetricsSetting:
- external_dashboard_url
- created_at
- updated_at
+- dashboard_timezone
Board:
- id
- project_id
@@ -861,3 +866,11 @@ SystemNoteMetadata:
- action
- created_at
- updated_at
+ProjectSecuritySetting:
+ - project_id
+ - auto_fix_container_scanning
+ - auto_fix_dast
+ - auto_fix_dependency_scanning
+ - auto_fix_sast
+ - created_at
+ - updated_at
diff --git a/spec/lib/gitlab/import_export/saver_spec.rb b/spec/lib/gitlab/import_export/saver_spec.rb
index a59cf7a1260..18e9d7da32d 100644
--- a/spec/lib/gitlab/import_export/saver_spec.rb
+++ b/spec/lib/gitlab/import_export/saver_spec.rb
@@ -5,18 +5,21 @@ require 'fileutils'
describe Gitlab::ImportExport::Saver do
let!(:project) { create(:project, :public, name: 'project') }
- let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" }
+ let(:base_path) { "#{Dir.tmpdir}/project_tree_saver_spec" }
+ let(:export_path) { "#{base_path}/project_tree_saver_spec/export" }
let(:shared) { project.import_export_shared }
subject { described_class.new(exportable: project, shared: shared) }
before do
+ allow(shared).to receive(:base_path).and_return(base_path)
allow_next_instance_of(Gitlab::ImportExport) do |instance|
allow(instance).to receive(:storage_path).and_return(export_path)
end
FileUtils.mkdir_p(shared.export_path)
FileUtils.touch("#{shared.export_path}/tmp.bundle")
+ allow(FileUtils).to receive(:rm_rf).and_call_original
end
after do
@@ -31,4 +34,11 @@ describe Gitlab::ImportExport::Saver do
expect(ImportExportUpload.find_by(project: project).export_file.url)
.to match(%r[\/uploads\/-\/system\/import_export_upload\/export_file.*])
end
+
+ it 'removes tmp files' do
+ subject.save
+
+ expect(FileUtils).to have_received(:rm_rf).with(base_path)
+ expect(Dir.exist?(base_path)).to eq(false)
+ end
end
diff --git a/spec/lib/gitlab/import_export/snippet_repo_restorer_spec.rb b/spec/lib/gitlab/import_export/snippet_repo_restorer_spec.rb
index 3ce950d6a64..779b65e33d8 100644
--- a/spec/lib/gitlab/import_export/snippet_repo_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/snippet_repo_restorer_spec.rb
@@ -25,16 +25,24 @@ describe Gitlab::ImportExport::SnippetRepoRestorer do
expect(snippet.repository_exists?).to be_falsey
aggregate_failures do
- expect(restorer.restore).to be_truthy
-
- expect(snippet.repository_exists?).to be_truthy
- expect(snippet.snippet_repository).not_to be_nil
+ expect do
+ expect(restorer.restore).to be_truthy
+ end.to change { SnippetRepository.count }.by(1)
blob = snippet.repository.blob_at('HEAD', snippet.file_name)
expect(blob).not_to be_nil
expect(blob.data).to eq(snippet.content)
end
end
+
+ context 'when the repository creation fails' do
+ it 'returns false' do
+ allow_any_instance_of(Gitlab::BackgroundMigration::BackfillSnippetRepositories).to receive(:perform_by_ids).and_return(nil)
+
+ expect(restorer.restore).to be false
+ expect(shared.errors.first).to match(/Error creating repository for snippet/)
+ end
+ end
end
context 'when the snippet does not have a bundle file path' do
diff --git a/spec/lib/gitlab/import_export/snippets_repo_restorer_spec.rb b/spec/lib/gitlab/import_export/snippets_repo_restorer_spec.rb
index 242f6f6b58c..fdae259c2f1 100644
--- a/spec/lib/gitlab/import_export/snippets_repo_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/snippets_repo_restorer_spec.rb
@@ -8,43 +8,92 @@ describe Gitlab::ImportExport::SnippetsRepoRestorer do
describe 'bundle a snippet Git repo' do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, namespace: user.namespace) }
- let_it_be(:snippet_with_repo) { create(:project_snippet, :repository, project: project, author: user) }
- let_it_be(:snippet_without_repo) { create(:project_snippet, project: project, author: user) }
+ let!(:snippet1) { create(:project_snippet, project: project, author: user) }
+ let!(:snippet2) { create(:project_snippet, project: project, author: user) }
let(:shared) { project.import_export_shared }
let(:exporter) { Gitlab::ImportExport::SnippetsRepoSaver.new(current_user: user, project: project, shared: shared) }
let(:bundle_dir) { ::Gitlab::ImportExport.snippets_repo_bundle_path(shared.export_path) }
+ let(:service) { instance_double(Gitlab::ImportExport::SnippetRepoRestorer) }
let(:restorer) do
described_class.new(user: user,
shared: shared,
project: project)
end
- let(:service) { instance_double(Gitlab::ImportExport::SnippetRepoRestorer) }
-
- before do
- exporter.save
- end
after do
FileUtils.rm_rf(shared.export_path)
end
- it 'calls SnippetRepoRestorer per each snippet with the bundle path' do
- allow(service).to receive(:restore).and_return(true)
+ shared_examples 'imports snippet repositories' do
+ before do
+ snippet1.snippet_repository&.delete
+ snippet1.repository.remove
+
+ snippet2.snippet_repository&.delete
+ snippet2.repository.remove
+ end
+
+ specify do
+ expect(snippet1.repository_exists?).to be false
+ expect(snippet2.repository_exists?).to be false
+
+ expect(Gitlab::ImportExport::SnippetRepoRestorer).to receive(:new).with(hash_including(snippet: snippet1, path_to_bundle: bundle_path(snippet1))).and_call_original
+ expect(Gitlab::ImportExport::SnippetRepoRestorer).to receive(:new).with(hash_including(snippet: snippet2, path_to_bundle: bundle_path(snippet2))).and_call_original
+ expect(restorer.restore).to be_truthy
+
+ snippet1.repository.expire_exists_cache
+ snippet2.repository.expire_exists_cache
+
+ expect(snippet1.blobs).not_to be_empty
+ expect(snippet2.blobs).not_to be_empty
+ end
+ end
+
+ context 'when export has no snippet repository bundle' do
+ before do
+ expect(Dir.exist?(bundle_dir)).to be false
+ end
+
+ it_behaves_like 'imports snippet repositories'
+ end
+
+ context 'when export has snippet repository bundles and snippets without them' do
+ let!(:snippet1) { create(:project_snippet, :repository, project: project, author: user) }
+ let!(:snippet2) { create(:project_snippet, project: project, author: user) }
- expect(Gitlab::ImportExport::SnippetRepoRestorer).to receive(:new).with(hash_including(snippet: snippet_with_repo, path_to_bundle: bundle_path(snippet_with_repo))).and_return(service)
- expect(Gitlab::ImportExport::SnippetRepoRestorer).to receive(:new).with(hash_including(snippet: snippet_without_repo, path_to_bundle: bundle_path(snippet_without_repo))).and_return(service)
+ before do
+ exporter.save
- expect(restorer.restore).to be_truthy
+ expect(File.exist?(bundle_path(snippet1))).to be true
+ expect(File.exist?(bundle_path(snippet2))).to be false
+ end
+
+ it_behaves_like 'imports snippet repositories'
end
- context 'when one snippet cannot be saved' do
- it 'returns false and do not process other snippets' do
- allow(Gitlab::ImportExport::SnippetRepoRestorer).to receive(:new).with(hash_including(snippet: snippet_with_repo)).and_return(service)
+ context 'when export has only snippet bundles' do
+ let!(:snippet1) { create(:project_snippet, :repository, project: project, author: user) }
+ let!(:snippet2) { create(:project_snippet, :repository, project: project, author: user) }
+
+ before do
+ exporter.save
+
+ expect(File.exist?(bundle_path(snippet1))).to be true
+ expect(File.exist?(bundle_path(snippet2))).to be true
+ end
+
+ it_behaves_like 'imports snippet repositories'
+ end
+
+ context 'when any of the snippet repositories cannot be created' do
+ it 'continues processing other snippets and returns false' do
+ allow(Gitlab::ImportExport::SnippetRepoRestorer).to receive(:new).with(hash_including(snippet: snippet1)).and_return(service)
allow(service).to receive(:restore).and_return(false)
- expect(Gitlab::ImportExport::SnippetRepoRestorer).not_to receive(:new).with(hash_including(snippet: snippet_without_repo))
- expect(restorer.restore).to be_falsey
+ expect(Gitlab::ImportExport::SnippetRepoRestorer).to receive(:new).with(hash_including(snippet: snippet2)).and_call_original
+
+ expect(restorer.restore).to be false
end
end
diff --git a/spec/lib/gitlab/instrumentation/redis_base_spec.rb b/spec/lib/gitlab/instrumentation/redis_base_spec.rb
new file mode 100644
index 00000000000..5ea8f00114e
--- /dev/null
+++ b/spec/lib/gitlab/instrumentation/redis_base_spec.rb
@@ -0,0 +1,144 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Instrumentation::RedisBase, :request_store do
+ let(:instrumentation_class_a) do
+ stub_const('InstanceA', Class.new(described_class))
+ end
+
+ let(:instrumentation_class_b) do
+ stub_const('InstanceB', Class.new(described_class))
+ end
+
+ describe '.storage_key' do
+ it 'returns the class name with underscore' do
+ expect(instrumentation_class_a.storage_key).to eq('instance_a')
+ expect(instrumentation_class_b.storage_key).to eq('instance_b')
+ end
+ end
+
+ describe '.known_payload_keys' do
+ it 'returns generated payload keys' do
+ expect(instrumentation_class_a.known_payload_keys).to eq([:redis_instance_a_calls,
+ :redis_instance_a_duration_s,
+ :redis_instance_a_read_bytes,
+ :redis_instance_a_write_bytes])
+ end
+
+ it 'does not call calculation methods' do
+ expect(instrumentation_class_a).not_to receive(:get_request_count)
+ expect(instrumentation_class_a).not_to receive(:query_time)
+ expect(instrumentation_class_a).not_to receive(:read_bytes)
+ expect(instrumentation_class_a).not_to receive(:write_bytes)
+
+ instrumentation_class_a.known_payload_keys
+ end
+ end
+
+ describe '.payload' do
+ it 'returns values that are higher than 0' do
+ allow(instrumentation_class_a).to receive(:get_request_count) { 1 }
+ allow(instrumentation_class_a).to receive(:query_time) { 0.1 }
+ allow(instrumentation_class_a).to receive(:read_bytes) { 0.0 }
+ allow(instrumentation_class_a).to receive(:write_bytes) { 123 }
+
+ expected_payload = {
+ redis_instance_a_calls: 1,
+ redis_instance_a_write_bytes: 123,
+ redis_instance_a_duration_s: 0.1
+ }
+
+ expect(instrumentation_class_a.payload).to eq(expected_payload)
+ end
+ end
+
+ describe '.add_duration' do
+ it 'does not lose precision while adding' do
+ precision = 1.0 / (10**::Gitlab::InstrumentationHelper::DURATION_PRECISION)
+ 2.times { instrumentation_class_a.add_duration(0.4 * precision) }
+
+ # 2 * 0.4 should be 0.8 and get rounded to 1
+ expect(instrumentation_class_a.query_time).to eq(1 * precision)
+ end
+
+ context 'storage key overlapping' do
+ it 'keys do not overlap across storages' do
+ instrumentation_class_a.add_duration(0.4)
+ instrumentation_class_b.add_duration(0.5)
+
+ expect(instrumentation_class_a.query_time).to eq(0.4)
+ expect(instrumentation_class_b.query_time).to eq(0.5)
+ end
+ end
+ end
+
+ describe '.increment_request_count' do
+ context 'storage key overlapping' do
+ it 'keys do not overlap across storages' do
+ 3.times { instrumentation_class_a.increment_request_count }
+ 2.times { instrumentation_class_b.increment_request_count }
+
+ expect(instrumentation_class_a.get_request_count).to eq(3)
+ expect(instrumentation_class_b.get_request_count).to eq(2)
+ end
+ end
+ end
+
+ describe '.increment_write_bytes' do
+ context 'storage key overlapping' do
+ it 'keys do not overlap across storages' do
+ 2.times do
+ instrumentation_class_a.increment_write_bytes(42)
+ instrumentation_class_b.increment_write_bytes(77)
+ end
+
+ expect(instrumentation_class_a.write_bytes).to eq(42 * 2)
+ expect(instrumentation_class_b.write_bytes).to eq(77 * 2)
+ end
+ end
+ end
+
+ describe '.increment_read_bytes' do
+ context 'storage key overlapping' do
+ it 'keys do not overlap across storages' do
+ 2.times do
+ instrumentation_class_a.increment_read_bytes(42)
+ instrumentation_class_b.increment_read_bytes(77)
+ end
+
+ expect(instrumentation_class_a.read_bytes).to eq(42 * 2)
+ expect(instrumentation_class_b.read_bytes).to eq(77 * 2)
+ end
+ end
+ end
+
+ describe '.add_call_details' do
+ before do
+ allow(Gitlab::PerformanceBar).to receive(:enabled_for_request?) { true }
+ end
+
+ context 'storage key overlapping' do
+ it 'keys do not overlap across storages' do
+ 2.times do
+ instrumentation_class_a.add_call_details(0.3, [:set])
+ instrumentation_class_b.add_call_details(0.4, [:set])
+ end
+
+ expect(instrumentation_class_a.detail_store).to match(
+ [
+ a_hash_including(cmd: :set, duration: 0.3, backtrace: an_instance_of(Array)),
+ a_hash_including(cmd: :set, duration: 0.3, backtrace: an_instance_of(Array))
+ ]
+ )
+
+ expect(instrumentation_class_b.detail_store).to match(
+ [
+ a_hash_including(cmd: :set, duration: 0.4, backtrace: an_instance_of(Array)),
+ a_hash_including(cmd: :set, duration: 0.4, backtrace: an_instance_of(Array))
+ ]
+ )
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb b/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb
new file mode 100644
index 00000000000..25506d63091
--- /dev/null
+++ b/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require 'rspec-parameterized'
+
+describe Gitlab::Instrumentation::RedisInterceptor, :clean_gitlab_redis_shared_state, :request_store do
+ using RSpec::Parameterized::TableSyntax
+
+ describe 'read and write' do
+ where(:setup, :command, :expect_write, :expect_read) do
+ # The response is 'OK', the request size is the combined size of array
+ # elements. Exercise counting of a status reply.
+ [] | [:set, 'foo', 'bar'] | 3 + 3 + 3 | 2
+
+ # The response is 1001, so 4 bytes. Exercise counting an integer reply.
+ [[:set, 'foobar', 1000]] | [:incr, 'foobar'] | 4 + 6 | 4
+
+ # Exercise counting empty multi bulk reply
+ [] | [:hgetall, 'foobar'] | 7 + 6 | 0
+
+ # Hgetall response length is combined length of keys and values in the
+ # hash. Exercises counting of a multi bulk reply
+ [[:hset, 'myhash', 'field', 'hello world']] | [:hgetall, 'myhash'] | 7 + 6 | 5 + 11
+
+ # Exercise counting of a bulk reply
+ [[:set, 'foo', 'bar' * 100]] | [:get, 'foo'] | 3 + 3 | 3 * 100
+
+ # Nested array response: ['123456-89', ['foo', 'bar']]
+ [[:xadd, 'mystream', '123456-89', 'foo', 'bar']] | [:xrange, 'mystream', '-', '+'] | 6 + 8 + 1 + 1 | 9 + 3 + 3
+ end
+
+ with_them do
+ it 'counts bytes read and written' do
+ Gitlab::Redis::SharedState.with do |redis|
+ setup.each { |cmd| redis.call(cmd) }
+ RequestStore.clear!
+ redis.call(command)
+ end
+
+ expect(Gitlab::Instrumentation::Redis.read_bytes).to eq(expect_read)
+ expect(Gitlab::Instrumentation::Redis.write_bytes).to eq(expect_write)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/instrumentation/redis_spec.rb b/spec/lib/gitlab/instrumentation/redis_spec.rb
new file mode 100644
index 00000000000..8311c4f5bbb
--- /dev/null
+++ b/spec/lib/gitlab/instrumentation/redis_spec.rb
@@ -0,0 +1,114 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Instrumentation::Redis do
+ def stub_storages(method, value)
+ described_class::STORAGES.each do |storage|
+ allow(storage).to receive(method) { value }
+ end
+ end
+
+ shared_examples 'aggregation of redis storage data' do |method|
+ describe "#{method} sum" do
+ it "sums data from all Redis storages" do
+ amount = 0.3
+
+ stub_storages(method, amount)
+
+ expect(described_class.public_send(method)).to eq(described_class::STORAGES.size * amount)
+ end
+ end
+ end
+
+ it_behaves_like 'aggregation of redis storage data', :get_request_count
+ it_behaves_like 'aggregation of redis storage data', :query_time
+ it_behaves_like 'aggregation of redis storage data', :read_bytes
+ it_behaves_like 'aggregation of redis storage data', :write_bytes
+
+ describe '.known_payload_keys' do
+ it 'returns all known payload keys' do
+ expected_keys = [
+ :redis_calls,
+ :redis_duration_s,
+ :redis_read_bytes,
+ :redis_write_bytes,
+ :redis_action_cable_calls,
+ :redis_action_cable_duration_s,
+ :redis_action_cable_read_bytes,
+ :redis_action_cable_write_bytes,
+ :redis_cache_calls,
+ :redis_cache_duration_s,
+ :redis_cache_read_bytes,
+ :redis_cache_write_bytes,
+ :redis_queues_calls,
+ :redis_queues_duration_s,
+ :redis_queues_read_bytes,
+ :redis_queues_write_bytes,
+ :redis_shared_state_calls,
+ :redis_shared_state_duration_s,
+ :redis_shared_state_read_bytes,
+ :redis_shared_state_write_bytes
+ ]
+
+ expect(described_class.known_payload_keys).to eq(expected_keys)
+ end
+
+ it 'does not call storage calculation methods' do
+ described_class::STORAGES.each do |storage|
+ expect(storage).not_to receive(:get_request_count)
+ expect(storage).not_to receive(:query_time)
+ expect(storage).not_to receive(:read_bytes)
+ expect(storage).not_to receive(:write_bytes)
+ end
+
+ described_class.known_payload_keys
+ end
+ end
+
+ describe '.payload', :request_store do
+ before do
+ Gitlab::Redis::Cache.with { |redis| redis.set('cache-test', 321) }
+ Gitlab::Redis::SharedState.with { |redis| redis.set('shared-state-test', 123) }
+ end
+
+ it 'returns payload filtering out zeroed values' do
+ expected_payload = {
+ # Aggregated results
+ redis_calls: 2,
+ redis_duration_s: be >= 0,
+ redis_read_bytes: be >= 0,
+ redis_write_bytes: be >= 0,
+
+ # Cache results
+ redis_cache_calls: 1,
+ redis_cache_duration_s: be >= 0,
+ redis_cache_read_bytes: be >= 0,
+ redis_cache_write_bytes: be >= 0,
+
+ # Shared state results
+ redis_shared_state_calls: 1,
+ redis_shared_state_duration_s: be >= 0,
+ redis_shared_state_read_bytes: be >= 0,
+ redis_shared_state_write_bytes: be >= 0
+ }
+
+ expect(described_class.payload).to include(expected_payload)
+ expect(described_class.payload.keys).to match_array(expected_payload.keys)
+ end
+ end
+
+ describe '.detail_store' do
+ it 'returns a flat array of detail stores with the storage name added to each item' do
+ details_row = { cmd: 'GET foo', duration: 1 }
+
+ stub_storages(:detail_store, [details_row])
+
+ expect(described_class.detail_store)
+ .to contain_exactly(details_row.merge(storage: 'ActionCable'),
+ details_row.merge(storage: 'Cache'),
+ details_row.merge(storage: 'Queues'),
+ details_row.merge(storage: 'SharedState'))
+ end
+ end
+end
diff --git a/spec/lib/gitlab/instrumentation_helper_spec.rb b/spec/lib/gitlab/instrumentation_helper_spec.rb
index fdb842dac0f..15d377a16fc 100644
--- a/spec/lib/gitlab/instrumentation_helper_spec.rb
+++ b/spec/lib/gitlab/instrumentation_helper_spec.rb
@@ -6,6 +6,41 @@ require 'rspec-parameterized'
describe Gitlab::InstrumentationHelper do
using RSpec::Parameterized::TableSyntax
+ describe '.keys' do
+ it 'returns all available payload keys' do
+ expected_keys = [
+ :gitaly_calls,
+ :gitaly_duration_s,
+ :rugged_calls,
+ :rugged_duration_s,
+ :elasticsearch_calls,
+ :elasticsearch_duration_s,
+ :redis_calls,
+ :redis_duration_s,
+ :redis_read_bytes,
+ :redis_write_bytes,
+ :redis_action_cable_calls,
+ :redis_action_cable_duration_s,
+ :redis_action_cable_read_bytes,
+ :redis_action_cable_write_bytes,
+ :redis_cache_calls,
+ :redis_cache_duration_s,
+ :redis_cache_read_bytes,
+ :redis_cache_write_bytes,
+ :redis_queues_calls,
+ :redis_queues_duration_s,
+ :redis_queues_read_bytes,
+ :redis_queues_write_bytes,
+ :redis_shared_state_calls,
+ :redis_shared_state_duration_s,
+ :redis_shared_state_read_bytes,
+ :redis_shared_state_write_bytes
+ ]
+
+ expect(described_class.keys).to eq(expected_keys)
+ end
+ end
+
describe '.add_instrumentation_data', :request_store do
let(:payload) { {} }
@@ -34,12 +69,30 @@ describe Gitlab::InstrumentationHelper do
context 'when Redis calls are made' do
it 'adds Redis data and omits Gitaly data' do
- Gitlab::Redis::Cache.with { |redis| redis.get('test-instrumentation') }
+ Gitlab::Redis::Cache.with { |redis| redis.set('test-cache', 123) }
+ Gitlab::Redis::Queues.with { |redis| redis.set('test-queues', 321) }
subject
- expect(payload[:redis_calls]).to eq(1)
+ # Aggregated payload
+ expect(payload[:redis_calls]).to eq(2)
expect(payload[:redis_duration_s]).to be >= 0
+ expect(payload[:redis_read_bytes]).to be >= 0
+ expect(payload[:redis_write_bytes]).to be >= 0
+
+ # Shared state payload
+ expect(payload[:redis_queues_calls]).to eq(1)
+ expect(payload[:redis_queues_duration_s]).to be >= 0
+ expect(payload[:redis_queues_read_bytes]).to be >= 0
+ expect(payload[:redis_queues_write_bytes]).to be >= 0
+
+ # Cache payload
+ expect(payload[:redis_cache_calls]).to eq(1)
+ expect(payload[:redis_cache_duration_s]).to be >= 0
+ expect(payload[:redis_cache_read_bytes]).to be >= 0
+ expect(payload[:redis_cache_write_bytes]).to be >= 0
+
+ # Gitaly
expect(payload[:gitaly_calls]).to be_nil
expect(payload[:gitaly_duration]).to be_nil
end
diff --git a/spec/lib/gitlab/issuable_metadata_spec.rb b/spec/lib/gitlab/issuable_metadata_spec.rb
index 7632bc3060a..1920cecfc29 100644
--- a/spec/lib/gitlab/issuable_metadata_spec.rb
+++ b/spec/lib/gitlab/issuable_metadata_spec.rb
@@ -6,14 +6,12 @@ describe Gitlab::IssuableMetadata do
let(:user) { create(:user) }
let!(:project) { create(:project, :public, :repository, creator: user, namespace: user.namespace) }
- subject { Class.new { include Gitlab::IssuableMetadata }.new }
-
it 'returns an empty Hash if an empty collection is provided' do
- expect(subject.issuable_meta_data(Issue.none, 'Issue', user)).to eq({})
+ expect(described_class.new(user, Issue.none).data).to eq({})
end
it 'raises an error when given a collection with no limit' do
- expect { subject.issuable_meta_data(Issue.all, 'Issue', user) }.to raise_error(/must have a limit/)
+ expect { described_class.new(user, Issue.all) }.to raise_error(/must have a limit/)
end
context 'issues' do
@@ -25,7 +23,7 @@ describe Gitlab::IssuableMetadata do
let!(:closing_issues) { create(:merge_requests_closing_issues, issue: issue, merge_request: merge_request) }
it 'aggregates stats on issues' do
- data = subject.issuable_meta_data(Issue.all.limit(10), 'Issue', user)
+ data = described_class.new(user, Issue.all.limit(10)).data
expect(data.count).to eq(2)
expect(data[issue.id].upvotes).to eq(1)
@@ -48,7 +46,7 @@ describe Gitlab::IssuableMetadata do
let!(:note) { create(:note_on_merge_request, author: user, project: project, noteable: merge_request, note: "a comment on a MR") }
it 'aggregates stats on merge requests' do
- data = subject.issuable_meta_data(MergeRequest.all.limit(10), 'MergeRequest', user)
+ data = described_class.new(user, MergeRequest.all.limit(10)).data
expect(data.count).to eq(2)
expect(data[merge_request.id].upvotes).to eq(1)
diff --git a/spec/lib/gitlab/jira_import/base_importer_spec.rb b/spec/lib/gitlab/jira_import/base_importer_spec.rb
index ecaf3def589..cda491393e8 100644
--- a/spec/lib/gitlab/jira_import/base_importer_spec.rb
+++ b/spec/lib/gitlab/jira_import/base_importer_spec.rb
@@ -8,27 +8,13 @@ describe Gitlab::JiraImport::BaseImporter do
let(:project) { create(:project) }
describe 'with any inheriting class' do
- context 'when an error is returned from the project validation' do
- before do
- stub_feature_flags(jira_issue_import: false)
-
- allow(project).to receive(:validate_jira_import_settings!)
- .and_raise(Projects::ImportService::Error, 'Jira import feature is disabled.')
- end
-
- it 'raises exception' do
- expect { described_class.new(project) }.to raise_error(Projects::ImportService::Error, 'Jira import feature is disabled.')
- end
- end
-
context 'when project validation is ok' do
let!(:jira_service) { create(:jira_service, project: project) }
before do
- stub_feature_flags(jira_issue_import: true)
stub_jira_service_test
- allow(project).to receive(:validate_jira_import_settings!)
+ allow(Gitlab::JiraImport).to receive(:validate_project_settings!)
end
context 'when Jira service exists' do
diff --git a/spec/lib/gitlab/jira_import/issues_importer_spec.rb b/spec/lib/gitlab/jira_import/issues_importer_spec.rb
index 6cf06c20e19..0d790f49450 100644
--- a/spec/lib/gitlab/jira_import/issues_importer_spec.rb
+++ b/spec/lib/gitlab/jira_import/issues_importer_spec.rb
@@ -14,7 +14,6 @@ describe Gitlab::JiraImport::IssuesImporter do
subject { described_class.new(project) }
before do
- stub_feature_flags(jira_issue_import: true)
stub_jira_service_test
end
@@ -39,15 +38,22 @@ describe Gitlab::JiraImport::IssuesImporter do
end
context 'with results returned' do
- JiraIssue = Struct.new(:id)
- let_it_be(:jira_issues) { [JiraIssue.new(1), JiraIssue.new(2)] }
+ jira_issue = Struct.new(:id)
+ let_it_be(:jira_issues) { [jira_issue.new(1), jira_issue.new(2)] }
- def mock_issue_serializer(count)
+ def mock_issue_serializer(count, raise_exception_on_even_mocks: false)
serializer = instance_double(Gitlab::JiraImport::IssueSerializer, execute: { key: 'data' })
+ next_iid = project.issues.maximum(:iid).to_i
count.times do |i|
- expect(Gitlab::JiraImport::IssueSerializer).to receive(:new)
- .with(project, jira_issues[i], current_user.id, { iid: i + 1 }).and_return(serializer)
+ if raise_exception_on_even_mocks && i.even?
+ expect(Gitlab::JiraImport::IssueSerializer).to receive(:new)
+ .with(project, jira_issues[i], current_user.id, { iid: next_iid + 1 }).and_raise('Some error')
+ else
+ next_iid += 1
+ expect(Gitlab::JiraImport::IssueSerializer).to receive(:new)
+ .with(project, jira_issues[i], current_user.id, { iid: next_iid }).and_return(serializer)
+ end
end
end
@@ -70,21 +76,22 @@ describe Gitlab::JiraImport::IssuesImporter do
end
end
- context 'when there is more than one page of results' do
+ context 'when importing some issue raises an exception' do
before do
- stub_const("#{described_class.name}::BATCH_SIZE", 2)
+ stub_const("#{described_class.name}::BATCH_SIZE", 3)
end
- it 'schedules 3 import jobs' do
+ it 'schedules 2 import jobs' do
expect(subject).to receive(:fetch_issues).with(0).and_return([jira_issues[0], jira_issues[1]])
- expect(Gitlab::JiraImport::ImportIssueWorker).to receive(:perform_async).twice.times
- expect(Gitlab::Cache::Import::Caching).to receive(:set_add).twice.times.and_call_original
- expect(Gitlab::Cache::Import::Caching).to receive(:set_includes?).twice.times.and_call_original
- mock_issue_serializer(2)
+ expect(Gitlab::JiraImport::ImportIssueWorker).to receive(:perform_async).once
+ expect(Gitlab::Cache::Import::Caching).to receive(:set_add).once.and_call_original
+ expect(Gitlab::Cache::Import::Caching).to receive(:set_includes?).twice.and_call_original
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).once
+ mock_issue_serializer(2, raise_exception_on_even_mocks: true)
job_waiter = subject.execute
- expect(job_waiter.jobs_remaining).to eq(2)
+ expect(job_waiter.jobs_remaining).to eq(1)
expect(Gitlab::JiraImport.get_issues_next_start_at(project.id)).to eq(2)
end
end
diff --git a/spec/lib/gitlab/jira_import/labels_importer_spec.rb b/spec/lib/gitlab/jira_import/labels_importer_spec.rb
index 67eb541d376..19661ff4e73 100644
--- a/spec/lib/gitlab/jira_import/labels_importer_spec.rb
+++ b/spec/lib/gitlab/jira_import/labels_importer_spec.rb
@@ -15,7 +15,6 @@ describe Gitlab::JiraImport::LabelsImporter do
subject { importer.execute }
before do
- stub_feature_flags(jira_issue_import: true)
stub_const('Gitlab::JiraImport::LabelsImporter::MAX_LABELS', 2)
end
diff --git a/spec/lib/gitlab/jira_import_spec.rb b/spec/lib/gitlab/jira_import_spec.rb
index c5c3d6ef4b9..5b95891c97e 100644
--- a/spec/lib/gitlab/jira_import_spec.rb
+++ b/spec/lib/gitlab/jira_import_spec.rb
@@ -5,9 +5,109 @@ require 'spec_helper'
describe Gitlab::JiraImport do
let(:project_id) { 321 }
+ describe '.validate_project_settings!' do
+ include JiraServiceHelper
+
+ let_it_be(:project, reload: true) { create(:project) }
+ let(:additional_params) { {} }
+
+ subject { described_class.validate_project_settings!(project, additional_params) }
+
+ shared_examples 'raise Jira import error' do |message|
+ it 'raises a Jira import error' do
+ expect { subject }.to raise_error(Projects::ImportService::Error, message)
+ end
+ end
+
+ shared_examples 'jira configuration base checks' do
+ context 'with configuration_check set to false' do
+ before do
+ additional_params[:configuration_check] = false
+ end
+
+ it 'does not raise Jira integration error' do
+ expect { subject }.not_to raise_error
+ end
+ end
+
+ context 'when Jira service was not setup' do
+ it_behaves_like 'raise Jira import error', 'Jira integration not configured.'
+ end
+
+ context 'when Jira service exists' do
+ let!(:jira_service) { create(:jira_service, project: project, active: true) }
+
+ context 'when Jira connection is not valid' do
+ before do
+ WebMock.stub_request(:get, 'https://jira.example.com/rest/api/2/serverInfo')
+ .to_raise(JIRA::HTTPError.new(double(message: 'Some failure.')))
+ end
+
+ it_behaves_like 'raise Jira import error', 'Unable to connect to the Jira instance. Please check your Jira integration configuration.'
+ end
+ end
+ end
+
+ before do
+ stub_jira_service_test
+ end
+
+ context 'without user param' do
+ it_behaves_like 'jira configuration base checks'
+
+ context 'when jira connection is valid' do
+ let!(:jira_service) { create(:jira_service, project: project, active: true) }
+
+ it 'does not raise any error' do
+ expect { subject }.not_to raise_error
+ end
+ end
+ end
+
+ context 'with user param provided' do
+ let_it_be(:user) { create(:user) }
+
+ let(:additional_params) { { user: user } }
+
+ context 'when user has permission to run import' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ it_behaves_like 'jira configuration base checks'
+
+ context 'when jira service is configured' do
+ let!(:jira_service) { create(:jira_service, project: project, active: true) }
+
+ context 'when issues feature is disabled' do
+ let_it_be(:project, reload: true) { create(:project, :issues_disabled) }
+
+ it_behaves_like 'raise Jira import error', 'Cannot import because issues are not available in this project.'
+ end
+
+ context 'when everything is ok' do
+ it 'does not raise any error' do
+ expect { subject }.not_to raise_error
+ end
+ end
+ end
+ end
+
+ context 'when user does not have permissions to run the import' do
+ before do
+ create(:jira_service, project: project, active: true)
+
+ project.add_developer(user)
+ end
+
+ it_behaves_like 'raise Jira import error', 'You do not have permissions to run the import.'
+ end
+ end
+ end
+
describe '.jira_issue_cache_key' do
it 'returns cache key for Jira issue imported to given project' do
- expect(described_class.jira_issue_cache_key(project_id, 'DEMO-123')).to eq("jira-import/items-mapper/#{project_id}/issues/DEMO-123")
+ expect(described_class.jira_item_cache_key(project_id, 'DEMO-123', :issues)).to eq("jira-import/items-mapper/#{project_id}/issues/DEMO-123")
end
end
@@ -44,6 +144,29 @@ describe Gitlab::JiraImport do
end
end
+ describe '.cache_users_mapping', :clean_gitlab_redis_cache do
+ let(:data) { { 'user1' => '456', 'user234' => '23' } }
+
+ it 'stores the data correctly' do
+ described_class.cache_users_mapping(project_id, data)
+
+ expect(Gitlab::Cache::Import::Caching.read("jira-import/items-mapper/#{project_id}/users/user1")).to eq('456')
+ expect(Gitlab::Cache::Import::Caching.read("jira-import/items-mapper/#{project_id}/users/user234")).to eq('23')
+ end
+ end
+
+ describe '.get_user_mapping', :clean_gitlab_redis_cache do
+ it 'reads the data correctly' do
+ Gitlab::Cache::Import::Caching.write("jira-import/items-mapper/#{project_id}/users/user-123", '456')
+
+ expect(described_class.get_user_mapping(project_id, 'user-123')).to eq(456)
+ end
+
+ it 'returns nil if value not found' do
+ expect(described_class.get_user_mapping(project_id, 'user-123')).to be_nil
+ end
+ end
+
describe '.store_issues_next_started_at', :clean_gitlab_redis_cache do
it 'stores nil value' do
described_class.store_issues_next_started_at(project_id, nil)
diff --git a/spec/lib/gitlab/kubernetes/helm/api_spec.rb b/spec/lib/gitlab/kubernetes/helm/api_spec.rb
index 1f925fd45af..0e4179d5887 100644
--- a/spec/lib/gitlab/kubernetes/helm/api_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/api_spec.rb
@@ -17,7 +17,8 @@ describe Gitlab::Kubernetes::Helm::API do
name: application_name,
chart: 'chart-name',
rbac: rbac,
- files: files
+ files: files,
+ local_tiller_enabled: true
)
end
@@ -142,7 +143,7 @@ describe Gitlab::Kubernetes::Helm::API do
end
context 'with a service account' do
- let(:command) { Gitlab::Kubernetes::Helm::InitCommand.new(name: application_name, files: files, rbac: rbac) }
+ let(:command) { Gitlab::Kubernetes::Helm::InitCommand.new(name: application_name, files: files, rbac: rbac, local_tiller_enabled: true) }
context 'rbac-enabled cluster' do
let(:rbac) { true }
diff --git a/spec/lib/gitlab/kubernetes/helm/base_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/base_command_spec.rb
index 2a4a911cf38..f9bcb8abdb1 100644
--- a/spec/lib/gitlab/kubernetes/helm/base_command_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/base_command_spec.rb
@@ -11,25 +11,14 @@ describe Gitlab::Kubernetes::Helm::BaseCommand do
let(:rbac) { false }
let(:test_class) do
- Class.new do
- include Gitlab::Kubernetes::Helm::BaseCommand
-
+ Class.new(Gitlab::Kubernetes::Helm::BaseCommand) do
def initialize(rbac)
- @rbac = rbac
- end
-
- def name
- "test-class-name"
- end
-
- def rbac?
- @rbac
- end
-
- def files
- {
- some: 'value'
- }
+ super(
+ name: 'test-class-name',
+ rbac: rbac,
+ files: { some: 'value' },
+ local_tiller_enabled: false
+ )
end
end
end
diff --git a/spec/lib/gitlab/kubernetes/helm/delete_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/delete_command_spec.rb
index 95d60c18d56..2bf8b294821 100644
--- a/spec/lib/gitlab/kubernetes/helm/delete_command_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/delete_command_spec.rb
@@ -3,11 +3,12 @@
require 'spec_helper'
describe Gitlab::Kubernetes::Helm::DeleteCommand do
- subject(:delete_command) { described_class.new(name: app_name, rbac: rbac, files: files) }
+ subject(:delete_command) { described_class.new(name: app_name, rbac: rbac, files: files, local_tiller_enabled: local_tiller_enabled) }
let(:app_name) { 'app-name' }
let(:rbac) { true }
let(:files) { {} }
+ let(:local_tiller_enabled) { true }
it_behaves_like 'helm command generator' do
let(:commands) do
@@ -21,9 +22,7 @@ describe Gitlab::Kubernetes::Helm::DeleteCommand do
end
context 'tillerless feature disabled' do
- before do
- stub_feature_flags(managed_apps_local_tiller: false)
- end
+ let(:local_tiller_enabled) { false }
it_behaves_like 'helm command generator' do
let(:commands) do
diff --git a/spec/lib/gitlab/kubernetes/helm/init_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/init_command_spec.rb
index 05d9b63d12b..61b8eb30b42 100644
--- a/spec/lib/gitlab/kubernetes/helm/init_command_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/init_command_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::Kubernetes::Helm::InitCommand do
- subject(:init_command) { described_class.new(name: application.name, files: files, rbac: rbac) }
+ subject(:init_command) { described_class.new(name: application.name, files: files, rbac: rbac, local_tiller_enabled: false) }
let(:application) { create(:clusters_applications_helm) }
let(:rbac) { false }
diff --git a/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb
index abd29e97505..6fc91300f5b 100644
--- a/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb
@@ -12,7 +12,8 @@ describe Gitlab::Kubernetes::Helm::InstallCommand do
version: version,
repository: repository,
preinstall: preinstall,
- postinstall: postinstall
+ postinstall: postinstall,
+ local_tiller_enabled: local_tiller_enabled
)
end
@@ -22,6 +23,7 @@ describe Gitlab::Kubernetes::Helm::InstallCommand do
let(:version) { '1.2.3' }
let(:preinstall) { nil }
let(:postinstall) { nil }
+ let(:local_tiller_enabled) { true }
it_behaves_like 'helm command generator' do
let(:commands) do
@@ -51,9 +53,7 @@ describe Gitlab::Kubernetes::Helm::InstallCommand do
end
context 'tillerless feature disabled' do
- before do
- stub_feature_flags(managed_apps_local_tiller: false)
- end
+ let(:local_tiller_enabled) { false }
let(:tls_flags) do
<<~EOS.squish
diff --git a/spec/lib/gitlab/kubernetes/helm/patch_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/patch_command_spec.rb
index eee842fa7d6..8d965a25f84 100644
--- a/spec/lib/gitlab/kubernetes/helm/patch_command_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/patch_command_spec.rb
@@ -7,6 +7,7 @@ describe Gitlab::Kubernetes::Helm::PatchCommand do
let(:repository) { 'https://repository.example.com' }
let(:rbac) { false }
let(:version) { '1.2.3' }
+ let(:local_tiller_enabled) { true }
subject(:patch_command) do
described_class.new(
@@ -15,14 +16,13 @@ describe Gitlab::Kubernetes::Helm::PatchCommand do
rbac: rbac,
files: files,
version: version,
- repository: repository
+ repository: repository,
+ local_tiller_enabled: local_tiller_enabled
)
end
context 'when local tiller feature is disabled' do
- before do
- stub_feature_flags(managed_apps_local_tiller: false)
- end
+ let(:local_tiller_enabled) { false }
let(:tls_flags) do
<<~EOS.squish
diff --git a/spec/lib/gitlab/kubernetes/helm/reset_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/reset_command_spec.rb
index 981bb4e4abf..3773c428713 100644
--- a/spec/lib/gitlab/kubernetes/helm/reset_command_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/reset_command_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::Kubernetes::Helm::ResetCommand do
- subject(:reset_command) { described_class.new(name: name, rbac: rbac, files: files) }
+ subject(:reset_command) { described_class.new(name: name, rbac: rbac, files: files, local_tiller_enabled: false) }
let(:rbac) { true }
let(:name) { 'helm' }
diff --git a/spec/lib/gitlab/kubernetes/network_policy_spec.rb b/spec/lib/gitlab/kubernetes/network_policy_spec.rb
index f23d215a9a1..5a920d78436 100644
--- a/spec/lib/gitlab/kubernetes/network_policy_spec.rb
+++ b/spec/lib/gitlab/kubernetes/network_policy_spec.rb
@@ -39,28 +39,30 @@ describe Gitlab::Kubernetes::NetworkPolicy do
describe '.from_yaml' do
let(:manifest) do
- <<-POLICY
-apiVersion: networking.k8s.io/v1
-kind: NetworkPolicy
-metadata:
- name: example-name
- namespace: example-namespace
-spec:
- podSelector:
- matchLabels:
- role: db
- policyTypes:
- - Ingress
- ingress:
- - from:
- - namespaceSelector:
- matchLabels:
- project: myproject
+ <<~POLICY
+ apiVersion: networking.k8s.io/v1
+ kind: NetworkPolicy
+ metadata:
+ name: example-name
+ namespace: example-namespace
+ labels:
+ app: foo
+ spec:
+ podSelector:
+ matchLabels:
+ role: db
+ policyTypes:
+ - Ingress
+ ingress:
+ - from:
+ - namespaceSelector:
+ matchLabels:
+ project: myproject
POLICY
end
let(:resource) do
::Kubeclient::Resource.new(
- metadata: { name: name, namespace: namespace },
+ metadata: { name: name, namespace: namespace, labels: { app: 'foo' } },
spec: { podSelector: pod_selector, policyTypes: %w(Ingress), ingress: ingress, egress: nil }
)
end
@@ -83,20 +85,20 @@ spec:
context 'with manifest without metadata' do
let(:manifest) do
- <<-POLICY
-apiVersion: networking.k8s.io/v1
-kind: NetworkPolicy
-spec:
- podSelector:
- matchLabels:
- role: db
- policyTypes:
- - Ingress
- ingress:
- - from:
- - namespaceSelector:
- matchLabels:
- project: myproject
+ <<~POLICY
+ apiVersion: networking.k8s.io/v1
+ kind: NetworkPolicy
+ spec:
+ podSelector:
+ matchLabels:
+ role: db
+ policyTypes:
+ - Ingress
+ ingress:
+ - from:
+ - namespaceSelector:
+ matchLabels:
+ project: myproject
POLICY
end
@@ -105,12 +107,12 @@ spec:
context 'with manifest without spec' do
let(:manifest) do
- <<-POLICY
-apiVersion: networking.k8s.io/v1
-kind: NetworkPolicy
-metadata:
- name: example-name
- namespace: example-namespace
+ <<~POLICY
+ apiVersion: networking.k8s.io/v1
+ kind: NetworkPolicy
+ metadata:
+ name: example-name
+ namespace: example-namespace
POLICY
end
@@ -119,24 +121,24 @@ metadata:
context 'with disallowed class' do
let(:manifest) do
- <<-POLICY
-apiVersion: networking.k8s.io/v1
-kind: NetworkPolicy
-metadata:
- name: example-name
- namespace: example-namespace
- creationTimestamp: 2020-04-14T00:08:30Z
-spec:
- podSelector:
- matchLabels:
- role: db
- policyTypes:
- - Ingress
- ingress:
- - from:
- - namespaceSelector:
- matchLabels:
- project: myproject
+ <<~POLICY
+ apiVersion: networking.k8s.io/v1
+ kind: NetworkPolicy
+ metadata:
+ name: example-name
+ namespace: example-namespace
+ creationTimestamp: 2020-04-14T00:08:30Z
+ spec:
+ podSelector:
+ matchLabels:
+ role: db
+ policyTypes:
+ - Ingress
+ ingress:
+ - from:
+ - namespaceSelector:
+ matchLabels:
+ project: myproject
POLICY
end
@@ -147,13 +149,16 @@ spec:
describe '.from_resource' do
let(:resource) do
::Kubeclient::Resource.new(
- metadata: { name: name, namespace: namespace, creationTimestamp: '2020-04-14T00:08:30Z', resourceVersion: '4990' },
+ metadata: {
+ name: name, namespace: namespace, creationTimestamp: '2020-04-14T00:08:30Z',
+ labels: { app: 'foo' }, resourceVersion: '4990'
+ },
spec: { podSelector: pod_selector, policyTypes: %w(Ingress), ingress: ingress, egress: nil }
)
end
let(:generated_resource) do
::Kubeclient::Resource.new(
- metadata: { name: name, namespace: namespace },
+ metadata: { name: name, namespace: namespace, labels: { app: 'foo' } },
spec: { podSelector: pod_selector, policyTypes: %w(Ingress), ingress: ingress, egress: nil }
)
end
@@ -213,7 +218,9 @@ spec:
metadata: { name: name, namespace: namespace },
spec: { podSelector: pod_selector, policyTypes: %w(Ingress Egress), ingress: ingress, egress: egress }
}.deep_stringify_keys
- )
+ ),
+ is_autodevops: false,
+ is_enabled: true
}
end
@@ -221,4 +228,167 @@ spec:
it { is_expected.to eq(json_policy) }
end
+
+ describe '#autodevops?' do
+ subject { policy.autodevops? }
+
+ let(:chart) { nil }
+ let(:policy) do
+ described_class.new(
+ name: name,
+ namespace: namespace,
+ labels: { chart: chart },
+ pod_selector: pod_selector,
+ ingress: ingress
+ )
+ end
+
+ it { is_expected.to be false }
+
+ context 'with non-autodevops chart' do
+ let(:chart) { 'foo' }
+
+ it { is_expected.to be false }
+ end
+
+ context 'with autodevops chart' do
+ let(:chart) { 'auto-deploy-app-0.6.0' }
+
+ it { is_expected.to be true }
+ end
+ end
+
+ describe '#enabled?' do
+ subject { policy.enabled? }
+
+ let(:pod_selector) { nil }
+ let(:policy) do
+ described_class.new(
+ name: name,
+ namespace: namespace,
+ pod_selector: pod_selector,
+ ingress: ingress
+ )
+ end
+
+ it { is_expected.to be true }
+
+ context 'with empty pod_selector' do
+ let(:pod_selector) { {} }
+
+ it { is_expected.to be true }
+ end
+
+ context 'with nil matchLabels in pod_selector' do
+ let(:pod_selector) { { matchLabels: nil } }
+
+ it { is_expected.to be true }
+ end
+
+ context 'with empty matchLabels in pod_selector' do
+ let(:pod_selector) { { matchLabels: {} } }
+
+ it { is_expected.to be true }
+ end
+
+ context 'with disabled_by label in matchLabels in pod_selector' do
+ let(:pod_selector) do
+ { matchLabels: { Gitlab::Kubernetes::NetworkPolicy::DISABLED_BY_LABEL => 'gitlab' } }
+ end
+
+ it { is_expected.to be false }
+ end
+ end
+
+ describe '#enable' do
+ subject { policy.enabled? }
+
+ let(:pod_selector) { nil }
+ let(:policy) do
+ described_class.new(
+ name: name,
+ namespace: namespace,
+ pod_selector: pod_selector,
+ ingress: ingress
+ )
+ end
+
+ before do
+ policy.enable
+ end
+
+ it { is_expected.to be true }
+
+ context 'with empty pod_selector' do
+ let(:pod_selector) { {} }
+
+ it { is_expected.to be true }
+ end
+
+ context 'with nil matchLabels in pod_selector' do
+ let(:pod_selector) { { matchLabels: nil } }
+
+ it { is_expected.to be true }
+ end
+
+ context 'with empty matchLabels in pod_selector' do
+ let(:pod_selector) { { matchLabels: {} } }
+
+ it { is_expected.to be true }
+ end
+
+ context 'with disabled_by label in matchLabels in pod_selector' do
+ let(:pod_selector) do
+ { matchLabels: { Gitlab::Kubernetes::NetworkPolicy::DISABLED_BY_LABEL => 'gitlab' } }
+ end
+
+ it { is_expected.to be true }
+ end
+ end
+
+ describe '#disable' do
+ subject { policy.enabled? }
+
+ let(:pod_selector) { nil }
+ let(:policy) do
+ described_class.new(
+ name: name,
+ namespace: namespace,
+ pod_selector: pod_selector,
+ ingress: ingress
+ )
+ end
+
+ before do
+ policy.disable
+ end
+
+ it { is_expected.to be false }
+
+ context 'with empty pod_selector' do
+ let(:pod_selector) { {} }
+
+ it { is_expected.to be false }
+ end
+
+ context 'with nil matchLabels in pod_selector' do
+ let(:pod_selector) { { matchLabels: nil } }
+
+ it { is_expected.to be false }
+ end
+
+ context 'with empty matchLabels in pod_selector' do
+ let(:pod_selector) { { matchLabels: {} } }
+
+ it { is_expected.to be false }
+ end
+
+ context 'with disabled_by label in matchLabels in pod_selector' do
+ let(:pod_selector) do
+ { matchLabels: { Gitlab::Kubernetes::NetworkPolicy::DISABLED_BY_LABEL => 'gitlab' } }
+ end
+
+ it { is_expected.to be false }
+ end
+ end
end
diff --git a/spec/lib/gitlab/lfs_token_spec.rb b/spec/lib/gitlab/lfs_token_spec.rb
index b2fd7bdd307..58a3767b242 100644
--- a/spec/lib/gitlab/lfs_token_spec.rb
+++ b/spec/lib/gitlab/lfs_token_spec.rb
@@ -3,6 +3,13 @@
require 'spec_helper'
describe Gitlab::LfsToken, :clean_gitlab_redis_shared_state do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:deploy_key) { create(:deploy_key) }
+
+ let(:actor) { user }
+ let(:lfs_token) { described_class.new(actor) }
+
describe '#token' do
shared_examples 'a valid LFS token' do
it 'returns a computed token' do
@@ -10,14 +17,11 @@ describe Gitlab::LfsToken, :clean_gitlab_redis_shared_state do
expect(token).not_to be_nil
expect(token).to be_a String
- expect(described_class.new(actor).token_valid?(token)).to be_truthy
+ expect(described_class.new(actor).token_valid?(token)).to be true
end
end
context 'when the actor is a user' do
- let(:actor) { create(:user, username: 'test_user_lfs_1') }
- let(:lfs_token) { described_class.new(actor) }
-
it_behaves_like 'a valid LFS token'
it 'returns the correct username' do
@@ -30,9 +34,7 @@ describe Gitlab::LfsToken, :clean_gitlab_redis_shared_state do
end
context 'when the actor is a key' do
- let(:user) { create(:user, username: 'test_user_lfs_2') }
- let(:actor) { create(:key, user: user) }
- let(:lfs_token) { described_class.new(actor) }
+ let_it_be(:actor) { create(:key, user: user) }
it_behaves_like 'a valid LFS token'
@@ -46,10 +48,8 @@ describe Gitlab::LfsToken, :clean_gitlab_redis_shared_state do
end
context 'when the actor is a deploy key' do
+ let(:actor) { deploy_key }
let(:actor_id) { 1 }
- let(:actor) { create(:deploy_key) }
- let(:project) { create(:project) }
- let(:lfs_token) { described_class.new(actor) }
before do
allow(actor).to receive(:id).and_return(actor_id)
@@ -74,45 +74,45 @@ describe Gitlab::LfsToken, :clean_gitlab_redis_shared_state do
end
describe '#token_valid?' do
- let(:actor) { create(:user, username: 'test_user_lfs_1') }
- let(:lfs_token) { described_class.new(actor) }
-
context 'where the token is invalid' do
context "because it's junk" do
it 'returns false' do
- expect(lfs_token.token_valid?('junk')).to be_falsey
+ expect(lfs_token.token_valid?('junk')).to be false
end
end
context "because it's been fiddled with" do
it 'returns false' do
fiddled_token = lfs_token.token.tap { |token| token[0] = 'E' }
- expect(lfs_token.token_valid?(fiddled_token)).to be_falsey
+
+ expect(lfs_token.token_valid?(fiddled_token)).to be false
end
end
- context "because it was generated with a different secret" do
+ context 'because it was generated with a different secret' do
it 'returns false' do
different_actor = create(:user, username: 'test_user_lfs_2')
different_secret_token = described_class.new(different_actor).token
- expect(lfs_token.token_valid?(different_secret_token)).to be_falsey
+
+ expect(lfs_token.token_valid?(different_secret_token)).to be false
end
end
context "because it's expired" do
it 'returns false' do
expired_token = lfs_token.token
- # Needs to be at least 1860 seconds, because the default expiry is
- # 1800 seconds with an additional 60 second leeway.
- Timecop.freeze(Time.now + 1865) do
- expect(lfs_token.token_valid?(expired_token)).to be_falsey
+
+ # Needs to be at least LfsToken::DEFAULT_EXPIRE_TIME + 60 seconds
+ # in order to check whether it is valid 1 minute after it has expired
+ Timecop.freeze(Time.now + described_class::DEFAULT_EXPIRE_TIME + 60) do
+ expect(lfs_token.token_valid?(expired_token)).to be false
end
end
end
context 'where the token is valid' do
it 'returns true' do
- expect(lfs_token.token_valid?(lfs_token.token)).to be_truthy
+ expect(lfs_token.token_valid?(lfs_token.token)).to be true
end
end
@@ -121,7 +121,7 @@ describe Gitlab::LfsToken, :clean_gitlab_redis_shared_state do
let(:actor) { create(:user, :blocked) }
it 'returns false' do
- expect(lfs_token.token_valid?(lfs_token.token)).to be_falsey
+ expect(lfs_token.token_valid?(lfs_token.token)).to be false
end
end
@@ -129,7 +129,7 @@ describe Gitlab::LfsToken, :clean_gitlab_redis_shared_state do
let(:actor) { create(:user, password_expires_at: 1.minute.ago) }
it 'returns false' do
- expect(lfs_token.token_valid?(lfs_token.token)).to be_falsey
+ expect(lfs_token.token_valid?(lfs_token.token)).to be false
end
end
end
@@ -143,7 +143,7 @@ describe Gitlab::LfsToken, :clean_gitlab_redis_shared_state do
let(:actor) { create(:user, :blocked) }
it 'returns false' do
- expect(lfs_token.token_valid?(lfs_token.token)).to be_falsey
+ expect(lfs_token.token_valid?(lfs_token.token)).to be false
end
end
@@ -151,7 +151,7 @@ describe Gitlab::LfsToken, :clean_gitlab_redis_shared_state do
let(:actor) { create(:user, password_expires_at: 1.minute.ago) }
it 'returns true' do
- expect(lfs_token.token_valid?(lfs_token.token)).to be_truthy
+ expect(lfs_token.token_valid?(lfs_token.token)).to be true
end
end
end
@@ -159,27 +159,21 @@ describe Gitlab::LfsToken, :clean_gitlab_redis_shared_state do
end
describe '#deploy_key_pushable?' do
- let(:lfs_token) { described_class.new(actor) }
-
context 'when actor is not a DeployKey' do
- let(:actor) { create(:user) }
- let(:project) { create(:project) }
-
it 'returns false' do
- expect(lfs_token.deploy_key_pushable?(project)).to be_falsey
+ expect(lfs_token.deploy_key_pushable?(project)).to be false
end
end
context 'when actor is a DeployKey' do
- let(:deploy_keys_project) { create(:deploy_keys_project, can_push: can_push) }
- let(:project) { deploy_keys_project.project }
+ let(:deploy_keys_project) { create(:deploy_keys_project, project: project, can_push: can_push) }
let(:actor) { deploy_keys_project.deploy_key }
context 'but the DeployKey cannot push to the project' do
let(:can_push) { false }
it 'returns false' do
- expect(lfs_token.deploy_key_pushable?(project)).to be_falsey
+ expect(lfs_token.deploy_key_pushable?(project)).to be false
end
end
@@ -187,27 +181,23 @@ describe Gitlab::LfsToken, :clean_gitlab_redis_shared_state do
let(:can_push) { true }
it 'returns true' do
- expect(lfs_token.deploy_key_pushable?(project)).to be_truthy
+ expect(lfs_token.deploy_key_pushable?(project)).to be true
end
end
end
end
describe '#type' do
- let(:lfs_token) { described_class.new(actor) }
-
context 'when actor is not a User' do
- let(:actor) { create(:deploy_key) }
+ let(:actor) { deploy_key }
- it 'returns false' do
+ it 'returns :lfs_deploy_token type' do
expect(lfs_token.type).to eq(:lfs_deploy_token)
end
end
context 'when actor is a User' do
- let(:actor) { create(:user) }
-
- it 'returns false' do
+ it 'returns :lfs_token type' do
expect(lfs_token.type).to eq(:lfs_token)
end
end
@@ -215,8 +205,6 @@ describe Gitlab::LfsToken, :clean_gitlab_redis_shared_state do
describe '#authentication_payload' do
it 'returns a Hash designed for gitlab-shell' do
- actor = create(:user)
- lfs_token = described_class.new(actor)
repo_http_path = 'http://localhost/user/repo.git'
authentication_payload = lfs_token.authentication_payload(repo_http_path)
diff --git a/spec/lib/gitlab/lograge/custom_options_spec.rb b/spec/lib/gitlab/lograge/custom_options_spec.rb
index 7ae8baa31b5..ebf150d21ef 100644
--- a/spec/lib/gitlab/lograge/custom_options_spec.rb
+++ b/spec/lib/gitlab/lograge/custom_options_spec.rb
@@ -13,21 +13,16 @@ describe Gitlab::Lograge::CustomOptions do
}
end
- let(:event) do
- ActiveSupport::Notifications::Event.new(
- 'test',
- 1,
- 2,
- 'transaction_id',
- {
- params: params,
- user_id: 'test',
- cf_ray: SecureRandom.hex,
- cf_request_id: SecureRandom.hex,
- metadata: { 'meta.user' => 'jane.doe' }
- }
- )
+ let(:event_payload) do
+ {
+ params: params,
+ user_id: 'test',
+ cf_ray: SecureRandom.hex,
+ cf_request_id: SecureRandom.hex,
+ metadata: { 'meta.user' => 'jane.doe' }
+ }
end
+ let(:event) { ActiveSupport::Notifications::Event.new('test', 1, 2, 'transaction_id', event_payload) }
subject { described_class.call(event) }
@@ -49,6 +44,18 @@ describe Gitlab::Lograge::CustomOptions do
end
end
+ context 'with transaction' do
+ let(:transaction) { Gitlab::Metrics::WebTransaction.new({}) }
+
+ before do
+ allow(Gitlab::Metrics::Transaction).to receive(:current).and_return(transaction)
+ end
+
+ it 'adds db counters' do
+ expect(subject).to include(:db_count, :db_write_count, :db_cached_count)
+ end
+ end
+
it 'adds the user id' do
expect(subject[:user_id]).to eq('test')
end
@@ -63,19 +70,23 @@ describe Gitlab::Lograge::CustomOptions do
end
context 'when metadata is missing' do
- let(:event) do
- ActiveSupport::Notifications::Event.new(
- 'test',
- 1,
- 2,
- 'transaction_id',
- { params: {} }
- )
- end
+ let(:event_payload) { { params: {} } }
it 'does not break' do
expect { subject }.not_to raise_error
end
end
+
+ context 'when correlation_id is overridden' do
+ let(:correlation_id_key) { Labkit::Correlation::CorrelationId::LOG_KEY }
+
+ before do
+ event_payload[correlation_id_key] = '123456'
+ end
+
+ it 'sets the overridden value' do
+ expect(subject[correlation_id_key]).to eq('123456')
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/looping_batcher_spec.rb b/spec/lib/gitlab/looping_batcher_spec.rb
deleted file mode 100644
index b03e969c1e7..00000000000
--- a/spec/lib/gitlab/looping_batcher_spec.rb
+++ /dev/null
@@ -1,71 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe Gitlab::LoopingBatcher, :use_clean_rails_memory_store_caching do
- describe '#next_range!' do
- let(:model_class) { LfsObject }
- let(:key) { 'looping_batcher_spec' }
- let(:batch_size) { 2 }
-
- subject { described_class.new(model_class, key: key, batch_size: batch_size).next_range! }
-
- context 'when there are no records' do
- it { is_expected.to be_nil }
- end
-
- context 'when there are records' do
- let!(:records) { create_list(model_class.underscore, 3) }
-
- context 'when it has never been called before' do
- it { is_expected.to be_a Range }
-
- it 'starts from the beginning' do
- expect(subject.first).to eq(1)
- end
-
- it 'ends at a full batch' do
- expect(subject.last).to eq(records.second.id)
- end
-
- context 'when the batch size is greater than the number of records' do
- let(:batch_size) { 5 }
-
- it 'ends at the last ID' do
- expect(subject.last).to eq(records.last.id)
- end
- end
- end
-
- context 'when it was called before' do
- context 'when the previous batch included the end of the table' do
- before do
- described_class.new(model_class, key: key, batch_size: model_class.count).next_range!
- end
-
- it 'starts from the beginning' do
- expect(subject).to eq(1..records.second.id)
- end
- end
-
- context 'when the previous batch did not include the end of the table' do
- before do
- described_class.new(model_class, key: key, batch_size: model_class.count - 1).next_range!
- end
-
- it 'starts after the previous batch' do
- expect(subject).to eq(records.last.id..records.last.id)
- end
- end
-
- context 'if cache is cleared' do
- it 'starts from the beginning' do
- Rails.cache.clear
-
- expect(subject).to eq(1..records.second.id)
- end
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/metrics/dashboard/finder_spec.rb b/spec/lib/gitlab/metrics/dashboard/finder_spec.rb
index d772b0c7a5f..2703339d89c 100644
--- a/spec/lib/gitlab/metrics/dashboard/finder_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/finder_spec.rb
@@ -142,7 +142,7 @@ describe Gitlab::Metrics::Dashboard::Finder, :use_clean_rails_memory_store_cachi
describe '.find_all_paths' do
let(:all_dashboard_paths) { described_class.find_all_paths(project) }
- let(:system_dashboard) { { path: system_dashboard_path, display_name: 'Default', default: true, system_dashboard: true } }
+ let(:system_dashboard) { { path: system_dashboard_path, display_name: 'Default dashboard', default: true, system_dashboard: true } }
it 'includes only the system dashboard by default' do
expect(all_dashboard_paths).to eq([system_dashboard])
@@ -163,7 +163,7 @@ describe Gitlab::Metrics::Dashboard::Finder, :use_clean_rails_memory_store_cachi
let(:self_monitoring_dashboard) do
{
path: self_monitoring_dashboard_path,
- display_name: 'Default',
+ display_name: 'Default dashboard',
default: true,
system_dashboard: false
}
diff --git a/spec/lib/gitlab/metrics/dashboard/processor_spec.rb b/spec/lib/gitlab/metrics/dashboard/processor_spec.rb
index b2fca0b5954..7250cefb9ff 100644
--- a/spec/lib/gitlab/metrics/dashboard/processor_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/processor_spec.rb
@@ -16,7 +16,8 @@ describe Gitlab::Metrics::Dashboard::Processor do
Gitlab::Metrics::Dashboard::Stages::EndpointInserter,
Gitlab::Metrics::Dashboard::Stages::Sorter,
Gitlab::Metrics::Dashboard::Stages::AlertsInserter,
- Gitlab::Metrics::Dashboard::Stages::PanelIdsInserter
+ Gitlab::Metrics::Dashboard::Stages::PanelIdsInserter,
+ Gitlab::Metrics::Dashboard::Stages::UrlValidator
]
end
@@ -201,6 +202,27 @@ describe Gitlab::Metrics::Dashboard::Processor do
it_behaves_like 'errors with message', 'Each "metric" must define one of :query or :query_range'
end
+
+ describe 'validating links' do
+ context 'when the links contain a blocked url' do
+ let(:dashboard_yml_links) do
+ [{ 'url' => 'http://1.1.1.1.1' }, { 'url' => 'https://gitlab.com' }]
+ end
+
+ let(:expected) do
+ [{ url: '' }, { url: 'https://gitlab.com' }]
+ end
+
+ before do
+ stub_env('RSPEC_ALLOW_INVALID_URLS', 'false')
+ dashboard_yml['links'] = dashboard_yml_links
+ end
+
+ it 'replaces the blocked url with an empty string' do
+ expect(dashboard[:links]).to eq(expected)
+ end
+ end
+ end
end
private
diff --git a/spec/lib/gitlab/metrics/elasticsearch_rack_middleware_spec.rb b/spec/lib/gitlab/metrics/elasticsearch_rack_middleware_spec.rb
new file mode 100644
index 00000000000..305768ef060
--- /dev/null
+++ b/spec/lib/gitlab/metrics/elasticsearch_rack_middleware_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Metrics::ElasticsearchRackMiddleware do
+ let(:app) { double(:app, call: 'app call result') }
+ let(:middleware) { described_class.new(app) }
+ let(:env) { {} }
+ let(:transaction) { Gitlab::Metrics::WebTransaction.new(env) }
+
+ describe '#call' do
+ let(:counter) { instance_double(Prometheus::Client::Counter, increment: nil) }
+ let(:histogram) { instance_double(Prometheus::Client::Histogram, observe: nil) }
+ let(:elasticsearch_query_time) { 0.1 }
+ let(:elasticsearch_requests_count) { 2 }
+
+ before do
+ allow(Gitlab::Instrumentation::ElasticsearchTransport).to receive(:query_time) { elasticsearch_query_time }
+ allow(Gitlab::Instrumentation::ElasticsearchTransport).to receive(:get_request_count) { elasticsearch_requests_count }
+
+ allow(Gitlab::Metrics).to receive(:counter)
+ .with(:http_elasticsearch_requests_total,
+ an_instance_of(String),
+ Gitlab::Metrics::Transaction::BASE_LABELS)
+ .and_return(counter)
+
+ allow(Gitlab::Metrics).to receive(:histogram)
+ .with(:http_elasticsearch_requests_duration_seconds,
+ an_instance_of(String),
+ Gitlab::Metrics::Transaction::BASE_LABELS,
+ described_class::HISTOGRAM_BUCKETS)
+ .and_return(histogram)
+
+ allow(Gitlab::Metrics).to receive(:current_transaction).and_return(transaction)
+ end
+
+ it 'calls the app' do
+ expect(middleware.call(env)).to eq('app call result')
+ end
+
+ it 'records elasticsearch metrics' do
+ expect(counter).to receive(:increment).with(transaction.labels, elasticsearch_requests_count)
+ expect(histogram).to receive(:observe).with(transaction.labels, elasticsearch_query_time)
+
+ middleware.call(env)
+ end
+
+ it 'records elasticsearch metrics if an error is raised' do
+ expect(counter).to receive(:increment).with(transaction.labels, elasticsearch_requests_count)
+ expect(histogram).to receive(:observe).with(transaction.labels, elasticsearch_query_time)
+
+ allow(app).to receive(:call).with(env).and_raise(StandardError)
+
+ expect { middleware.call(env) }.to raise_error(StandardError)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/method_call_spec.rb b/spec/lib/gitlab/metrics/method_call_spec.rb
index 229db67ec88..035d875258c 100644
--- a/spec/lib/gitlab/metrics/method_call_spec.rb
+++ b/spec/lib/gitlab/metrics/method_call_spec.rb
@@ -26,7 +26,7 @@ describe Gitlab::Metrics::MethodCall do
context 'prometheus instrumentation is enabled' do
before do
- Feature.get(:prometheus_metrics_method_instrumentation).enable
+ stub_feature_flags(prometheus_metrics_method_instrumentation: true)
end
around do |example|
@@ -50,7 +50,7 @@ describe Gitlab::Metrics::MethodCall do
context 'prometheus instrumentation is disabled' do
before do
- Feature.get(:prometheus_metrics_method_instrumentation).disable
+ stub_feature_flags(prometheus_metrics_method_instrumentation: false)
end
it 'observes using NullMetric' do
diff --git a/spec/lib/gitlab/metrics/methods_spec.rb b/spec/lib/gitlab/metrics/methods_spec.rb
index bca94deb1d8..5cf8db55142 100644
--- a/spec/lib/gitlab/metrics/methods_spec.rb
+++ b/spec/lib/gitlab/metrics/methods_spec.rb
@@ -104,7 +104,7 @@ describe Gitlab::Metrics::Methods do
context 'when feature is enabled' do
before do
- Feature.get(feature_name).enable
+ stub_feature_flags(feature_name => true)
end
it "initializes #{metric_type} metric" do
@@ -118,7 +118,7 @@ describe Gitlab::Metrics::Methods do
context 'when feature is disabled' do
before do
- Feature.get(feature_name).disable
+ stub_feature_flags(feature_name => false)
end
it "returns NullMetric" do
diff --git a/spec/lib/gitlab/metrics/redis_rack_middleware_spec.rb b/spec/lib/gitlab/metrics/redis_rack_middleware_spec.rb
new file mode 100644
index 00000000000..f2f36ccad20
--- /dev/null
+++ b/spec/lib/gitlab/metrics/redis_rack_middleware_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Metrics::RedisRackMiddleware do
+ let(:app) { double(:app) }
+ let(:middleware) { described_class.new(app) }
+ let(:env) { {} }
+ let(:transaction) { Gitlab::Metrics::WebTransaction.new(env) }
+
+ before do
+ allow(app).to receive(:call).with(env).and_return('wub wub')
+ end
+
+ describe '#call' do
+ let(:counter) { double(Prometheus::Client::Counter, increment: nil) }
+ let(:histogram) { double(Prometheus::Client::Histogram, observe: nil) }
+ let(:redis_query_time) { 0.1 }
+ let(:redis_requests_count) { 2 }
+
+ before do
+ allow(Gitlab::Instrumentation::Redis).to receive(:query_time) { redis_query_time }
+ allow(Gitlab::Instrumentation::Redis).to receive(:get_request_count) { redis_requests_count }
+
+ allow(Gitlab::Metrics).to receive(:counter)
+ .with(:http_redis_requests_total,
+ an_instance_of(String),
+ Gitlab::Metrics::Transaction::BASE_LABELS)
+ .and_return(counter)
+
+ allow(Gitlab::Metrics).to receive(:histogram)
+ .with(:http_redis_requests_duration_seconds,
+ an_instance_of(String),
+ Gitlab::Metrics::Transaction::BASE_LABELS,
+ Gitlab::Instrumentation::Redis::QUERY_TIME_BUCKETS)
+ .and_return(histogram)
+
+ allow(Gitlab::Metrics).to receive(:current_transaction).and_return(transaction)
+ end
+
+ it 'calls the app' do
+ expect(middleware.call(env)).to eq('wub wub')
+ end
+
+ it 'records redis metrics' do
+ expect(counter).to receive(:increment).with(transaction.labels, redis_requests_count)
+ expect(histogram).to receive(:observe).with(transaction.labels, redis_query_time)
+
+ middleware.call(env)
+ end
+
+ it 'records redis metrics if an error is raised' do
+ expect(counter).to receive(:increment).with(transaction.labels, redis_requests_count)
+ expect(histogram).to receive(:observe).with(transaction.labels, redis_query_time)
+
+ allow(app).to receive(:call).with(env).and_raise(StandardError)
+
+ expect { middleware.call(env) }.to raise_error(StandardError)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb
index fdf3b5bd045..087a0bfbac5 100644
--- a/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb
+++ b/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb
@@ -3,7 +3,17 @@
require 'spec_helper'
describe Gitlab::Metrics::Samplers::DatabaseSampler do
- subject { described_class.new(described_class::SAMPLING_INTERVAL_SECONDS) }
+ subject { described_class.new }
+
+ describe '#interval' do
+ it 'samples every five seconds by default' do
+ expect(subject.interval).to eq(5)
+ end
+
+ it 'samples at other intervals if requested' do
+ expect(described_class.new(11).interval).to eq(11)
+ end
+ end
describe '#sample' do
before do
diff --git a/spec/lib/gitlab/metrics/samplers/puma_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/puma_sampler_spec.rb
index 1097d26c320..df63f2ebe28 100644
--- a/spec/lib/gitlab/metrics/samplers/puma_sampler_spec.rb
+++ b/spec/lib/gitlab/metrics/samplers/puma_sampler_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::Metrics::Samplers::PumaSampler do
- subject { described_class.new(5) }
+ subject { described_class.new }
let(:null_metric) { double('null_metric', set: nil, observe: nil) }
@@ -11,6 +11,16 @@ describe Gitlab::Metrics::Samplers::PumaSampler do
allow(Gitlab::Metrics::NullMetric).to receive(:instance).and_return(null_metric)
end
+ describe '#interval' do
+ it 'samples every five seconds by default' do
+ expect(subject.interval).to eq(5)
+ end
+
+ it 'samples at other intervals if requested' do
+ expect(described_class.new(11).interval).to eq(11)
+ end
+ end
+
describe '#sample' do
before do
expect(subject).to receive(:puma_stats).and_return(puma_stats)
diff --git a/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb
index ead650a27f0..9fc8dd10922 100644
--- a/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb
+++ b/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
describe Gitlab::Metrics::Samplers::RubySampler do
- let(:sampler) { described_class.new(5) }
+ let(:sampler) { described_class.new }
let(:null_metric) { double('null_metric', set: nil, observe: nil) }
before do
@@ -18,6 +18,16 @@ describe Gitlab::Metrics::Samplers::RubySampler do
end
end
+ describe '#interval' do
+ it 'samples every sixty seconds by default' do
+ expect(subject.interval).to eq(60)
+ end
+
+ it 'samples at other intervals if requested' do
+ expect(described_class.new(11).interval).to eq(11)
+ end
+ end
+
describe '#sample' do
it 'adds a metric containing the process resident memory bytes' do
expect(Gitlab::Metrics::System).to receive(:memory_usage_rss).and_return(9000)
diff --git a/spec/lib/gitlab/metrics/sidekiq_middleware_spec.rb b/spec/lib/gitlab/metrics/sidekiq_middleware_spec.rb
index 67336cf83e6..ea9e8fa6795 100644
--- a/spec/lib/gitlab/metrics/sidekiq_middleware_spec.rb
+++ b/spec/lib/gitlab/metrics/sidekiq_middleware_spec.rb
@@ -10,17 +10,19 @@ describe Gitlab::Metrics::SidekiqMiddleware do
it 'tracks the transaction' do
worker = double(:worker, class: double(:class, name: 'TestWorker'))
- expect(Gitlab::Metrics::BackgroundTransaction).to receive(:new)
- .with(worker.class)
- .and_call_original
+ expect_next_instance_of(Gitlab::Metrics::BackgroundTransaction) do |transaction|
+ expect(transaction).to receive(:set).with(:sidekiq_queue_duration, instance_of(Float))
+ expect(transaction).to receive(:increment).with(:db_count, 1)
+ end
- expect_any_instance_of(Gitlab::Metrics::Transaction).to receive(:set)
- .with(:sidekiq_queue_duration, instance_of(Float))
+ middleware.call(worker, message, :test) do
+ ActiveRecord::Base.connection.execute('SELECT pg_sleep(0.1);')
+ end
- middleware.call(worker, message, :test) { nil }
+ expect(message).to include(:db_count, :db_write_count, :db_cached_count)
end
- it 'tracks the transaction (for messages without `enqueued_at`)' do
+ it 'tracks the transaction (for messages without `enqueued_at`)', :aggregate_failures do
worker = double(:worker, class: double(:class, name: 'TestWorker'))
expect(Gitlab::Metrics::BackgroundTransaction).to receive(:new)
@@ -33,7 +35,7 @@ describe Gitlab::Metrics::SidekiqMiddleware do
middleware.call(worker, {}, :test) { nil }
end
- it 'tracks any raised exceptions' do
+ it 'tracks any raised exceptions', :aggregate_failures do
worker = double(:worker, class: double(:class, name: 'TestWorker'))
expect_any_instance_of(Gitlab::Metrics::Transaction)
@@ -44,6 +46,8 @@ describe Gitlab::Metrics::SidekiqMiddleware do
expect { middleware.call(worker, message, :test) }
.to raise_error(RuntimeError)
+
+ expect(message).to include(:db_count, :db_write_count, :db_cached_count)
end
end
end
diff --git a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
index 1624cea8bda..a78d048908d 100644
--- a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
+++ b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
@@ -6,10 +6,15 @@ describe Gitlab::Metrics::Subscribers::ActiveRecord do
let(:env) { {} }
let(:transaction) { Gitlab::Metrics::WebTransaction.new(env) }
let(:subscriber) { described_class.new }
+ let(:payload) { { sql: 'SELECT * FROM users WHERE id = 10' } }
let(:event) do
- double(:event, duration: 2,
- payload: { sql: 'SELECT * FROM users WHERE id = 10' })
+ double(
+ :event,
+ name: 'sql.active_record',
+ duration: 2,
+ payload: payload
+ )
end
describe '#sql' do
@@ -23,6 +28,63 @@ describe Gitlab::Metrics::Subscribers::ActiveRecord do
end
describe 'with a current transaction' do
+ shared_examples 'read only query' do
+ it 'increments only db count value' do
+ allow(subscriber).to receive(:current_transaction)
+ .at_least(:once)
+ .and_return(transaction)
+
+ expect(transaction).to receive(:increment)
+ .with(:db_count, 1)
+
+ expect(transaction).not_to receive(:increment)
+ .with(:db_cached_count, 1)
+
+ expect(transaction).not_to receive(:increment)
+ .with(:db_write_count, 1)
+
+ subscriber.sql(event)
+ end
+ end
+
+ shared_examples 'write query' do
+ it 'increments db_write_count and db_count value' do
+ expect(subscriber).to receive(:current_transaction)
+ .at_least(:once)
+ .and_return(transaction)
+
+ expect(transaction).to receive(:increment)
+ .with(:db_count, 1)
+
+ expect(transaction).not_to receive(:increment)
+ .with(:db_cached_count, 1)
+
+ expect(transaction).to receive(:increment)
+ .with(:db_write_count, 1)
+
+ subscriber.sql(event)
+ end
+ end
+
+ shared_examples 'cached query' do
+ it 'increments db_cached_count and db_count value' do
+ expect(subscriber).to receive(:current_transaction)
+ .at_least(:once)
+ .and_return(transaction)
+
+ expect(transaction).to receive(:increment)
+ .with(:db_count, 1)
+
+ expect(transaction).to receive(:increment)
+ .with(:db_cached_count, 1)
+
+ expect(transaction).not_to receive(:increment)
+ .with(:db_write_count, 1)
+
+ subscriber.sql(event)
+ end
+ end
+
it 'observes sql_duration metric' do
expect(subscriber).to receive(:current_transaction)
.at_least(:once)
@@ -31,18 +93,66 @@ describe Gitlab::Metrics::Subscribers::ActiveRecord do
subscriber.sql(event)
end
- it 'increments the :sql_duration value' do
- expect(subscriber).to receive(:current_transaction)
- .at_least(:once)
- .and_return(transaction)
+ it_behaves_like 'read only query'
+
+ context 'with select for update sql event' do
+ let(:payload) { { sql: 'SELECT * FROM users WHERE id = 10 FOR UPDATE' } }
- expect(transaction).to receive(:increment)
- .with(:sql_duration, 2, false)
+ it_behaves_like 'write query'
+ end
- expect(transaction).to receive(:increment)
- .with(:sql_count, 1, false)
+ context 'with common table expression' do
+ context 'with insert' do
+ let(:payload) { { sql: 'WITH archived_rows AS (SELECT * FROM users WHERE archived = true) INSERT INTO products_log SELECT * FROM archived_rows' } }
- subscriber.sql(event)
+ it_behaves_like 'write query'
+ end
+
+ context 'with only select' do
+ let(:payload) { { sql: 'WITH active_milestones AS (SELECT COUNT(*), state FROM milestones GROUP BY state) SELECT * FROM active_milestones' } }
+
+ it_behaves_like 'read only query'
+ end
+ end
+
+ context 'with delete sql event' do
+ let(:payload) { { sql: 'DELETE FROM users where id = 10' } }
+
+ it_behaves_like 'write query'
+ end
+
+ context 'with insert sql event' do
+ let(:payload) { { sql: 'INSERT INTO project_ci_cd_settings (project_id) SELECT id FROM projects' } }
+
+ it_behaves_like 'write query'
+ end
+
+ context 'with update sql event' do
+ let(:payload) { { sql: 'UPDATE users SET admin = true WHERE id = 10' } }
+
+ it_behaves_like 'write query'
+ end
+
+ context 'with cached payload' do
+ let(:payload) do
+ {
+ sql: 'SELECT * FROM users WHERE id = 10',
+ cached: true
+ }
+ end
+
+ it_behaves_like 'cached query'
+ end
+
+ context 'with cached payload name' do
+ let(:payload) do
+ {
+ sql: 'SELECT * FROM users WHERE id = 10',
+ name: 'CACHE'
+ }
+ end
+
+ it_behaves_like 'cached query'
end
context 'events are internal to Rails or irrelevant' do
diff --git a/spec/lib/gitlab/metrics/transaction_spec.rb b/spec/lib/gitlab/metrics/transaction_spec.rb
index cf46fa3e91c..693ec3cb7e7 100644
--- a/spec/lib/gitlab/metrics/transaction_spec.rb
+++ b/spec/lib/gitlab/metrics/transaction_spec.rb
@@ -65,18 +65,16 @@ describe Gitlab::Metrics::Transaction do
describe '#add_event' do
let(:prometheus_metric) { instance_double(Prometheus::Client::Counter, increment: nil) }
- before do
- allow(described_class).to receive(:transaction_metric).and_return(prometheus_metric)
- end
-
it 'adds a metric' do
expect(prometheus_metric).to receive(:increment)
+ expect(described_class).to receive(:fetch_metric).with(:counter, :gitlab_transaction_event_meow_total).and_return(prometheus_metric)
transaction.add_event(:meow)
end
it 'allows tracking of custom tags' do
expect(prometheus_metric).to receive(:increment).with(hash_including(animal: "dog"))
+ expect(described_class).to receive(:fetch_metric).with(:counter, :gitlab_transaction_event_bau_total).and_return(prometheus_metric)
transaction.add_event(:bau, animal: 'dog')
end
@@ -84,6 +82,7 @@ describe Gitlab::Metrics::Transaction do
context 'with sensitive tags' do
before do
transaction.add_event(:baubau, **sensitive_tags.merge(sane: 'yes'))
+ allow(described_class).to receive(:transaction_metric).and_return(prometheus_metric)
end
it 'filters tags' do
@@ -93,4 +92,37 @@ describe Gitlab::Metrics::Transaction do
end
end
end
+
+ describe '#increment' do
+ let(:prometheus_metric) { instance_double(Prometheus::Client::Counter, increment: nil) }
+
+ it 'adds a metric' do
+ expect(prometheus_metric).to receive(:increment).with(hash_including(:action, :controller), 1)
+ expect(described_class).to receive(:fetch_metric).with(:counter, :gitlab_transaction_meow_total).and_return(prometheus_metric)
+
+ transaction.increment(:meow, 1)
+ end
+ end
+
+ describe '#set' do
+ let(:prometheus_metric) { instance_double(Prometheus::Client::Gauge, set: nil) }
+
+ it 'adds a metric' do
+ expect(prometheus_metric).to receive(:set).with(hash_including(:action, :controller), 1)
+ expect(described_class).to receive(:fetch_metric).with(:gauge, :gitlab_transaction_meow_total).and_return(prometheus_metric)
+
+ transaction.set(:meow, 1)
+ end
+ end
+
+ describe '#get' do
+ let(:prometheus_metric) { instance_double(Prometheus::Client::Counter, get: nil) }
+
+ it 'gets a metric' do
+ expect(described_class).to receive(:fetch_metric).with(:counter, :gitlab_transaction_meow_total).and_return(prometheus_metric)
+ expect(prometheus_metric).to receive(:get)
+
+ transaction.get(:meow, :counter)
+ end
+ end
end
diff --git a/spec/lib/gitlab/middleware/handle_ip_spoof_attack_error_spec.rb b/spec/lib/gitlab/middleware/handle_ip_spoof_attack_error_spec.rb
new file mode 100644
index 00000000000..ccfc5e93887
--- /dev/null
+++ b/spec/lib/gitlab/middleware/handle_ip_spoof_attack_error_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Middleware::HandleIpSpoofAttackError do
+ let(:spoof_error) { ActionDispatch::RemoteIp::IpSpoofAttackError.new('sensitive') }
+ let(:standard_error) { StandardError.new('error') }
+ let(:app) { -> (env) { env.is_a?(Exception) ? raise(env) : env } }
+
+ subject(:middleware) { described_class.new(app) }
+
+ it 'passes through the response from a valid upstream' do
+ expect(middleware.call(:response)).to eq(:response)
+ end
+
+ it 'translates an ActionDispatch::RemoteIp::IpSpoofAttackError to a 400 response' do
+ expect(middleware.call(spoof_error))
+ .to eq([400, { 'Content-Type' => 'text/plain' }, ['Bad Request']])
+ end
+
+ it 'passes through the exception raised by an invalid upstream' do
+ expect { middleware.call(standard_error) }.to raise_error(standard_error)
+ end
+end
diff --git a/spec/lib/gitlab/monitor/demo_projects_spec.rb b/spec/lib/gitlab/monitor/demo_projects_spec.rb
new file mode 100644
index 00000000000..92024a3f9c1
--- /dev/null
+++ b/spec/lib/gitlab/monitor/demo_projects_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Monitor::DemoProjects do
+ describe '#primary_keys' do
+ subject { described_class.primary_keys }
+
+ it 'fetches primary_keys when on gitlab.com' do
+ allow(Gitlab).to receive(:com?).and_return(true)
+ allow(Gitlab).to receive(:staging?).and_return(false)
+
+ expect(subject).to eq(Gitlab::Monitor::DemoProjects::DOT_COM_IDS)
+ end
+
+ it 'fetches primary_keys when on staging' do
+ allow(Gitlab).to receive(:com?).and_return(true)
+ allow(Gitlab).to receive(:staging?).and_return(true)
+
+ expect(subject).to eq(Gitlab::Monitor::DemoProjects::STAGING_IDS)
+ end
+
+ it 'fetches all keys when in the dev or test env' do
+ project = create(:project)
+ allow(Gitlab).to receive(:dev_or_test_env?).and_return(true)
+
+ expect(subject).to eq([project.id])
+ end
+
+ it 'falls back on empty array' do
+ stub_config_setting(url: 'https://helloworld')
+ allow(Gitlab).to receive(:dev_or_test_env?).and_return(false)
+
+ expect(subject).to eq([])
+ end
+ end
+end
diff --git a/spec/lib/gitlab/no_cache_headers_spec.rb b/spec/lib/gitlab/no_cache_headers_spec.rb
index f011b55006e..c7a73f0e2dc 100644
--- a/spec/lib/gitlab/no_cache_headers_spec.rb
+++ b/spec/lib/gitlab/no_cache_headers_spec.rb
@@ -3,8 +3,11 @@
require 'spec_helper'
describe Gitlab::NoCacheHeaders do
- class NoCacheTester
- include Gitlab::NoCacheHeaders
+ before do
+ stub_const('NoCacheTester', Class.new)
+ NoCacheTester.class_eval do
+ include Gitlab::NoCacheHeaders
+ end
end
describe "#no_cache_headers" do
diff --git a/spec/lib/gitlab/pagination/keyset/request_context_spec.rb b/spec/lib/gitlab/pagination/keyset/request_context_spec.rb
index 6cd5ccc3c19..d6d5340f38b 100644
--- a/spec/lib/gitlab/pagination/keyset/request_context_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset/request_context_spec.rb
@@ -60,9 +60,7 @@ describe Gitlab::Pagination::Keyset::RequestContext do
it 'sets Links header with same host/path as the original request' do
orig_uri = URI.parse(request_context.request.url)
- expect(request_context).to receive(:header) do |name, header|
- expect(name).to eq('Links')
-
+ expect(request_context).to receive(:header).twice do |name, header|
first_link, _ = /<([^>]+)>; rel="next"/.match(header).captures
uri = URI.parse(first_link)
@@ -77,9 +75,7 @@ describe Gitlab::Pagination::Keyset::RequestContext do
it 'sets Links header with a link to the next page' do
orig_uri = URI.parse(request_context.request.url)
- expect(request_context).to receive(:header) do |name, header|
- expect(name).to eq('Links')
-
+ expect(request_context).to receive(:header).twice do |name, header|
first_link, _ = /<([^>]+)>; rel="next"/.match(header).captures
query = CGI.parse(URI.parse(first_link).query)
@@ -97,9 +93,7 @@ describe Gitlab::Pagination::Keyset::RequestContext do
it 'sets Links header with a link to the next page' do
orig_uri = URI.parse(request_context.request.url)
- expect(request_context).to receive(:header) do |name, header|
- expect(name).to eq('Links')
-
+ expect(request_context).to receive(:header).twice do |name, header|
first_link, _ = /<([^>]+)>; rel="next"/.match(header).captures
query = CGI.parse(URI.parse(first_link).query)
diff --git a/spec/lib/gitlab/patch/action_dispatch_journey_formatter_spec.rb b/spec/lib/gitlab/patch/action_dispatch_journey_formatter_spec.rb
index 5f0e1f40231..b1c7f73489d 100644
--- a/spec/lib/gitlab/patch/action_dispatch_journey_formatter_spec.rb
+++ b/spec/lib/gitlab/patch/action_dispatch_journey_formatter_spec.rb
@@ -7,7 +7,7 @@ describe Gitlab::Patch::ActionDispatchJourneyFormatter do
let(:project) { create(:project, namespace: group) }
let(:pipeline) { create(:ci_empty_pipeline, project: project) }
let(:url) { Gitlab::Routing.url_helpers.project_pipeline_url(project, pipeline) }
- let(:expected_path) { "#{project.full_path}/pipelines/#{pipeline.id}" }
+ let(:expected_path) { "#{project.full_path}/-/pipelines/#{pipeline.id}" }
context 'custom implementation of #missing_keys' do
before do
diff --git a/spec/lib/gitlab/path_regex_spec.rb b/spec/lib/gitlab/path_regex_spec.rb
index 50b045c6aad..ac506c49100 100644
--- a/spec/lib/gitlab/path_regex_spec.rb
+++ b/spec/lib/gitlab/path_regex_spec.rb
@@ -3,6 +3,11 @@
require 'spec_helper'
describe Gitlab::PathRegex do
+ let(:starting_with_namespace) { %r{^/\*namespace_id/:(project_)?id} }
+ let(:non_param_parts) { %r{[^:*][a-z\-_/]*} }
+ let(:any_other_path_part) { %r{[a-z\-_/:]*} }
+ let(:wildcard_segment) { /\*/ }
+
# Pass in a full path to remove the format segment:
# `/ci/lint(.:format)` -> `/ci/lint`
def without_format(path)
@@ -14,7 +19,7 @@ describe Gitlab::PathRegex do
# `/*namespace_id/:project_id/builds/artifacts/*ref_name_and_path`
# -> 'builds/artifacts'
def path_before_wildcard(path)
- path = path.gsub(STARTING_WITH_NAMESPACE, "")
+ path = path.gsub(starting_with_namespace, "")
path_segments = path.split('/').reject(&:empty?)
wildcard_index = path_segments.index { |segment| parameter?(segment) }
@@ -121,13 +126,9 @@ describe Gitlab::PathRegex do
# - Followed by one or more path-parts not starting with `:` or `*`
# - Followed by a path-part that includes a wildcard parameter `*`
# At the time of writing these routes match: http://rubular.com/r/Rv2pDE5Dvw
- STARTING_WITH_NAMESPACE = %r{^/\*namespace_id/:(project_)?id}.freeze
- NON_PARAM_PARTS = %r{[^:*][a-z\-_/]*}.freeze
- ANY_OTHER_PATH_PART = %r{[a-z\-_/:]*}.freeze
- WILDCARD_SEGMENT = /\*/.freeze
let(:namespaced_wildcard_routes) do
routes_without_format.select do |p|
- p =~ %r{#{STARTING_WITH_NAMESPACE}/#{NON_PARAM_PARTS}/#{ANY_OTHER_PATH_PART}#{WILDCARD_SEGMENT}}
+ p =~ %r{#{starting_with_namespace}/#{non_param_parts}/#{any_other_path_part}#{wildcard_segment}}
end
end
@@ -145,16 +146,14 @@ describe Gitlab::PathRegex do
end.uniq
end
- STARTING_WITH_GROUP = %r{^/groups/\*(group_)?id/}.freeze
+ let(:starting_with_group) { %r{^/groups/\*(group_)?id/} }
let(:group_routes) do
- routes_without_format.select do |path|
- path =~ STARTING_WITH_GROUP
- end
+ routes_without_format.grep(starting_with_group)
end
let(:paths_after_group_id) do
group_routes.map do |route|
- route.gsub(STARTING_WITH_GROUP, '').split('/').first
+ route.gsub(starting_with_group, '').split('/').first
end.uniq
end
diff --git a/spec/lib/gitlab/phabricator_import/cache/map_spec.rb b/spec/lib/gitlab/phabricator_import/cache/map_spec.rb
index b2a63e4f026..4935ef1bd90 100644
--- a/spec/lib/gitlab/phabricator_import/cache/map_spec.rb
+++ b/spec/lib/gitlab/phabricator_import/cache/map_spec.rb
@@ -66,7 +66,7 @@ describe Gitlab::PhabricatorImport::Cache::Map, :clean_gitlab_redis_cache do
end
expect(set_data).to eq({ classname: 'Issue', database_id: issue.id.to_s })
- expect(ttl).to be_within(1.second).of(StuckImportJobsWorker::IMPORT_JOBS_EXPIRATION)
+ expect(ttl).to be_within(1.second).of(Gitlab::Import::StuckImportJob::IMPORT_JOBS_EXPIRATION)
end
end
diff --git a/spec/lib/gitlab/process_memory_cache/helper_spec.rb b/spec/lib/gitlab/process_memory_cache/helper_spec.rb
new file mode 100644
index 00000000000..890642b1d5e
--- /dev/null
+++ b/spec/lib/gitlab/process_memory_cache/helper_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::ProcessMemoryCache::Helper, :use_clean_rails_memory_store_caching do
+ let(:minimal_test_class) do
+ Class.new do
+ include Gitlab::ProcessMemoryCache::Helper
+
+ def cached_content
+ fetch_memory_cache(:cached_content_instance_key) { expensive_computation }
+ end
+
+ def clear_cached_content
+ invalidate_memory_cache(:cached_content_instance_key)
+ end
+ end
+ end
+
+ before do
+ stub_const("MinimalTestClass", minimal_test_class)
+ end
+
+ subject { MinimalTestClass.new }
+
+ describe '.fetch_memory_cache' do
+ it 'memoizes the result' do
+ is_expected.to receive(:expensive_computation).once.and_return(1)
+
+ 2.times do
+ expect(subject.cached_content).to eq(1)
+ end
+ end
+
+ it 'resets the cache when the shared key is missing', :aggregate_failures do
+ expect(Rails.cache).to receive(:read).with(:cached_content_instance_key).twice.and_return(nil)
+ is_expected.to receive(:expensive_computation).thrice.and_return(1, 2, 3)
+
+ 3.times do |index|
+ expect(subject.cached_content).to eq(index + 1)
+ end
+ end
+ end
+
+ describe '.invalidate_memory_cache' do
+ it 'invalidates the cache' do
+ is_expected.to receive(:expensive_computation).twice.and_return(1, 2)
+
+ expect { subject.clear_cached_content }.to change { subject.cached_content }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/project_search_results_spec.rb b/spec/lib/gitlab/project_search_results_spec.rb
index 64f80b5d736..aa52949ed60 100644
--- a/spec/lib/gitlab/project_search_results_spec.rb
+++ b/spec/lib/gitlab/project_search_results_spec.rb
@@ -34,7 +34,7 @@ describe Gitlab::ProjectSearchResults do
'blobs' | :limited_blobs_count | max_limited_count
'notes' | :limited_notes_count | max_limited_count
'wiki_blobs' | :wiki_blobs_count | '1234'
- 'commits' | :commits_count | '1234'
+ 'commits' | :commits_count | max_limited_count
'projects' | :limited_projects_count | max_limited_count
'unknown' | nil | nil
end
@@ -386,6 +386,19 @@ describe Gitlab::ProjectSearchResults do
end
end
+ describe '#commits_count' do
+ let(:project) { create(:project, :public, :repository) }
+
+ it 'limits the number of commits requested' do
+ expect(project.repository)
+ .to receive(:find_commits_by_message)
+ .with(anything, anything, anything, described_class::COUNT_LIMIT)
+ .and_call_original
+
+ described_class.new(user, project, '.').commits_count
+ end
+ end
+
# Examples for commit access level test
#
# params:
@@ -452,6 +465,54 @@ describe Gitlab::ProjectSearchResults do
end
describe 'commit search' do
+ context 'pagination' do
+ let(:project) { create(:project, :public, :repository) }
+
+ it 'returns the correct results for each page' do
+ expect(results_page(1)).to contain_exactly(commit('b83d6e391c22777fca1ed3012fce84f633d7fed0'))
+
+ expect(results_page(2)).to contain_exactly(commit('498214de67004b1da3d820901307bed2a68a8ef6'))
+
+ expect(results_page(3)).to contain_exactly(commit('1b12f15a11fc6e62177bef08f47bc7b5ce50b141'))
+ end
+
+ it 'returns the correct number of pages' do
+ expect(results_page(1).total_pages).to eq(project.repository.commit_count)
+ end
+
+ context 'limiting requested commits' do
+ context 'on page 1' do
+ it "limits to #{described_class::COUNT_LIMIT}" do
+ expect(project.repository)
+ .to receive(:find_commits_by_message)
+ .with(anything, anything, anything, described_class::COUNT_LIMIT)
+ .and_call_original
+
+ results_page(1)
+ end
+ end
+
+ context 'on subsequent pages' do
+ it "limits to #{described_class::COUNT_LIMIT} plus page offset" do
+ expect(project.repository)
+ .to receive(:find_commits_by_message)
+ .with(anything, anything, anything, described_class::COUNT_LIMIT + 1)
+ .and_call_original
+
+ results_page(2)
+ end
+ end
+ end
+
+ def results_page(page)
+ described_class.new(user, project, '.').objects('commits', per_page: 1, page: page)
+ end
+
+ def commit(hash)
+ project.repository.commit(hash)
+ end
+ end
+
context 'by commit message' do
let(:project) { create(:project, :public, :repository) }
let(:commit) { project.repository.commit('59e29889be61e6e0e5e223bfa9ac2721d31605b8') }
@@ -469,6 +530,18 @@ describe Gitlab::ProjectSearchResults do
expect(commits).to be_empty
end
+ context 'when repository_ref is provided' do
+ let(:message) { 'Feature added' }
+ let(:repository_ref) { 'feature' }
+
+ it 'searches in the specified ref' do
+ commits = described_class.new(user, project, message, repository_ref).objects('commits')
+
+ # This commit is unique to the feature branch
+ expect(commits).to contain_exactly(project.repository.commit('0b4bc9a49b562e85de7cc9e834518ea6828729b9'))
+ end
+ end
+
it_behaves_like 'access restricted commits' do
let(:search_phrase) { message }
let(:commit) { project.repository.commit('59e29889be61e6e0e5e223bfa9ac2721d31605b8') }
diff --git a/spec/lib/gitlab/project_template_spec.rb b/spec/lib/gitlab/project_template_spec.rb
index aa18a1a843c..35f79042df0 100644
--- a/spec/lib/gitlab/project_template_spec.rb
+++ b/spec/lib/gitlab/project_template_spec.rb
@@ -4,34 +4,17 @@ require 'spec_helper'
describe Gitlab::ProjectTemplate do
describe '.all' do
- it 'returns a all templates' do
- expected = [
- described_class.new('rails', 'Ruby on Rails', 'Includes an MVC structure, .gitignore, Gemfile, and more great stuff', 'https://gitlab.com/gitlab-org/project-templates/rails'),
- described_class.new('spring', 'Spring', 'Includes an MVC structure, .gitignore, Gemfile, and more great stuff', 'https://gitlab.com/gitlab-org/project-templates/spring'),
- described_class.new('express', 'NodeJS Express', 'Includes an MVC structure, .gitignore, Gemfile, and more great stuff', 'https://gitlab.com/gitlab-org/project-templates/express'),
- described_class.new('iosswift', 'iOS (Swift)', 'A ready-to-go template for use with iOS Swift apps.', 'https://gitlab.com/gitlab-org/project-templates/iosswift'),
- described_class.new('dotnetcore', '.NET Core', 'A .NET Core console application template, customizable for any .NET Core project', 'https://gitlab.com/gitlab-org/project-templates/dotnetcore'),
- described_class.new('android', 'Android', 'A ready-to-go template for use with Android apps.', 'https://gitlab.com/gitlab-org/project-templates/android'),
- described_class.new('gomicro', 'Go Micro', 'Go Micro is a framework for micro service development.', 'https://gitlab.com/gitlab-org/project-templates/go-micro'),
- described_class.new('gatsby', 'Pages/Gatsby', 'Everything you need to get started using a Gatsby site.', 'https://gitlab.com/pages/gatsby'),
- described_class.new('hugo', 'Pages/Hugo', 'Everything you need to get started using a Hugo Pages site.', 'https://gitlab.com/pages/hugo'),
- described_class.new('jekyll', 'Pages/Jekyll', 'Everything you need to get started using a Jekyll Pages site.', 'https://gitlab.com/pages/jekyll'),
- described_class.new('plainhtml', 'Pages/Plain HTML', 'Everything you need to get started using a plain HTML Pages site.', 'https://gitlab.com/pages/plain-html'),
- described_class.new('gitbook', 'Pages/GitBook', 'Everything you need to get started using a GitBook Pages site.', 'https://gitlab.com/pages/gitbook'),
- described_class.new('hexo', 'Pages/Hexo', 'Everything you need to get started using a Hexo Pages site.', 'https://gitlab.com/pages/hexo'),
- described_class.new('sse_middleman', 'Static Site Editor/Middleman', _('Middleman project with Static Site Editor support'), 'https://gitlab.com/gitlab-org/project-templates/static-site-editor-middleman'),
- described_class.new('nfhugo', 'Netlify/Hugo', _('A Hugo site that uses Netlify for CI/CD instead of GitLab, but still with all the other great GitLab features.'), 'https://gitlab.com/pages/nfhugo'),
- described_class.new('nfjekyll', 'Netlify/Jekyll', _('A Jekyll site that uses Netlify for CI/CD instead of GitLab, but still with all the other great GitLab features.'), 'https://gitlab.com/pages/nfjekyll'),
- described_class.new('nfplainhtml', 'Netlify/Plain HTML', _('A plain HTML site that uses Netlify for CI/CD instead of GitLab, but still with all the other great GitLab features.'), 'https://gitlab.com/pages/nfplain-html'),
- described_class.new('nfgitbook', 'Netlify/GitBook', _('A GitBook site that uses Netlify for CI/CD instead of GitLab, but still with all the other great GitLab features.'), 'https://gitlab.com/pages/nfgitbook'),
- described_class.new('nfhexo', 'Netlify/Hexo', _('A Hexo site that uses Netlify for CI/CD instead of GitLab, but still with all the other great GitLab features.'), 'https://gitlab.com/pages/nfhexo'),
- described_class.new('salesforcedx', 'SalesforceDX', _('A project boilerplate for Salesforce App development with Salesforce Developer tools.'), 'https://gitlab.com/gitlab-org/project-templates/salesforcedx'),
- described_class.new('serverless_framework', 'Serverless Framework/JS', _('A basic page and serverless function that uses AWS Lambda, AWS API Gateway, and GitLab Pages'), 'https://gitlab.com/gitlab-org/project-templates/serverless-framework', 'illustrations/logos/serverless_framework.svg'),
- described_class.new('cluster_management', 'GitLab Cluster Management', _('An example project for managing Kubernetes clusters integrated with GitLab.'), 'https://gitlab.com/gitlab-org/project-templates/cluster-management')
+ it 'returns all templates' do
+ expected = %w[
+ rails spring express iosswift dotnetcore android
+ gomicro gatsby hugo jekyll plainhtml gitbook
+ hexo sse_middleman nfhugo nfjekyll nfplainhtml
+ nfgitbook nfhexo salesforcedx serverless_framework
+ cluster_management
]
expect(described_class.all).to be_an(Array)
- expect(described_class.all).to eq(expected)
+ expect(described_class.all.map(&:name)).to match_array(expected)
end
end
diff --git a/spec/lib/gitlab/prometheus/query_variables_spec.rb b/spec/lib/gitlab/prometheus/query_variables_spec.rb
index d8f8a2b7e7c..7dfa4de35d6 100644
--- a/spec/lib/gitlab/prometheus/query_variables_spec.rb
+++ b/spec/lib/gitlab/prometheus/query_variables_spec.rb
@@ -7,8 +7,9 @@ describe Gitlab::Prometheus::QueryVariables do
let(:project) { environment.project }
let(:environment) { create(:environment) }
let(:slug) { environment.slug }
+ let(:params) { {} }
- subject { described_class.call(environment) }
+ subject { described_class.call(environment, params) }
it { is_expected.to include(ci_environment_slug: slug) }
it { is_expected.to include(ci_project_name: project.name) }
@@ -53,5 +54,42 @@ describe Gitlab::Prometheus::QueryVariables do
it { is_expected.to include(kube_namespace: kube_namespace) }
end
end
+
+ context '__range' do
+ context 'when start_time and end_time are present' do
+ let(:params) do
+ {
+ start_time: Time.rfc3339('2020-05-29T07:23:05.008Z'),
+ end_time: Time.rfc3339('2020-05-29T15:23:05.008Z')
+ }
+ end
+
+ it { is_expected.to include(__range: "#{8.hours.to_i}s") }
+ end
+
+ context 'when start_time and end_time are not present' do
+ it { is_expected.to include(__range: nil) }
+ end
+
+ context 'when end_time is not present' do
+ let(:params) do
+ {
+ start_time: Time.rfc3339('2020-05-29T07:23:05.008Z')
+ }
+ end
+
+ it { is_expected.to include(__range: nil) }
+ end
+
+ context 'when start_time is not present' do
+ let(:params) do
+ {
+ end_time: Time.rfc3339('2020-05-29T07:23:05.008Z')
+ }
+ end
+
+ it { is_expected.to include(__range: nil) }
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/prometheus_client_spec.rb b/spec/lib/gitlab/prometheus_client_spec.rb
index 4ff53b50a50..749192e5795 100644
--- a/spec/lib/gitlab/prometheus_client_spec.rb
+++ b/spec/lib/gitlab/prometheus_client_spec.rb
@@ -171,6 +171,58 @@ describe Gitlab::PrometheusClient do
end
end
+ describe '#aggregate' do
+ let(:query) { 'avg (metric) by (job)' }
+ let(:prometheus_response) do
+ {
+ "status": "success",
+ "data": {
+ "resultType": "vector",
+ "result": [
+ {
+ "metric": { "job" => "gitlab-rails" },
+ "value": [1488758662.506, "1"]
+ },
+ {
+ "metric": { "job" => "gitlab-sidekiq" },
+ "value": [1488758662.506, "2"]
+ }
+ ]
+ }
+ }
+ end
+ let(:query_url) { prometheus_query_with_time_url(query, Time.now.utc) }
+
+ around do |example|
+ Timecop.freeze { example.run }
+ end
+
+ context 'when request returns vector results' do
+ it 'returns data from the API call grouped by labels' do
+ req_stub = stub_prometheus_request(query_url, body: prometheus_response)
+
+ expect(subject.aggregate(query)).to eq({
+ { "job" => "gitlab-rails" } => 1,
+ { "job" => "gitlab-sidekiq" } => 2
+ })
+ expect(req_stub).to have_been_requested
+ end
+ end
+
+ context 'when request returns no data' do
+ it 'returns {}' do
+ req_stub = stub_prometheus_request(query_url, body: prometheus_empty_body('vector'))
+
+ expect(subject.aggregate(query)).to eq({})
+ expect(req_stub).to have_been_requested
+ end
+ end
+
+ it_behaves_like 'failure response' do
+ let(:execute_query) { subject.aggregate(query) }
+ end
+ end
+
describe '#series' do
let(:query_url) { prometheus_series_url('series_name', 'other_service') }
diff --git a/spec/lib/gitlab/redis/wrapper_spec.rb b/spec/lib/gitlab/redis/wrapper_spec.rb
index e4cc42130db..51a36eb062c 100644
--- a/spec/lib/gitlab/redis/wrapper_spec.rb
+++ b/spec/lib/gitlab/redis/wrapper_spec.rb
@@ -18,7 +18,21 @@ describe Gitlab::Redis::Wrapper do
let(:config_env_variable_url) {"TEST_GITLAB_REDIS_URL"}
let(:class_redis_url) { Gitlab::Redis::Wrapper::DEFAULT_REDIS_URL }
- include_examples "redis_shared_examples"
+ include_examples "redis_shared_examples" do
+ before do
+ allow(described_class).to receive(:instrumentation_class) do
+ ::Gitlab::Instrumentation::Redis::Cache
+ end
+ end
+ end
+
+ describe '.instrumentation_class' do
+ it 'raises a NotImplementedError' do
+ expect(described_class).to receive(:instrumentation_class).and_call_original
+
+ expect { described_class.instrumentation_class }.to raise_error(NotImplementedError)
+ end
+ end
describe '.config_file_path' do
it 'returns the absolute path to the configuration file' do
diff --git a/spec/lib/gitlab/reference_extractor_spec.rb b/spec/lib/gitlab/reference_extractor_spec.rb
index 8ea591c6f74..dd16f3c6035 100644
--- a/spec/lib/gitlab/reference_extractor_spec.rb
+++ b/spec/lib/gitlab/reference_extractor_spec.rb
@@ -204,7 +204,7 @@ describe Gitlab::ReferenceExtractor do
issue]
end
- it 'returns only Jira issues if the internal one does not exists' do
+ it 'returns only Jira issues if the internal one does not exist' do
subject.analyze("JIRA-123 and FOOBAR-4567 and ##{non_existing_record_iid}")
expect(subject.issues).to eq [ExternalIssue.new('JIRA-123', project),
ExternalIssue.new('FOOBAR-4567', project)]
@@ -236,7 +236,7 @@ describe Gitlab::ReferenceExtractor do
expect(subject.issues).to eq([issue])
end
- it 'does not return any issue if the internal one does not exists' do
+ it 'does not return any issue if the internal one does not exist' do
subject.analyze("JIRA-123 and FOOBAR-4567 and #999")
expect(subject.issues).to be_empty
end
@@ -296,7 +296,7 @@ describe Gitlab::ReferenceExtractor do
end
it 'returns all supported prefixes' do
- expect(prefixes.keys.uniq).to match_array(%w(@ # ~ % ! $ &))
+ expect(prefixes.keys.uniq).to match_array(%w(@ # ~ % ! $ & *iteration:))
end
it 'does not allow one prefix for multiple referables if not allowed specifically' do
diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb
index 9e596400904..2f220272651 100644
--- a/spec/lib/gitlab/regex_spec.rb
+++ b/spec/lib/gitlab/regex_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'spec_helper'
+require 'fast_spec_helper'
describe Gitlab::Regex do
shared_examples_for 'project/group name regex' do
@@ -163,4 +163,136 @@ describe Gitlab::Regex do
it { is_expected.not_to match('-foo-') }
it { is_expected.not_to match('foo/bar') }
end
+
+ describe '.conan_file_name_regex' do
+ subject { described_class.conan_file_name_regex }
+
+ it { is_expected.to match('conanfile.py') }
+ it { is_expected.to match('conan_package.tgz') }
+ it { is_expected.not_to match('foo.txt') }
+ it { is_expected.not_to match('!!()()') }
+ end
+
+ describe '.conan_package_reference_regex' do
+ subject { described_class.conan_package_reference_regex }
+
+ it { is_expected.to match('123456789') }
+ it { is_expected.to match('asdf1234') }
+ it { is_expected.not_to match('@foo') }
+ it { is_expected.not_to match('0/pack+age/1@1/0') }
+ it { is_expected.not_to match('!!()()') }
+ end
+
+ describe '.conan_revision_regex' do
+ subject { described_class.conan_revision_regex }
+
+ it { is_expected.to match('0') }
+ it { is_expected.not_to match('foo') }
+ it { is_expected.not_to match('!!()()') }
+ end
+
+ describe '.conan_recipe_component_regex' do
+ subject { described_class.conan_recipe_component_regex }
+
+ let(:fifty_one_characters) { 'f_a' * 17}
+
+ it { is_expected.to match('foobar') }
+ it { is_expected.to match('foo_bar') }
+ it { is_expected.to match('foo+bar') }
+ it { is_expected.to match('_foo+bar-baz+1.0') }
+ it { is_expected.to match('1.0.0') }
+ it { is_expected.not_to match('-foo_bar') }
+ it { is_expected.not_to match('+foo_bar') }
+ it { is_expected.not_to match('.foo_bar') }
+ it { is_expected.not_to match('foo@bar') }
+ it { is_expected.not_to match('foo/bar') }
+ it { is_expected.not_to match('!!()()') }
+ it { is_expected.not_to match(fifty_one_characters) }
+ end
+
+ describe '.package_name_regex' do
+ subject { described_class.package_name_regex }
+
+ it { is_expected.to match('123') }
+ it { is_expected.to match('foo') }
+ it { is_expected.to match('foo/bar') }
+ it { is_expected.to match('@foo/bar') }
+ it { is_expected.to match('com/mycompany/app/my-app') }
+ it { is_expected.to match('my-package/1.0.0@my+project+path/beta') }
+ it { is_expected.not_to match('my-package/1.0.0@@@@@my+project+path/beta') }
+ it { is_expected.not_to match('$foo/bar') }
+ it { is_expected.not_to match('@foo/@/bar') }
+ it { is_expected.not_to match('@@foo/bar') }
+ it { is_expected.not_to match('my package name') }
+ it { is_expected.not_to match('!!()()') }
+ it { is_expected.not_to match("..\n..\foo") }
+ end
+
+ describe '.maven_file_name_regex' do
+ subject { described_class.maven_file_name_regex }
+
+ it { is_expected.to match('123') }
+ it { is_expected.to match('foo') }
+ it { is_expected.to match('foo+bar-2_0.pom') }
+ it { is_expected.to match('foo.bar.baz-2.0-20190901.47283-1.jar') }
+ it { is_expected.to match('maven-metadata.xml') }
+ it { is_expected.to match('1.0-SNAPSHOT') }
+ it { is_expected.not_to match('../../foo') }
+ it { is_expected.not_to match('..\..\foo') }
+ it { is_expected.not_to match('%2f%2e%2e%2f%2essh%2fauthorized_keys') }
+ it { is_expected.not_to match('$foo/bar') }
+ it { is_expected.not_to match('my file name') }
+ it { is_expected.not_to match('!!()()') }
+ end
+
+ describe '.maven_path_regex' do
+ subject { described_class.maven_path_regex }
+
+ it { is_expected.to match('123') }
+ it { is_expected.to match('foo') }
+ it { is_expected.to match('foo/bar') }
+ it { is_expected.to match('@foo/bar') }
+ it { is_expected.to match('com/mycompany/app/my-app') }
+ it { is_expected.to match('com/mycompany/app/my-app/1.0-SNAPSHOT') }
+ it { is_expected.to match('com/mycompany/app/my-app/1.0-SNAPSHOT+debian64') }
+ it { is_expected.not_to match('com/mycompany/app/my+app/1.0-SNAPSHOT') }
+ it { is_expected.not_to match('$foo/bar') }
+ it { is_expected.not_to match('@foo/@/bar') }
+ it { is_expected.not_to match('my package name') }
+ it { is_expected.not_to match('!!()()') }
+ end
+
+ describe '.semver_regex' do
+ subject { described_class.semver_regex }
+
+ it { is_expected.to match('1.2.3') }
+ it { is_expected.to match('1.2.3-beta') }
+ it { is_expected.to match('1.2.3-alpha.3') }
+ it { is_expected.not_to match('1') }
+ it { is_expected.not_to match('1.2') }
+ it { is_expected.not_to match('1./2.3') }
+ it { is_expected.not_to match('../../../../../1.2.3') }
+ it { is_expected.not_to match('%2e%2e%2f1.2.3') }
+ end
+
+ describe '.go_package_regex' do
+ subject { described_class.go_package_regex }
+
+ it { is_expected.to match('example.com') }
+ it { is_expected.to match('example.com/foo') }
+ it { is_expected.to match('example.com/foo/bar') }
+ it { is_expected.to match('example.com/foo/bar/baz') }
+ it { is_expected.to match('tl.dr.foo.bar.baz') }
+ end
+
+ describe '.unbounded_semver_regex' do
+ subject { described_class.unbounded_semver_regex }
+
+ it { is_expected.to match('1.2.3') }
+ it { is_expected.to match('1.2.3-beta') }
+ it { is_expected.to match('1.2.3-alpha.3') }
+ it { is_expected.not_to match('1') }
+ it { is_expected.not_to match('1.2') }
+ it { is_expected.not_to match('1./2.3') }
+ end
end
diff --git a/spec/lib/gitlab/routing_spec.rb b/spec/lib/gitlab/routing_spec.rb
index 965564cb83b..5446d6559fe 100644
--- a/spec/lib/gitlab/routing_spec.rb
+++ b/spec/lib/gitlab/routing_spec.rb
@@ -22,4 +22,25 @@ describe Gitlab::Routing do
expect(subject).to respond_to(:namespace_project_path)
end
end
+
+ describe Gitlab::Routing::LegacyRedirector do
+ subject { described_class.new(:wikis) }
+
+ let(:request) { double(:request, path: path, query_string: '') }
+ let(:path) { '/gitlab-org/gitlab-test/wikis/home' }
+
+ it 'returns "-" scoped url' do
+ expect(subject.call({}, request)).to eq('/gitlab-org/gitlab-test/-/wikis/home')
+ end
+
+ context 'invalid uri characters' do
+ let(:path) { '/gitlab-org/gitlab-test/wikis/home[' }
+
+ it 'raises error' do
+ expect do
+ subject.call({}, request)
+ end.to raise_error(ActionController::RoutingError)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/rugged_instrumentation_spec.rb b/spec/lib/gitlab/rugged_instrumentation_spec.rb
index 64c0ce1b65e..d6f3fb9be55 100644
--- a/spec/lib/gitlab/rugged_instrumentation_spec.rb
+++ b/spec/lib/gitlab/rugged_instrumentation_spec.rb
@@ -7,10 +7,10 @@ describe Gitlab::RuggedInstrumentation, :request_store do
describe '.query_time' do
it 'increments query times' do
- subject.query_time += 0.451
- subject.query_time += 0.322
+ subject.add_query_time(0.4510004)
+ subject.add_query_time(0.3220004)
- expect(subject.query_time).to be_within(0.001).of(0.773)
+ expect(subject.query_time).to eq(0.773001)
expect(subject.query_time_ms).to eq(773.0)
end
end
diff --git a/spec/lib/gitlab/search_context/builder_spec.rb b/spec/lib/gitlab/search_context/builder_spec.rb
new file mode 100644
index 00000000000..1707b54b273
--- /dev/null
+++ b/spec/lib/gitlab/search_context/builder_spec.rb
@@ -0,0 +1,152 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::SearchContext::Builder, type: :controller do
+ controller(ApplicationController) { }
+
+ subject(:builder) { described_class.new(controller.view_context) }
+
+ shared_examples "has a fluid interface" do
+ it { is_expected.to be_instance_of(described_class) }
+ end
+
+ def expected_project_metadata(project)
+ return {} if project.nil?
+
+ a_hash_including(project_path: project.path,
+ name: project.name,
+ issues_path: a_string_including("/issues"),
+ mr_path: a_string_including("/merge_requests"),
+ issues_disabled: !project.issues_enabled?)
+ end
+
+ def expected_group_metadata(group)
+ return {} if group.nil?
+
+ a_hash_including(group_path: group.path,
+ name: group.name,
+ issues_path: a_string_including("/issues"),
+ mr_path: a_string_including("/merge_requests"))
+ end
+
+ def expected_search_url(project, group)
+ if project
+ search_path(project_id: project.id)
+ elsif group
+ search_path(group_id: group.id)
+ else
+ search_path
+ end
+ end
+
+ def be_search_context(project: nil, group: nil, snippets: [], ref: nil)
+ group = project ? project.group : group
+ snippets.compact!
+
+ have_attributes(
+ project: project,
+ group: group,
+ ref: ref,
+ snippets: snippets,
+ project_metadata: expected_project_metadata(project),
+ group_metadata: expected_group_metadata(group),
+ search_url: expected_search_url(project, group)
+ )
+ end
+
+ describe '#with_project' do
+ let(:project) { create(:project) }
+
+ subject { builder.with_project(project) }
+
+ it_behaves_like "has a fluid interface"
+
+ describe '#build!' do
+ subject(:context) { builder.with_project(project).build! }
+
+ context 'when a project is not owned by a group' do
+ it { is_expected.to be_for_project }
+ it { is_expected.to be_search_context(project: project) }
+ end
+
+ context 'when a project is owned by a group' do
+ let(:project) { create(:project, group: create(:group)) }
+
+ it 'delegates to `#with_group`' do
+ expect(builder).to receive(:with_group).with(project.group)
+ expect(context).to be
+ end
+
+ it { is_expected.to be_search_context(project: project, group: project.group) }
+ end
+ end
+ end
+
+ describe '#with_snippet' do
+ context 'when there is a single snippet' do
+ let(:snippet) { create(:snippet) }
+
+ subject { builder.with_snippet(snippet) }
+
+ it_behaves_like "has a fluid interface"
+
+ describe '#build!' do
+ subject(:context) { builder.with_snippet(snippet).build! }
+
+ it { is_expected.to be_for_snippet }
+ it { is_expected.to be_search_context(snippets: [snippet]) }
+ end
+ end
+
+ context 'when there are multiple snippets' do
+ let(:snippets) { create_list(:snippet, 3) }
+
+ describe '#build!' do
+ subject(:context) do
+ snippets.each(&builder.method(:with_snippet))
+ builder.build!
+ end
+
+ it { is_expected.to be_for_snippet }
+ it { is_expected.to be_search_context(snippets: snippets) }
+ end
+ end
+ end
+
+ describe '#with_group' do
+ let(:group) { create(:group) }
+
+ subject { builder.with_group(group) }
+
+ it_behaves_like "has a fluid interface"
+
+ describe '#build!' do
+ subject(:context) { builder.with_group(group).build! }
+
+ it { is_expected.to be_for_group }
+ it { is_expected.to be_search_context(group: group) }
+ end
+ end
+
+ describe '#with_ref' do
+ let(:ref) { Gitlab::Git::EMPTY_TREE_ID }
+
+ subject { builder.with_ref(ref) }
+
+ it_behaves_like "has a fluid interface"
+
+ describe '#build!' do
+ subject(:context) { builder.with_ref(ref).build! }
+
+ it { is_expected.to be_search_context(ref: ref) }
+ end
+ end
+
+ describe '#build!' do
+ subject(:context) { builder.build! }
+
+ it { is_expected.to be_a(Gitlab::SearchContext) }
+ end
+end
diff --git a/spec/lib/gitlab/search_context/controller_concern_spec.rb b/spec/lib/gitlab/search_context/controller_concern_spec.rb
new file mode 100644
index 00000000000..16784cafb76
--- /dev/null
+++ b/spec/lib/gitlab/search_context/controller_concern_spec.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::SearchContext::ControllerConcern, type: :controller do
+ controller(ApplicationController) do
+ include Gitlab::SearchContext::ControllerConcern
+ end
+
+ let(:project) { nil }
+ let(:group) { nil }
+ let(:snippet) { nil }
+ let(:snippets) { [] }
+ let(:ref) { nil }
+
+ let(:builder) { Gitlab::SearchContext::Builder.new(controller.view_context) }
+
+ subject(:search_context) { controller.search_context }
+
+ def weak_assign(ivar, value)
+ return if value.nil?
+
+ controller.instance_variable_set(ivar.to_sym, value)
+ end
+
+ before do
+ weak_assign(:@project, project)
+ weak_assign(:@group, group)
+ weak_assign(:@ref, ref)
+ weak_assign(:@snippet, snippet)
+ weak_assign(:@snippets, snippets)
+
+ allow(Gitlab::SearchContext::Builder).to receive(:new).and_return(builder)
+ end
+
+ shared_examples 'has the proper context' do
+ it :aggregate_failures do
+ expected_group = project ? project.group : group
+ expected_snippets = [snippet, *snippets].compact
+
+ expect(builder).to receive(:with_project).with(project).and_call_original if project
+ expect(builder).to receive(:with_group).with(expected_group).and_call_original if expected_group
+ expect(builder).to receive(:with_ref).with(ref).and_call_original if ref
+ expected_snippets.each do |snippet|
+ expect(builder).to receive(:with_snippet).with(snippet).and_call_original
+ end
+
+ is_expected.to be_a(Gitlab::SearchContext)
+ end
+ end
+
+ context 'exposing @project' do
+ let(:project) { create(:project) }
+
+ it_behaves_like 'has the proper context'
+
+ context 'when the project is owned by a group' do
+ let(:project) { create(:project, group: create(:group)) }
+
+ it_behaves_like 'has the proper context'
+ end
+ end
+
+ context 'exposing @group' do
+ let(:group) { create(:group) }
+
+ it_behaves_like 'has the proper context'
+ end
+
+ context 'exposing @snippet, @snippets' do
+ let(:snippet) { create(:snippet) }
+ let(:snippets) { create_list(:snippet, 3) }
+
+ it_behaves_like 'has the proper context'
+ end
+
+ context 'exposing @ref' do
+ let(:ref) { Gitlab::Git::EMPTY_TREE_ID }
+
+ it_behaves_like 'has the proper context'
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_config/cli_methods_spec.rb b/spec/lib/gitlab/sidekiq_config/cli_methods_spec.rb
index 80e8da58f23..7a8aba2d396 100644
--- a/spec/lib/gitlab/sidekiq_config/cli_methods_spec.rb
+++ b/spec/lib/gitlab/sidekiq_config/cli_methods_spec.rb
@@ -76,7 +76,12 @@ describe Gitlab::SidekiqConfig::CliMethods do
describe '.expand_queues' do
let(:worker_queues) do
- ['cronjob:stuck_import_jobs', 'cronjob:stuck_merge_jobs', 'post_receive']
+ [
+ 'cronjob:import_stuck_project_import_jobs',
+ 'cronjob:jira_import_stuck_jira_import_jobs',
+ 'cronjob:stuck_merge_jobs',
+ 'post_receive'
+ ]
end
it 'defaults the value of the second argument to .worker_queues' do
@@ -88,12 +93,22 @@ describe Gitlab::SidekiqConfig::CliMethods do
allow(described_class).to receive(:worker_queues).and_return(worker_queues)
expect(described_class.expand_queues(['cronjob']))
- .to contain_exactly('cronjob', 'cronjob:stuck_import_jobs', 'cronjob:stuck_merge_jobs')
+ .to contain_exactly(
+ 'cronjob',
+ 'cronjob:import_stuck_project_import_jobs',
+ 'cronjob:jira_import_stuck_jira_import_jobs',
+ 'cronjob:stuck_merge_jobs'
+ )
end
it 'expands queue namespaces to concrete queue names' do
expect(described_class.expand_queues(['cronjob'], worker_queues))
- .to contain_exactly('cronjob', 'cronjob:stuck_import_jobs', 'cronjob:stuck_merge_jobs')
+ .to contain_exactly(
+ 'cronjob',
+ 'cronjob:import_stuck_project_import_jobs',
+ 'cronjob:jira_import_stuck_jira_import_jobs',
+ 'cronjob:stuck_merge_jobs'
+ )
end
it 'lets concrete queue names pass through' do
@@ -117,28 +132,32 @@ describe Gitlab::SidekiqConfig::CliMethods do
feature_category: :category_a,
has_external_dependencies: false,
urgency: :low,
- resource_boundary: :cpu
+ resource_boundary: :cpu,
+ tags: [:no_disk_io, :git_access]
},
{
name: 'a:2',
feature_category: :category_a,
has_external_dependencies: false,
urgency: :high,
- resource_boundary: :none
+ resource_boundary: :none,
+ tags: [:git_access]
},
{
name: 'b',
feature_category: :category_b,
has_external_dependencies: true,
urgency: :high,
- resource_boundary: :memory
+ resource_boundary: :memory,
+ tags: [:no_disk_io]
},
{
name: 'c',
feature_category: :category_c,
has_external_dependencies: false,
urgency: :throttled,
- resource_boundary: :memory
+ resource_boundary: :memory,
+ tags: []
}
]
end
@@ -177,6 +196,18 @@ describe Gitlab::SidekiqConfig::CliMethods do
'resource_boundary=memory|resource_boundary=cpu' | %w(a b c)
'resource_boundary!=memory,cpu' | %w(a:2)
+ # tags
+ 'tags=no_disk_io' | %w(a b)
+ 'tags=no_disk_io,git_access' | %w(a a:2 b)
+ 'tags=no_disk_io|tags=git_access' | %w(a a:2 b)
+ 'tags=no_disk_io&tags=git_access' | %w(a)
+ 'tags!=no_disk_io' | %w(a:2 c)
+ 'tags!=no_disk_io,git_access' | %w(c)
+ 'tags=unknown_tag' | []
+ 'tags!=unknown_tag' | %w(a a:2 b c)
+
# combinations
'feature_category=category_a&urgency=high' | %w(a:2)
'feature_category=category_a&urgency=high|feature_category=category_c' | %w(a:2 c)
diff --git a/spec/lib/gitlab/sidekiq_config/worker_spec.rb b/spec/lib/gitlab/sidekiq_config/worker_spec.rb
index 817755e3507..00343a0264d 100644
--- a/spec/lib/gitlab/sidekiq_config/worker_spec.rb
+++ b/spec/lib/gitlab/sidekiq_config/worker_spec.rb
@@ -13,7 +13,8 @@ describe Gitlab::SidekiqConfig::Worker do
get_worker_resource_boundary: attributes[:resource_boundary],
get_urgency: attributes[:urgency],
worker_has_external_dependencies?: attributes[:has_external_dependencies],
- idempotent?: attributes[:idempotent]
+ idempotent?: attributes[:idempotent],
+ get_tags: attributes[:tags]
)
described_class.new(inner_worker, ee: false)
@@ -91,7 +92,8 @@ describe Gitlab::SidekiqConfig::Worker do
urgency: :low,
resource_boundary: :memory,
weight: 2,
- idempotent: true
+ idempotent: true,
+ tags: []
}
attributes_b = {
@@ -100,7 +102,8 @@ describe Gitlab::SidekiqConfig::Worker do
urgency: :high,
resource_boundary: :unknown,
weight: 3,
- idempotent: false
+ idempotent: false,
+ tags: [:no_disk_io]
}
worker_a = create_worker(queue: 'a', **attributes_a)
diff --git a/spec/lib/gitlab/sidekiq_config_spec.rb b/spec/lib/gitlab/sidekiq_config_spec.rb
index 85de1d029c3..66744d07aaa 100644
--- a/spec/lib/gitlab/sidekiq_config_spec.rb
+++ b/spec/lib/gitlab/sidekiq_config_spec.rb
@@ -18,7 +18,8 @@ describe Gitlab::SidekiqConfig do
expect(queues).to include('post_receive')
expect(queues).to include('merge')
- expect(queues).to include('cronjob:stuck_import_jobs')
+ expect(queues).to include('cronjob:import_stuck_project_import_jobs')
+ expect(queues).to include('cronjob:jira_import_stuck_jira_import_jobs')
expect(queues).to include('mailers')
expect(queues).to include('default')
end
diff --git a/spec/lib/gitlab/sidekiq_logging/json_formatter_spec.rb b/spec/lib/gitlab/sidekiq_logging/json_formatter_spec.rb
index 283140d7fdf..10354147cf9 100644
--- a/spec/lib/gitlab/sidekiq_logging/json_formatter_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/json_formatter_spec.rb
@@ -14,6 +14,7 @@ describe Gitlab::SidekiqLogging::JSONFormatter do
let(:hash_input) do
{
foo: 1,
+ 'class' => 'PostReceive',
'bar' => 'test',
'created_at' => timestamp,
'enqueued_at' => timestamp,
@@ -42,21 +43,47 @@ describe Gitlab::SidekiqLogging::JSONFormatter do
expect(subject).to eq(expected_output)
end
- context 'when the job args are bigger than the maximum allowed' do
- it 'keeps args from the front until they exceed the limit' do
- half_limit = Gitlab::Utils::LogLimitedArray::MAXIMUM_ARRAY_LENGTH / 2
- hash_input['args'] = [1, 2, 'a' * half_limit, 'b' * half_limit, 3]
+ it 'removes jobstr from the hash' do
+ hash_input[:jobstr] = 'job string'
- expected_args = hash_input['args'].take(3).map(&:to_s) + ['...']
+ expect(subject).not_to include('jobstr')
+ end
- expect(subject['args']).to eq(expected_args)
- end
+ it 'does not modify the input hash' do
+ input = { 'args' => [1, 'string'] }
+
+ output = Gitlab::Json.parse(described_class.new.call('INFO', now, 'my program', input))
+
+ expect(input['args']).to eq([1, 'string'])
+ expect(output['args']).to eq(['1', '[FILTERED]'])
end
- it 'properly flattens arguments to a String' do
- hash_input['args'] = [1, "test", 2, { 'test' => 1 }]
+ context 'job arguments' do
+ context 'when the arguments are bigger than the maximum allowed' do
+ it 'keeps args from the front until they exceed the limit' do
+ half_limit = Gitlab::Utils::LogLimitedArray::MAXIMUM_ARRAY_LENGTH / 2
+ hash_input['args'] = [1, 2, 'a' * half_limit, 'b' * half_limit, 3]
+
+ expected_args = hash_input['args'].take(3).map(&:to_s) + ['...']
+
+ expect(subject['args']).to eq(expected_args)
+ end
+ end
+
+ context 'when the job has non-integer arguments' do
+ it 'only allows permitted non-integer arguments through' do
+ hash_input['args'] = [1, 'foo', 'bar']
+ hash_input['class'] = 'WebHookWorker'
- expect(subject['args']).to eq(["1", "test", "2", %({"test"=>1})])
+ expect(subject['args']).to eq(['1', '[FILTERED]', 'bar'])
+ end
+ end
+
+ it 'properly flattens arguments to a String' do
+ hash_input['args'] = [1, "test", 2, { 'test' => 1 }]
+
+ expect(subject['args']).to eq(["1", "test", "2", %({"test"=>1})])
+ end
end
context 'when the job has a non-integer value for retry' do
diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
index a4bbb51baae..a456f814e78 100644
--- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
@@ -21,7 +21,10 @@ describe Gitlab::SidekiqLogging::StructuredLogger do
"correlation_id" => 'cid',
"error_message" => "wrong number of arguments (2 for 3)",
"error_class" => "ArgumentError",
- "error_backtrace" => []
+ "error_backtrace" => [],
+ "db_count" => 1,
+ "db_write_count" => 0,
+ "db_cached_count" => 0
}
end
@@ -197,7 +200,10 @@ describe Gitlab::SidekiqLogging::StructuredLogger do
let(:expected_end_payload_with_db) do
expected_end_payload.merge(
- 'db_duration_s' => a_value >= 0.1
+ 'db_duration_s' => a_value >= 0.1,
+ 'db_count' => 1,
+ 'db_cached_count' => 0,
+ 'db_write_count' => 0
)
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb
index 5f80ef9538a..1d45b70ec3e 100644
--- a/spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb
@@ -47,8 +47,11 @@ describe Gitlab::SidekiqMiddleware::ClientMetrics do
end
context "when workers are not attributed" do
- class TestNonAttributedWorker
- include Sidekiq::Worker
+ before do
+ stub_const('TestNonAttributedWorker', Class.new)
+ TestNonAttributedWorker.class_eval do
+ include Sidekiq::Worker
+ end
end
it_behaves_like "a metrics client middleware" do
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/client_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/client_spec.rb
index 9c7f6638913..a1e4cbb1e31 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/client_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/client_spec.rb
@@ -31,14 +31,51 @@ describe Gitlab::SidekiqMiddleware::DuplicateJobs::Client, :clean_gitlab_redis_q
expect(job3['duplicate-of']).to eq(job1['jid'])
end
- it "does not mark a job that's scheduled in the future as a duplicate" do
- TestDeduplicationWorker.perform_async('args1')
- TestDeduplicationWorker.perform_at(1.day.from_now, 'args1')
- TestDeduplicationWorker.perform_in(3.hours, 'args1')
+ context 'without scheduled deduplication' do
+ it "does not mark a job that's scheduled in the future as a duplicate" do
+ TestDeduplicationWorker.perform_async('args1')
+ TestDeduplicationWorker.perform_at(1.day.from_now, 'args1')
+ TestDeduplicationWorker.perform_in(3.hours, 'args1')
- duplicates = TestDeduplicationWorker.jobs.map { |job| job['duplicate-of'] }
+ duplicates = TestDeduplicationWorker.jobs.map { |job| job['duplicate-of'] }
- expect(duplicates).to all(be_nil)
+ expect(duplicates).to all(be_nil)
+ end
+ end
+
+ context 'with scheduled deduplication' do
+ let(:scheduled_worker_class) do
+ Class.new do
+ def self.name
+ 'TestDeduplicationWorker'
+ end
+
+ include ApplicationWorker
+
+ deduplicate :until_executing, including_scheduled: true
+
+ def perform(*args)
+ end
+ end
+ end
+
+ before do
+ stub_const('TestDeduplicationWorker', scheduled_worker_class)
+ end
+
+ it 'adds a correct duplicate tag to the jobs', :aggregate_failures do
+ TestDeduplicationWorker.perform_async('args1')
+ TestDeduplicationWorker.perform_at(1.day.from_now, 'args1')
+ TestDeduplicationWorker.perform_in(3.hours, 'args1')
+ TestDeduplicationWorker.perform_in(3.hours, 'args2')
+
+ job1, job2, job3, job4 = TestDeduplicationWorker.jobs
+
+ expect(job1['duplicate-of']).to be_nil
+ expect(job2['duplicate-of']).to eq(job1['jid'])
+ expect(job3['duplicate-of']).to eq(job1['jid'])
+ expect(job4['duplicate-of']).to be_nil
+ end
end
end
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
index 929df0a7ffb..13c86563be7 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
@@ -93,6 +93,25 @@ describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gitlab_r
end
end
+ describe '#scheduled?' do
+ it 'returns false for non-scheduled jobs' do
+ expect(duplicate_job.scheduled?).to be(false)
+ end
+
+ context 'scheduled jobs' do
+ let(:job) do
+ { 'class' => 'AuthorizedProjectsWorker',
+ 'args' => [1],
+ 'jid' => '123',
+ 'at' => 42 }
+ end
+
+ it 'returns true' do
+ expect(duplicate_job.scheduled?).to be(true)
+ end
+ end
+ end
+
describe '#duplicate?' do
it "raises an error if the check wasn't performed" do
expect { duplicate_job.duplicate? }.to raise_error /Call `#check!` first/
@@ -112,28 +131,23 @@ describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gitlab_r
end
end
- describe 'droppable?' do
- where(:idempotent, :duplicate, :prevent_deduplication) do
- # [true, false].repeated_permutation(3)
- [[true, true, true],
- [true, true, false],
- [true, false, true],
- [true, false, false],
- [false, true, true],
- [false, true, false],
- [false, false, true],
- [false, false, false]]
+ describe '#droppable?' do
+ where(:idempotent, :prevent_deduplication) do
+ # [true, false].repeated_permutation(2)
+ [[true, true],
+ [true, false],
+ [false, true],
+ [false, false]]
end
with_them do
before do
allow(AuthorizedProjectsWorker).to receive(:idempotent?).and_return(idempotent)
- allow(duplicate_job).to receive(:duplicate?).and_return(duplicate)
stub_feature_flags("disable_#{queue}_deduplication" => prevent_deduplication)
end
it 'is droppable when all conditions are met' do
- if idempotent && duplicate && !prevent_deduplication
+ if idempotent && !prevent_deduplication
expect(duplicate_job).to be_droppable
else
expect(duplicate_job).not_to be_droppable
@@ -142,6 +156,31 @@ describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gitlab_r
end
end
+ describe '#scheduled_at' do
+ let(:scheduled_at) { 42 }
+ let(:job) do
+ { 'class' => 'AuthorizedProjectsWorker',
+ 'args' => [1],
+ 'jid' => '123',
+ 'at' => scheduled_at }
+ end
+
+ it 'returns when the job is scheduled at' do
+ expect(duplicate_job.scheduled_at).to eq(scheduled_at)
+ end
+ end
+
+ describe '#options' do
+ let(:worker_options) { { foo: true } }
+
+ it 'returns worker options' do
+ allow(AuthorizedProjectsWorker).to(
+ receive(:get_deduplication_options).and_return(worker_options))
+
+ expect(duplicate_job.options).to eq(worker_options)
+ end
+ end
+
def set_idempotency_key(key, value = '1')
Sidekiq.redis { |r| r.set(key, value) }
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing_spec.rb
index 31b51260ebd..eb8b0a951a8 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing_spec.rb
@@ -1,6 +1,7 @@
# frozen_string_literal: true
require 'fast_spec_helper'
+require 'timecop'
describe Gitlab::SidekiqMiddleware::DuplicateJobs::Strategies::UntilExecuting do
let(:fake_duplicate_job) do
@@ -15,28 +16,90 @@ describe Gitlab::SidekiqMiddleware::DuplicateJobs::Strategies::UntilExecuting do
end
it 'checks for duplicates before yielding' do
- expect(fake_duplicate_job).to receive(:check!).ordered.and_return('a jid')
+ expect(fake_duplicate_job).to receive(:scheduled?).twice.ordered.and_return(false)
+ expect(fake_duplicate_job).to(
+ receive(:check!)
+ .with(Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob::DUPLICATE_KEY_TTL)
+ .ordered
+ .and_return('a jid'))
expect(fake_duplicate_job).to receive(:duplicate?).ordered.and_return(false)
- expect(fake_duplicate_job).to receive(:droppable?).ordered.and_return(false)
expect { |b| strategy.schedule({}, &b) }.to yield_control
end
- it 'adds the jid of the existing job to the job hash' do
- allow(fake_duplicate_job).to receive(:check!).and_return('the jid')
- allow(fake_duplicate_job).to receive(:droppable?).and_return(true)
- job_hash = {}
+ it 'checks worker options for scheduled jobs' do
+ expect(fake_duplicate_job).to receive(:scheduled?).ordered.and_return(true)
+ expect(fake_duplicate_job).to receive(:options).ordered.and_return({})
+ expect(fake_duplicate_job).not_to receive(:check!)
- expect(fake_duplicate_job).to receive(:duplicate?).and_return(true)
- expect(fake_duplicate_job).to receive(:existing_jid).and_return('the jid')
+ expect { |b| strategy.schedule({}, &b) }.to yield_control
+ end
+
+ context 'job marking' do
+ it 'adds the jid of the existing job to the job hash' do
+ allow(fake_duplicate_job).to receive(:scheduled?).and_return(false)
+ allow(fake_duplicate_job).to receive(:check!).and_return('the jid')
+ allow(fake_duplicate_job).to receive(:droppable?).and_return(true)
+ job_hash = {}
- strategy.schedule(job_hash) {}
+ expect(fake_duplicate_job).to receive(:duplicate?).and_return(true)
+ expect(fake_duplicate_job).to receive(:existing_jid).and_return('the jid')
- expect(job_hash).to include('duplicate-of' => 'the jid')
+ strategy.schedule(job_hash) {}
+
+ expect(job_hash).to include('duplicate-of' => 'the jid')
+ end
+
+ context 'scheduled jobs' do
+ let(:time_diff) { 1.minute }
+
+ context 'scheduled in the past' do
+ it 'adds the jid of the existing job to the job hash' do
+ allow(fake_duplicate_job).to receive(:scheduled?).twice.and_return(true)
+ allow(fake_duplicate_job).to receive(:scheduled_at).and_return(Time.now - time_diff)
+ allow(fake_duplicate_job).to receive(:options).and_return({ including_scheduled: true })
+ allow(fake_duplicate_job).to(
+ receive(:check!)
+ .with(Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob::DUPLICATE_KEY_TTL)
+ .and_return('the jid'))
+ allow(fake_duplicate_job).to receive(:droppable?).and_return(true)
+ job_hash = {}
+
+ expect(fake_duplicate_job).to receive(:duplicate?).and_return(true)
+ expect(fake_duplicate_job).to receive(:existing_jid).and_return('the jid')
+
+ strategy.schedule(job_hash) {}
+
+ expect(job_hash).to include('duplicate-of' => 'the jid')
+ end
+ end
+
+ context 'scheduled in the future' do
+ it 'adds the jid of the existing job to the job hash' do
+ Timecop.freeze do
+ allow(fake_duplicate_job).to receive(:scheduled?).twice.and_return(true)
+ allow(fake_duplicate_job).to receive(:scheduled_at).and_return(Time.now + time_diff)
+ allow(fake_duplicate_job).to receive(:options).and_return({ including_scheduled: true })
+ allow(fake_duplicate_job).to(
+ receive(:check!).with(time_diff.to_i).and_return('the jid'))
+ allow(fake_duplicate_job).to receive(:droppable?).and_return(true)
+ job_hash = {}
+
+ expect(fake_duplicate_job).to receive(:duplicate?).and_return(true)
+ expect(fake_duplicate_job).to receive(:existing_jid).and_return('the jid')
+
+ strategy.schedule(job_hash) {}
+
+ expect(job_hash).to include('duplicate-of' => 'the jid')
+ end
+ end
+ end
+ end
end
context "when the job is droppable" do
before do
+ allow(fake_duplicate_job).to receive(:scheduled?).and_return(false)
allow(fake_duplicate_job).to receive(:check!).and_return('the jid')
allow(fake_duplicate_job).to receive(:duplicate?).and_return(true)
allow(fake_duplicate_job).to receive(:existing_jid).and_return('the jid')
@@ -52,7 +115,7 @@ describe Gitlab::SidekiqMiddleware::DuplicateJobs::Strategies::UntilExecuting do
expect(schedule_result).to be(false)
end
- it 'logs that the job wass dropped' do
+ it 'logs that the job was dropped' do
fake_logger = instance_double(Gitlab::SidekiqLogging::DeduplicationLogger)
expect(Gitlab::SidekiqLogging::DeduplicationLogger).to receive(:instance).and_return(fake_logger)
diff --git a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
index 3214bd758e7..4b7baea25e8 100644
--- a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
@@ -31,7 +31,11 @@ describe Gitlab::SidekiqMiddleware::ServerMetrics do
let(:gitaly_seconds_metric) { double('gitaly seconds metric') }
let(:failed_total_metric) { double('failed total metric') }
let(:retried_total_metric) { double('retried total metric') }
+ let(:redis_requests_total) { double('redis calls total metric') }
let(:running_jobs_metric) { double('running jobs metric') }
+ let(:redis_seconds_metric) { double('redis seconds metric') }
+ let(:elasticsearch_seconds_metric) { double('elasticsearch seconds metric') }
+ let(:elasticsearch_requests_total) { double('elasticsearch calls total metric') }
before do
allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_jobs_queue_duration_seconds, anything, anything, anything).and_return(queue_duration_seconds)
@@ -39,8 +43,12 @@ describe Gitlab::SidekiqMiddleware::ServerMetrics do
allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_jobs_cpu_seconds, anything, anything, anything).and_return(user_execution_seconds_metric)
allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_jobs_db_seconds, anything, anything, anything).and_return(db_seconds_metric)
allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_jobs_gitaly_seconds, anything, anything, anything).and_return(gitaly_seconds_metric)
+ allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_redis_requests_duration_seconds, anything, anything, anything).and_return(redis_seconds_metric)
+ allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_elasticsearch_requests_duration_seconds, anything, anything, anything).and_return(elasticsearch_seconds_metric)
allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_jobs_failed_total, anything).and_return(failed_total_metric)
allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_jobs_retried_total, anything).and_return(retried_total_metric)
+ allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_redis_requests_total, anything).and_return(redis_requests_total)
+ allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_elasticsearch_requests_total, anything).and_return(elasticsearch_requests_total)
allow(Gitlab::Metrics).to receive(:gauge).with(:sidekiq_running_jobs, anything, {}, :all).and_return(running_jobs_metric)
allow(Gitlab::Metrics).to receive(:gauge).with(:sidekiq_concurrency, anything, {}, :all).and_return(concurrency_metric)
@@ -69,21 +77,35 @@ describe Gitlab::SidekiqMiddleware::ServerMetrics do
let(:db_duration) { 3 }
let(:gitaly_duration) { 4 }
+ let(:redis_calls) { 2 }
+ let(:redis_duration) { 0.01 }
+
+ let(:elasticsearch_calls) { 8 }
+ let(:elasticsearch_duration) { 0.54 }
+
before do
allow(subject).to receive(:get_thread_cputime).and_return(thread_cputime_before, thread_cputime_after)
allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(monotonic_time_before, monotonic_time_after)
allow(Gitlab::InstrumentationHelper).to receive(:queue_duration_for_job).with(job).and_return(queue_duration_for_job)
allow(ActiveRecord::LogSubscriber).to receive(:runtime).and_return(db_duration * 1000)
- allow(subject).to receive(:get_gitaly_time).and_return(gitaly_duration)
-
- expect(running_jobs_metric).to receive(:increment).with(labels, 1)
- expect(running_jobs_metric).to receive(:increment).with(labels, -1)
- expect(queue_duration_seconds).to receive(:observe).with(labels, queue_duration_for_job) if queue_duration_for_job
- expect(user_execution_seconds_metric).to receive(:observe).with(labels_with_job_status, thread_cputime_duration)
- expect(db_seconds_metric).to receive(:observe).with(labels_with_job_status, db_duration)
- expect(gitaly_seconds_metric).to receive(:observe).with(labels_with_job_status, gitaly_duration)
- expect(completion_seconds_metric).to receive(:observe).with(labels_with_job_status, monotonic_time_duration)
+ job[:gitaly_duration_s] = gitaly_duration
+ job[:redis_calls] = redis_calls
+ job[:redis_duration_s] = redis_duration
+
+ job[:elasticsearch_calls] = elasticsearch_calls
+ job[:elasticsearch_duration_s] = elasticsearch_duration
+
+ allow(running_jobs_metric).to receive(:increment)
+ allow(redis_requests_total).to receive(:increment)
+ allow(elasticsearch_requests_total).to receive(:increment)
+ allow(queue_duration_seconds).to receive(:observe)
+ allow(user_execution_seconds_metric).to receive(:observe)
+ allow(db_seconds_metric).to receive(:observe)
+ allow(gitaly_seconds_metric).to receive(:observe)
+ allow(completion_seconds_metric).to receive(:observe)
+ allow(redis_seconds_metric).to receive(:observe)
+ allow(elasticsearch_seconds_metric).to receive(:observe)
end
it 'yields block' do
@@ -91,6 +113,18 @@ describe Gitlab::SidekiqMiddleware::ServerMetrics do
end
it 'sets queue specific metrics' do
+ expect(running_jobs_metric).to receive(:increment).with(labels, -1)
+ expect(running_jobs_metric).to receive(:increment).with(labels, 1)
+ expect(queue_duration_seconds).to receive(:observe).with(labels, queue_duration_for_job) if queue_duration_for_job
+ expect(user_execution_seconds_metric).to receive(:observe).with(labels_with_job_status, thread_cputime_duration)
+ expect(db_seconds_metric).to receive(:observe).with(labels_with_job_status, db_duration)
+ expect(gitaly_seconds_metric).to receive(:observe).with(labels_with_job_status, gitaly_duration)
+ expect(completion_seconds_metric).to receive(:observe).with(labels_with_job_status, monotonic_time_duration)
+ expect(redis_seconds_metric).to receive(:observe).with(labels_with_job_status, redis_duration)
+ expect(elasticsearch_seconds_metric).to receive(:observe).with(labels_with_job_status, elasticsearch_duration)
+ expect(redis_requests_total).to receive(:increment).with(labels_with_job_status, redis_calls)
+ expect(elasticsearch_requests_total).to receive(:increment).with(labels_with_job_status, elasticsearch_calls)
+
subject.call(worker, job, :test) { nil }
end
@@ -144,9 +178,13 @@ describe Gitlab::SidekiqMiddleware::ServerMetrics do
end
context "when workers are not attributed" do
- class TestNonAttributedWorker
- include Sidekiq::Worker
+ before do
+ stub_const('TestNonAttributedWorker', Class.new)
+ TestNonAttributedWorker.class_eval do
+ include Sidekiq::Worker
+ end
end
+
let(:worker) { TestNonAttributedWorker.new }
let(:labels) { default_labels.merge(urgency: "") }
diff --git a/spec/lib/gitlab/sidekiq_middleware_spec.rb b/spec/lib/gitlab/sidekiq_middleware_spec.rb
index 6fe61fb42a5..5ca0abeb132 100644
--- a/spec/lib/gitlab/sidekiq_middleware_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware_spec.rb
@@ -13,7 +13,7 @@ describe Gitlab::SidekiqMiddleware do
def perform(_arg)
Gitlab::SafeRequestStore['gitaly_call_actual'] = 1
- Gitlab::GitalyClient.query_time = 5
+ Gitlab::SafeRequestStore[:gitaly_query_time] = 5
end
end
end
diff --git a/spec/lib/gitlab/sidekiq_versioning/manager_spec.rb b/spec/lib/gitlab/sidekiq_versioning/manager_spec.rb
index 2aa7d1fd6d8..a528ce201a2 100644
--- a/spec/lib/gitlab/sidekiq_versioning/manager_spec.rb
+++ b/spec/lib/gitlab/sidekiq_versioning/manager_spec.rb
@@ -16,7 +16,8 @@ describe Gitlab::SidekiqVersioning::Manager do
expect(queues).to include('post_receive')
expect(queues).to include('repository_fork')
expect(queues).to include('cronjob')
- expect(queues).to include('cronjob:stuck_import_jobs')
+ expect(queues).to include('cronjob:import_stuck_project_import_jobs')
+ expect(queues).to include('cronjob:jira_import_stuck_jira_import_jobs')
expect(queues).to include('cronjob:stuck_merge_jobs')
expect(queues).to include('unknown')
end
diff --git a/spec/lib/gitlab/sourcegraph_spec.rb b/spec/lib/gitlab/sourcegraph_spec.rb
index e081ae32175..ef4008960a9 100644
--- a/spec/lib/gitlab/sourcegraph_spec.rb
+++ b/spec/lib/gitlab/sourcegraph_spec.rb
@@ -7,7 +7,7 @@ describe Gitlab::Sourcegraph do
let(:feature_scope) { true }
before do
- Feature.enable(:sourcegraph, feature_scope)
+ stub_feature_flags(sourcegraph: feature_scope)
end
describe '.feature_conditional?' do
diff --git a/spec/lib/gitlab/suggestions/commit_message_spec.rb b/spec/lib/gitlab/suggestions/commit_message_spec.rb
new file mode 100644
index 00000000000..0774fc80528
--- /dev/null
+++ b/spec/lib/gitlab/suggestions/commit_message_spec.rb
@@ -0,0 +1,87 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Suggestions::CommitMessage do
+ def create_suggestion(file_path, new_line, to_content)
+ position = Gitlab::Diff::Position.new(old_path: file_path,
+ new_path: file_path,
+ old_line: nil,
+ new_line: new_line,
+ diff_refs: merge_request.diff_refs)
+
+ diff_note = create(:diff_note_on_merge_request,
+ noteable: merge_request,
+ position: position,
+ project: project)
+
+ create(:suggestion,
+ :content_from_repo,
+ note: diff_note,
+ to_content: to_content)
+ end
+
+ let_it_be(:user) do
+ create(:user, :commit_email, name: 'Test User', username: 'test.user')
+ end
+
+ let_it_be(:project) do
+ create(:project, :repository, path: 'project-1', name: 'Project_1')
+ end
+
+ let_it_be(:merge_request) do
+ create(:merge_request, source_project: project, target_project: project)
+ end
+
+ let_it_be(:suggestion_set) do
+ suggestion1 = create_suggestion('files/ruby/popen.rb', 9, '*** SUGGESTION 1 ***')
+ suggestion2 = create_suggestion('files/ruby/popen.rb', 13, '*** SUGGESTION 2 ***')
+ suggestion3 = create_suggestion('files/ruby/regex.rb', 22, '*** SUGGESTION 3 ***')
+
+ Gitlab::Suggestions::SuggestionSet.new([suggestion1, suggestion2, suggestion3])
+ end
+
+ describe '#message' do
+ before do
+ # Updating the suggestion_commit_message on a project shared across specs
+ # avoids recreating the repository for each spec.
+ project.update!(suggestion_commit_message: message)
+ end
+
+ context 'when a custom commit message is not specified' do
+ let(:expected_message) { 'Apply 3 suggestion(s) to 2 file(s)' }
+
+ context 'and is nil' do
+ let(:message) { nil }
+
+ it 'uses the default commit message' do
+ expect(described_class
+ .new(user, suggestion_set)
+ .message).to eq(expected_message)
+ end
+ end
+
+ context 'and is an empty string' do
+ let(:message) { '' }
+
+ it 'uses the default commit message' do
+ expect(described_class
+ .new(user, suggestion_set)
+ .message).to eq(expected_message)
+ end
+ end
+ end
+
+ context 'is specified and includes all placeholders' do
+ let(:message) do
+ '*** %{branch_name} %{files_count} %{file_paths} %{project_name} %{project_path} %{user_full_name} %{username} %{suggestions_count} ***'
+ end
+
+ it 'generates a custom commit message' do
+ expect(Gitlab::Suggestions::CommitMessage
+ .new(user, suggestion_set)
+ .message).to eq('*** master 2 files/ruby/popen.rb, files/ruby/regex.rb Project_1 project-1 Test User test.user 3 ***')
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/suggestions/file_suggestion_spec.rb b/spec/lib/gitlab/suggestions/file_suggestion_spec.rb
new file mode 100644
index 00000000000..6fbbad017c5
--- /dev/null
+++ b/spec/lib/gitlab/suggestions/file_suggestion_spec.rb
@@ -0,0 +1,241 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Suggestions::FileSuggestion do
+ def create_suggestion(new_line, to_content)
+ position = Gitlab::Diff::Position.new(old_path: file_path,
+ new_path: file_path,
+ old_line: nil,
+ new_line: new_line,
+ diff_refs: merge_request.diff_refs)
+
+ diff_note = create(:diff_note_on_merge_request,
+ noteable: merge_request,
+ position: position,
+ project: project)
+
+ create(:suggestion,
+ :content_from_repo,
+ note: diff_note,
+ to_content: to_content)
+ end
+
+ let_it_be(:user) { create(:user) }
+
+ let_it_be(:file_path) { 'files/ruby/popen.rb'}
+
+ let_it_be(:project) { create(:project, :repository) }
+
+ let_it_be(:merge_request) do
+ create(:merge_request, source_project: project, target_project: project)
+ end
+
+ let_it_be(:suggestion1) do
+ create_suggestion(9, " *** SUGGESTION 1 ***\n")
+ end
+
+ let_it_be(:suggestion2) do
+ create_suggestion(15, " *** SUGGESTION 2 ***\n")
+ end
+
+ let(:file_suggestion) { described_class.new }
+
+ describe '#add_suggestion' do
+ it 'succeeds when adding a suggestion for the same file as the original' do
+ file_suggestion.add_suggestion(suggestion1)
+
+ expect { file_suggestion.add_suggestion(suggestion2) }.not_to raise_error
+ end
+
+ it 'raises an error when adding a suggestion for a different file' do
+ allow(suggestion2)
+ .to(receive_message_chain(:diff_file, :file_path)
+ .and_return('path/to/different/file'))
+
+ file_suggestion.add_suggestion(suggestion1)
+
+ expect { file_suggestion.add_suggestion(suggestion2) }.to(
+ raise_error(described_class::SuggestionForDifferentFileError)
+ )
+ end
+ end
+
+ describe '#line_conflict' do
+ def stub_suggestions(line_index_spans)
+ fake_suggestions = line_index_spans.map do |span|
+ double("Suggestion",
+ from_line_index: span[:from_line_index],
+ to_line_index: span[:to_line_index])
+ end
+
+ allow(file_suggestion).to(receive(:suggestions).and_return(fake_suggestions))
+ end
+
+ context 'when line ranges do not overlap' do
+ it 'return false' do
+ stub_suggestions(
+ [
+ {
+ from_line_index: 0,
+ to_line_index: 10
+ },
+ {
+ from_line_index: 11,
+ to_line_index: 20
+ }
+ ]
+ )
+
+ expect(file_suggestion.line_conflict?).to be(false)
+ end
+ end
+
+ context 'when line ranges are identical' do
+ it 'returns true' do
+ stub_suggestions(
+ [
+ {
+ from_line_index: 0,
+ to_line_index: 10
+ },
+ {
+ from_line_index: 0,
+ to_line_index: 10
+ }
+ ]
+ )
+
+ expect(file_suggestion.line_conflict?).to be(true)
+ end
+ end
+
+ context 'when one range starts, and the other ends, on the same line' do
+ it 'returns true' do
+ stub_suggestions(
+ [
+ {
+ from_line_index: 0,
+ to_line_index: 10
+ },
+ {
+ from_line_index: 10,
+ to_line_index: 20
+ }
+ ]
+ )
+
+ expect(file_suggestion.line_conflict?).to be(true)
+ end
+ end
+
+ context 'when one line range contains the other' do
+ it 'returns true' do
+ stub_suggestions(
+ [
+ {
+ from_line_index: 0,
+ to_line_index: 10
+ },
+ {
+ from_line_index: 5,
+ to_line_index: 7
+ }
+ ]
+ )
+
+ expect(file_suggestion.line_conflict?).to be(true)
+ end
+ end
+
+ context 'when line ranges overlap' do
+ it 'returns true' do
+ stub_suggestions(
+ [
+ {
+ from_line_index: 0,
+ to_line_index: 10
+ },
+ {
+ from_line_index: 8,
+ to_line_index: 15
+ }
+ ]
+ )
+
+ expect(file_suggestion.line_conflict?).to be(true)
+ end
+ end
+
+ context 'when no suggestions have been added' do
+ it 'returns false' do
+ expect(file_suggestion.line_conflict?).to be(false)
+ end
+ end
+ end
+
+ describe '#new_content' do
+ it 'returns a blob with the suggestions applied to it' do
+ file_suggestion.add_suggestion(suggestion1)
+ file_suggestion.add_suggestion(suggestion2)
+
+ expected_content = <<-CONTENT.strip_heredoc
+ require 'fileutils'
+ require 'open3'
+
+ module Popen
+ extend self
+
+ def popen(cmd, path=nil)
+ unless cmd.is_a?(Array)
+ *** SUGGESTION 1 ***
+ end
+
+ path ||= Dir.pwd
+
+ vars = {
+ *** SUGGESTION 2 ***
+ }
+
+ options = {
+ chdir: path
+ }
+
+ unless File.directory?(path)
+ FileUtils.mkdir_p(path)
+ end
+
+ @cmd_output = ""
+ @cmd_status = 0
+
+ Open3.popen3(vars, *cmd, options) do |stdin, stdout, stderr, wait_thr|
+ @cmd_output << stdout.read
+ @cmd_output << stderr.read
+ @cmd_status = wait_thr.value.exitstatus
+ end
+
+ return @cmd_output, @cmd_status
+ end
+ end
+ CONTENT
+
+ expect(file_suggestion.new_content).to eq(expected_content)
+ end
+
+ it 'returns an empty string when no suggestions have been added' do
+ expect(file_suggestion.new_content).to eq('')
+ end
+ end
+
+ describe '#file_path' do
+ it 'returns the path of the file associated with the suggestions' do
+ file_suggestion.add_suggestion(suggestion1)
+
+ expect(file_suggestion.file_path).to eq(file_path)
+ end
+
+ it 'returns nil if no suggestions have been added' do
+ expect(file_suggestion.file_path).to be(nil)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/suggestions/suggestion_set_spec.rb b/spec/lib/gitlab/suggestions/suggestion_set_spec.rb
new file mode 100644
index 00000000000..8c61e6c42a6
--- /dev/null
+++ b/spec/lib/gitlab/suggestions/suggestion_set_spec.rb
@@ -0,0 +1,110 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Suggestions::SuggestionSet do
+ def create_suggestion(file_path, new_line, to_content)
+ position = Gitlab::Diff::Position.new(old_path: file_path,
+ new_path: file_path,
+ old_line: nil,
+ new_line: new_line,
+ diff_refs: merge_request.diff_refs)
+
+ diff_note = create(:diff_note_on_merge_request,
+ noteable: merge_request,
+ position: position,
+ project: project)
+
+ create(:suggestion,
+ :content_from_repo,
+ note: diff_note,
+ to_content: to_content)
+ end
+
+ let_it_be(:user) { create(:user) }
+
+ let_it_be(:project) { create(:project, :repository) }
+
+ let_it_be(:merge_request) do
+ create(:merge_request, source_project: project, target_project: project)
+ end
+
+ let_it_be(:suggestion) { create(:suggestion)}
+
+ let_it_be(:suggestion2) do
+ create_suggestion('files/ruby/popen.rb', 13, "*** SUGGESTION 2 ***")
+ end
+
+ let_it_be(:suggestion3) do
+ create_suggestion('files/ruby/regex.rb', 22, "*** SUGGESTION 3 ***")
+ end
+
+ let_it_be(:unappliable_suggestion) { create(:suggestion, :unappliable) }
+
+ let(:suggestion_set) { described_class.new([suggestion]) }
+
+ describe '#project' do
+ it 'returns the project associated with the suggestions' do
+ expected_project = suggestion.project
+
+ expect(suggestion_set.project).to be(expected_project)
+ end
+ end
+
+ describe '#branch' do
+ it 'returns the branch associated with the suggestions' do
+ expected_branch = suggestion.branch
+
+ expect(suggestion_set.branch).to be(expected_branch)
+ end
+ end
+
+ describe '#valid?' do
+ it 'returns true if no errors are found' do
+ expect(suggestion_set.valid?).to be(true)
+ end
+
+ it 'returns false if an error is found' do
+ suggestion_set = described_class.new([unappliable_suggestion])
+
+ expect(suggestion_set.valid?).to be(false)
+ end
+ end
+
+ describe '#error_message' do
+ it 'returns an error message if an error is found' do
+ suggestion_set = described_class.new([unappliable_suggestion])
+
+ expect(suggestion_set.error_message).to be_a(String)
+ end
+
+ it 'returns nil if no errors are found' do
+ expect(suggestion_set.error_message).to be(nil)
+ end
+ end
+
+ describe '#actions' do
+ it 'returns an array of hashes with proper key/value pairs' do
+ first_action = suggestion_set.actions.first
+
+ file_path, file_suggestion = suggestion_set
+ .send(:suggestions_per_file).first
+
+ expect(first_action[:action]).to be('update')
+ expect(first_action[:file_path]).to eq(file_path)
+ expect(first_action[:content]).to eq(file_suggestion.new_content)
+ end
+ end
+
+ describe '#file_paths' do
+ it 'returns an array of unique file paths associated with the suggestions' do
+ suggestion_set = described_class.new([suggestion, suggestion2, suggestion3])
+
+ expected_paths = %w(files/ruby/popen.rb files/ruby/regex.rb)
+
+ actual_paths = suggestion_set.file_paths
+
+ expect(actual_paths.sort).to eq(expected_paths)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/tracking_spec.rb b/spec/lib/gitlab/tracking_spec.rb
index 2e65f98a085..82828c2dcce 100644
--- a/spec/lib/gitlab/tracking_spec.rb
+++ b/spec/lib/gitlab/tracking_spec.rb
@@ -27,14 +27,13 @@ describe Gitlab::Tracking do
expect(subject.snowplow_options(nil)).to match(expected_fields)
end
- it 'enables features using feature flags' do
- stub_feature_flags(additional_snowplow_tracking: :__group__)
- addition_feature_fields = {
+ it 'when feature flag is disabled' do
+ stub_feature_flags(additional_snowplow_tracking: false)
+
+ expect(subject.snowplow_options(nil)).to include(
formTracking: false,
linkClickTracking: false
- }
-
- expect(subject.snowplow_options(:_group_)).to include(addition_feature_fields)
+ )
end
end
diff --git a/spec/lib/gitlab/url_builder_spec.rb b/spec/lib/gitlab/url_builder_spec.rb
index 66826bcb3b1..e91d17bfbe8 100644
--- a/spec/lib/gitlab/url_builder_spec.rb
+++ b/spec/lib/gitlab/url_builder_spec.rb
@@ -96,6 +96,38 @@ describe Gitlab::UrlBuilder do
end
end
+ context 'when passing a Wiki' do
+ let(:wiki) { build_stubbed(:project_wiki) }
+
+ describe '#wiki_url' do
+ it 'uses the default collection action' do
+ url = subject.wiki_url(wiki)
+
+ expect(url).to eq "#{Gitlab.config.gitlab.url}/#{wiki.project.full_path}/-/wikis/home"
+ end
+
+ it 'supports a custom collection action' do
+ url = subject.wiki_url(wiki, action: :pages)
+
+ expect(url).to eq "#{Gitlab.config.gitlab.url}/#{wiki.project.full_path}/-/wikis/pages"
+ end
+ end
+
+ describe '#wiki_page_url' do
+ it 'uses the default member action' do
+ url = subject.wiki_page_url(wiki, 'foo')
+
+ expect(url).to eq "#{Gitlab.config.gitlab.url}/#{wiki.project.full_path}/-/wikis/foo"
+ end
+
+ it 'supports a custom member action' do
+ url = subject.wiki_page_url(wiki, 'foo', action: :edit)
+
+ expect(url).to eq "#{Gitlab.config.gitlab.url}/#{wiki.project.full_path}/-/wikis/foo/edit"
+ end
+ end
+ end
+
context 'when passing a DesignManagement::Design' do
let(:design) { build_stubbed(:design) }
diff --git a/spec/lib/gitlab/usage_data_concerns/topology_spec.rb b/spec/lib/gitlab/usage_data_concerns/topology_spec.rb
new file mode 100644
index 00000000000..0428900690c
--- /dev/null
+++ b/spec/lib/gitlab/usage_data_concerns/topology_spec.rb
@@ -0,0 +1,220 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::UsageDataConcerns::Topology do
+ include UsageDataHelpers
+
+ describe '#topology_usage_data' do
+ subject { Class.new.extend(described_class).topology_usage_data }
+
+ before do
+ # this pins down time shifts when benchmarking durations
+ allow(Process).to receive(:clock_gettime).and_return(0)
+ end
+
+ context 'when embedded Prometheus server is enabled' do
+ before do
+ expect(Gitlab::Prometheus::Internal).to receive(:prometheus_enabled?).and_return(true)
+ expect(Gitlab::Prometheus::Internal).to receive(:uri).and_return('http://prom:9090')
+ end
+
+ it 'contains a topology element' do
+ allow_prometheus_queries
+
+ expect(subject).to have_key(:topology)
+ end
+
+ context 'tracking node metrics' do
+ it 'contains node level metrics for each instance' do
+ expect_prometheus_api_to(
+ receive_node_memory_query,
+ receive_node_cpu_count_query,
+ receive_node_service_memory_query,
+ receive_node_service_process_count_query
+ )
+
+ expect(subject[:topology]).to eq({
+ duration_s: 0,
+ nodes: [
+ {
+ node_memory_total_bytes: 512,
+ node_cpus: 8,
+ node_services: [
+ {
+ name: 'web',
+ process_count: 10,
+ process_memory_rss: 300,
+ process_memory_uss: 301,
+ process_memory_pss: 302
+ },
+ {
+ name: 'sidekiq',
+ process_count: 5,
+ process_memory_rss: 303
+ }
+ ]
+ },
+ {
+ node_memory_total_bytes: 1024,
+ node_cpus: 16,
+ node_services: [
+ {
+ name: 'sidekiq',
+ process_count: 15,
+ process_memory_rss: 400,
+ process_memory_pss: 401
+ },
+ {
+ name: 'redis',
+ process_count: 1,
+ process_memory_rss: 402
+ }
+ ]
+ }
+ ]
+ })
+ end
+ end
+
+ context 'and some node memory metrics are missing' do
+ it 'removes the respective entries' do
+ expect_prometheus_api_to(
+ receive_node_memory_query(result: []),
+ receive_node_cpu_count_query,
+ receive_node_service_memory_query,
+ receive_node_service_process_count_query
+ )
+
+ keys = subject[:topology][:nodes].flat_map(&:keys)
+ expect(keys).not_to include(:node_memory_total_bytes)
+ expect(keys).to include(:node_cpus, :node_services)
+ end
+ end
+
+ context 'and no results are found' do
+ it 'does not report anything' do
+ expect_prometheus_api_to receive(:aggregate).at_least(:once).and_return({})
+
+ expect(subject[:topology]).to eq({
+ duration_s: 0,
+ nodes: []
+ })
+ end
+ end
+
+ context 'and a connection error is raised' do
+ it 'does not report anything' do
+ expect_prometheus_api_to receive(:aggregate).and_raise('Connection failed')
+
+ expect(subject[:topology]).to eq({ duration_s: 0 })
+ end
+ end
+ end
+
+ context 'when embedded Prometheus server is disabled' do
+ it 'does not report anything' do
+ expect(Gitlab::Prometheus::Internal).to receive(:prometheus_enabled?).and_return(false)
+
+ expect(subject[:topology]).to eq({ duration_s: 0 })
+ end
+ end
+ end
+
+ def receive_node_memory_query(result: nil)
+ receive(:query)
+ .with(/node_memory_MemTotal_bytes/, an_instance_of(Hash))
+ .and_return(result || [
+ {
+ 'metric' => { 'instance' => 'instance1:8080' },
+ 'value' => [1000, '512']
+ },
+ {
+ 'metric' => { 'instance' => 'instance2:8090' },
+ 'value' => [1000, '1024']
+ }
+ ])
+ end
+
+ def receive_node_cpu_count_query(result: nil)
+ receive(:query)
+ .with(/node_cpu_seconds_total/, an_instance_of(Hash))
+ .and_return(result || [
+ {
+ 'metric' => { 'instance' => 'instance2:8090' },
+ 'value' => [1000, '16']
+ },
+ {
+ 'metric' => { 'instance' => 'instance1:8080' },
+ 'value' => [1000, '8']
+ }
+ ])
+ end
+
+ def receive_node_service_memory_query(result: nil)
+ receive(:query)
+ .with(/process_.+_memory_bytes/, an_instance_of(Hash))
+ .and_return(result || [
+ # instance 1: runs Puma + a small Sidekiq
+ {
+ 'metric' => { 'instance' => 'instance1:8080', 'job' => 'gitlab-rails', '__name__' => 'ruby_process_resident_memory_bytes' },
+ 'value' => [1000, '300']
+ },
+ {
+ 'metric' => { 'instance' => 'instance1:8080', 'job' => 'gitlab-rails', '__name__' => 'ruby_process_unique_memory_bytes' },
+ 'value' => [1000, '301']
+ },
+ {
+ 'metric' => { 'instance' => 'instance1:8080', 'job' => 'gitlab-rails', '__name__' => 'ruby_process_proportional_memory_bytes' },
+ 'value' => [1000, '302']
+ },
+ {
+ 'metric' => { 'instance' => 'instance1:8090', 'job' => 'gitlab-sidekiq', '__name__' => 'ruby_process_resident_memory_bytes' },
+ 'value' => [1000, '303']
+ },
+ # instance 2: runs a dedicated Sidekiq + Redis (which uses a different metric name)
+ {
+ 'metric' => { 'instance' => 'instance2:8090', 'job' => 'gitlab-sidekiq', '__name__' => 'ruby_process_resident_memory_bytes' },
+ 'value' => [1000, '400']
+ },
+ {
+ 'metric' => { 'instance' => 'instance2:8090', 'job' => 'gitlab-sidekiq', '__name__' => 'ruby_process_proportional_memory_bytes' },
+ 'value' => [1000, '401']
+ },
+ {
+ 'metric' => { 'instance' => 'instance2:9121', 'job' => 'redis', '__name__' => 'process_resident_memory_bytes' },
+ 'value' => [1000, '402']
+ }
+ ])
+ end
+
+ def receive_node_service_process_count_query(result: nil)
+ receive(:query)
+ .with(/process_start_time_seconds/, an_instance_of(Hash))
+ .and_return(result || [
+ # instance 1
+ {
+ 'metric' => { 'instance' => 'instance1:8080', 'job' => 'gitlab-rails' },
+ 'value' => [1000, '10']
+ },
+ {
+ 'metric' => { 'instance' => 'instance1:8090', 'job' => 'gitlab-sidekiq' },
+ 'value' => [1000, '5']
+ },
+ # instance 2
+ {
+ 'metric' => { 'instance' => 'instance2:8090', 'job' => 'gitlab-sidekiq' },
+ 'value' => [1000, '15']
+ },
+ {
+ 'metric' => { 'instance' => 'instance2:9121', 'job' => 'redis' },
+ 'value' => [1000, '1']
+ },
+ # unknown service => should be stripped out
+ {
+ 'metric' => { 'instance' => 'instance2:9000', 'job' => 'not-a-gitlab-service' },
+ 'value' => [1000, '42']
+ }
+ ])
+ end
+end
diff --git a/spec/lib/gitlab/usage_data_counters/search_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/search_counter_spec.rb
index 50a9f980dc7..35b0f9a67f4 100644
--- a/spec/lib/gitlab/usage_data_counters/search_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/search_counter_spec.rb
@@ -3,11 +3,21 @@
require 'spec_helper'
describe Gitlab::UsageDataCounters::SearchCounter, :clean_gitlab_redis_shared_state do
- it 'increments counter and return the total count' do
- expect(described_class.total_navbar_searches_count).to eq(0)
+ shared_examples_for 'usage counter with totals' do |counter|
+ it 'increments counter and returns total count' do
+ expect(described_class.read(counter)).to eq(0)
- 2.times { described_class.increment_navbar_searches_count }
+ 2.times { described_class.count(counter) }
- expect(described_class.total_navbar_searches_count).to eq(2)
+ expect(described_class.read(counter)).to eq(2)
+ end
+ end
+
+ context 'all_searches counter' do
+ it_behaves_like 'usage counter with totals', :all_searches
+ end
+
+ context 'navbar_searches counter' do
+ it_behaves_like 'usage counter with totals', :navbar_searches
end
end
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index 9c6aab10083..31176999333 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -6,637 +6,600 @@ describe Gitlab::UsageData, :aggregate_failures do
include UsageDataHelpers
before do
- allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
-
+ stub_usage_data_connections
stub_object_store_settings
end
- shared_examples "usage data execution" do
- describe '#data' do
- let!(:ud) { build(:usage_data) }
-
- before do
- allow(described_class).to receive(:grafana_embed_usage_data).and_return(2)
+ describe '#uncached_data' do
+ it 'ensures recorded_at is set before any other usage data calculation' do
+ %i(alt_usage_data redis_usage_data distinct_count count).each do |method|
+ expect(described_class).not_to receive(method)
end
+ expect(described_class).to receive(:recorded_at).and_raise(Exception.new('Stopped calculating recorded_at'))
- subject { described_class.data }
+ expect { described_class.uncached_data }.to raise_error('Stopped calculating recorded_at')
+ end
+ end
- it 'gathers usage data' do
- expect(subject.keys).to include(*UsageDataHelpers::USAGE_DATA_KEYS)
- end
+ describe '#data' do
+ let!(:ud) { build(:usage_data) }
- it 'gathers usage counts' do
- count_data = subject[:counts]
+ before do
+ allow(described_class).to receive(:grafana_embed_usage_data).and_return(2)
+ end
- expect(count_data[:boards]).to eq(1)
- expect(count_data[:projects]).to eq(4)
- expect(count_data.values_at(*UsageDataHelpers::SMAU_KEYS)).to all(be_an(Integer))
- expect(count_data.keys).to include(*UsageDataHelpers::COUNTS_KEYS)
- expect(UsageDataHelpers::COUNTS_KEYS - count_data.keys).to be_empty
- end
+ subject { described_class.data }
- it 'gathers projects data correctly' do
- count_data = subject[:counts]
-
- expect(count_data[:projects]).to eq(4)
- expect(count_data[:projects_asana_active]).to eq(0)
- expect(count_data[:projects_prometheus_active]).to eq(1)
- expect(count_data[:projects_jira_active]).to eq(4)
- expect(count_data[:projects_jira_server_active]).to eq(2)
- expect(count_data[:projects_jira_cloud_active]).to eq(2)
- expect(count_data[:jira_imports_projects_count]).to eq(2)
- expect(count_data[:jira_imports_total_imported_count]).to eq(3)
- expect(count_data[:jira_imports_total_imported_issues_count]).to eq(13)
- expect(count_data[:projects_slack_notifications_active]).to eq(2)
- expect(count_data[:projects_slack_slash_active]).to eq(1)
- expect(count_data[:projects_slack_active]).to eq(2)
- expect(count_data[:projects_slack_slash_commands_active]).to eq(1)
- expect(count_data[:projects_custom_issue_tracker_active]).to eq(1)
- expect(count_data[:projects_mattermost_active]).to eq(0)
- expect(count_data[:projects_with_repositories_enabled]).to eq(3)
- expect(count_data[:projects_with_error_tracking_enabled]).to eq(1)
- expect(count_data[:projects_with_alerts_service_enabled]).to eq(1)
- expect(count_data[:projects_with_prometheus_alerts]).to eq(2)
- expect(count_data[:issues_created_from_gitlab_error_tracking_ui]).to eq(1)
- expect(count_data[:issues_with_associated_zoom_link]).to eq(2)
- expect(count_data[:issues_using_zoom_quick_actions]).to eq(3)
- expect(count_data[:issues_with_embedded_grafana_charts_approx]).to eq(2)
- expect(count_data[:incident_issues]).to eq(4)
- expect(count_data[:issues_created_gitlab_alerts]).to eq(1)
- expect(count_data[:alert_bot_incident_issues]).to eq(4)
- expect(count_data[:incident_labeled_issues]).to eq(3)
-
- expect(count_data[:clusters_enabled]).to eq(6)
- expect(count_data[:project_clusters_enabled]).to eq(4)
- expect(count_data[:group_clusters_enabled]).to eq(1)
- expect(count_data[:instance_clusters_enabled]).to eq(1)
- expect(count_data[:clusters_disabled]).to eq(3)
- expect(count_data[:project_clusters_disabled]).to eq(1)
- expect(count_data[:group_clusters_disabled]).to eq(1)
- expect(count_data[:instance_clusters_disabled]).to eq(1)
- expect(count_data[:clusters_platforms_eks]).to eq(1)
- expect(count_data[:clusters_platforms_gke]).to eq(1)
- expect(count_data[:clusters_platforms_user]).to eq(1)
- expect(count_data[:clusters_applications_helm]).to eq(1)
- expect(count_data[:clusters_applications_ingress]).to eq(1)
- expect(count_data[:clusters_applications_cert_managers]).to eq(1)
- expect(count_data[:clusters_applications_crossplane]).to eq(1)
- expect(count_data[:clusters_applications_prometheus]).to eq(1)
- expect(count_data[:clusters_applications_runner]).to eq(1)
- expect(count_data[:clusters_applications_knative]).to eq(1)
- expect(count_data[:clusters_applications_elastic_stack]).to eq(1)
- expect(count_data[:grafana_integrated_projects]).to eq(2)
- expect(count_data[:clusters_applications_jupyter]).to eq(1)
- expect(count_data[:clusters_management_project]).to eq(1)
- end
+ it 'gathers usage data' do
+ expect(subject.keys).to include(*UsageDataHelpers::USAGE_DATA_KEYS)
+ end
- it 'gathers object store usage correctly' do
- expect(subject[:object_store]).to eq(
- { artifacts: { enabled: true, object_store: { enabled: true, direct_upload: true, background_upload: false, provider: "AWS" } },
- external_diffs: { enabled: false },
- lfs: { enabled: true, object_store: { enabled: false, direct_upload: true, background_upload: false, provider: "AWS" } },
- uploads: { enabled: nil, object_store: { enabled: false, direct_upload: true, background_upload: false, provider: "AWS" } },
- packages: { enabled: true, object_store: { enabled: false, direct_upload: false, background_upload: true, provider: "AWS" } } }
- )
- end
+ it 'gathers usage counts' do
+ count_data = subject[:counts]
- context 'with existing container expiration policies' do
- let_it_be(:disabled) { create(:container_expiration_policy, enabled: false) }
- let_it_be(:enabled) { create(:container_expiration_policy, enabled: true) }
+ expect(count_data[:boards]).to eq(1)
+ expect(count_data[:projects]).to eq(4)
+ expect(count_data.values_at(*UsageDataHelpers::SMAU_KEYS)).to all(be_an(Integer))
+ expect(count_data.keys).to include(*UsageDataHelpers::COUNTS_KEYS)
+ expect(UsageDataHelpers::COUNTS_KEYS - count_data.keys).to be_empty
+ end
- %i[keep_n cadence older_than].each do |attribute|
- ContainerExpirationPolicy.send("#{attribute}_options").keys.each do |value|
- let_it_be("container_expiration_policy_with_#{attribute}_set_to_#{value}") { create(:container_expiration_policy, attribute => value) }
- end
- end
+ it 'gathers projects data correctly' do
+ count_data = subject[:counts]
+
+ expect(count_data[:projects]).to eq(4)
+ expect(count_data[:projects_asana_active]).to eq(0)
+ expect(count_data[:projects_prometheus_active]).to eq(1)
+ expect(count_data[:projects_jira_active]).to eq(4)
+ expect(count_data[:projects_jira_server_active]).to eq(2)
+ expect(count_data[:projects_jira_cloud_active]).to eq(2)
+ expect(count_data[:jira_imports_projects_count]).to eq(2)
+ expect(count_data[:jira_imports_total_imported_count]).to eq(3)
+ expect(count_data[:jira_imports_total_imported_issues_count]).to eq(13)
+ expect(count_data[:projects_slack_notifications_active]).to eq(2)
+ expect(count_data[:projects_slack_slash_active]).to eq(1)
+ expect(count_data[:projects_slack_active]).to eq(2)
+ expect(count_data[:projects_slack_slash_commands_active]).to eq(1)
+ expect(count_data[:projects_custom_issue_tracker_active]).to eq(1)
+ expect(count_data[:projects_mattermost_active]).to eq(0)
+ expect(count_data[:projects_with_repositories_enabled]).to eq(3)
+ expect(count_data[:projects_with_error_tracking_enabled]).to eq(1)
+ expect(count_data[:projects_with_alerts_service_enabled]).to eq(1)
+ expect(count_data[:projects_with_prometheus_alerts]).to eq(2)
+ expect(count_data[:projects_with_terraform_reports]).to eq(2)
+ expect(count_data[:projects_with_terraform_states]).to eq(2)
+ expect(count_data[:terraform_reports]).to eq(6)
+ expect(count_data[:terraform_states]).to eq(3)
+ expect(count_data[:issues_created_from_gitlab_error_tracking_ui]).to eq(1)
+ expect(count_data[:issues_with_associated_zoom_link]).to eq(2)
+ expect(count_data[:issues_using_zoom_quick_actions]).to eq(3)
+ expect(count_data[:issues_with_embedded_grafana_charts_approx]).to eq(2)
+ expect(count_data[:incident_issues]).to eq(4)
+ expect(count_data[:issues_created_gitlab_alerts]).to eq(1)
+ expect(count_data[:issues_created_from_alerts]).to eq(3)
+ expect(count_data[:issues_created_manually_from_alerts]).to eq(1)
+ expect(count_data[:alert_bot_incident_issues]).to eq(4)
+ expect(count_data[:incident_labeled_issues]).to eq(3)
+
+ expect(count_data[:clusters_enabled]).to eq(6)
+ expect(count_data[:project_clusters_enabled]).to eq(4)
+ expect(count_data[:group_clusters_enabled]).to eq(1)
+ expect(count_data[:instance_clusters_enabled]).to eq(1)
+ expect(count_data[:clusters_disabled]).to eq(3)
+ expect(count_data[:project_clusters_disabled]).to eq(1)
+ expect(count_data[:group_clusters_disabled]).to eq(1)
+ expect(count_data[:instance_clusters_disabled]).to eq(1)
+ expect(count_data[:clusters_platforms_eks]).to eq(1)
+ expect(count_data[:clusters_platforms_gke]).to eq(1)
+ expect(count_data[:clusters_platforms_user]).to eq(1)
+ expect(count_data[:clusters_applications_helm]).to eq(1)
+ expect(count_data[:clusters_applications_ingress]).to eq(1)
+ expect(count_data[:clusters_applications_cert_managers]).to eq(1)
+ expect(count_data[:clusters_applications_crossplane]).to eq(1)
+ expect(count_data[:clusters_applications_prometheus]).to eq(1)
+ expect(count_data[:clusters_applications_runner]).to eq(1)
+ expect(count_data[:clusters_applications_knative]).to eq(1)
+ expect(count_data[:clusters_applications_elastic_stack]).to eq(1)
+ expect(count_data[:grafana_integrated_projects]).to eq(2)
+ expect(count_data[:clusters_applications_jupyter]).to eq(1)
+ expect(count_data[:clusters_management_project]).to eq(1)
+ end
+
+ it 'gathers object store usage correctly' do
+ expect(subject[:object_store]).to eq(
+ { artifacts: { enabled: true, object_store: { enabled: true, direct_upload: true, background_upload: false, provider: "AWS" } },
+ external_diffs: { enabled: false },
+ lfs: { enabled: true, object_store: { enabled: false, direct_upload: true, background_upload: false, provider: "AWS" } },
+ uploads: { enabled: nil, object_store: { enabled: false, direct_upload: true, background_upload: false, provider: "AWS" } },
+ packages: { enabled: true, object_store: { enabled: false, direct_upload: false, background_upload: true, provider: "AWS" } } }
+ )
+ end
+
+ it 'gathers topology data' do
+ expect(subject.keys).to include(:topology)
+ end
+
+ context 'with existing container expiration policies' do
+ let_it_be(:disabled) { create(:container_expiration_policy, enabled: false) }
+ let_it_be(:enabled) { create(:container_expiration_policy, enabled: true) }
- let(:inactive_policies) { ::ContainerExpirationPolicy.where(enabled: false) }
- let(:active_policies) { ::ContainerExpirationPolicy.active }
-
- subject { described_class.data[:counts] }
-
- it 'gathers usage data' do
- expect(subject[:projects_with_expiration_policy_enabled]).to eq 20
- expect(subject[:projects_with_expiration_policy_disabled]).to eq 1
-
- expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_unset]).to eq 14
- expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_1]).to eq 1
- expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_5]).to eq 1
- expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_10]).to eq 1
- expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_25]).to eq 1
- expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_50]).to eq 1
-
- expect(subject[:projects_with_expiration_policy_enabled_with_older_than_unset]).to eq 16
- expect(subject[:projects_with_expiration_policy_enabled_with_older_than_set_to_7d]).to eq 1
- expect(subject[:projects_with_expiration_policy_enabled_with_older_than_set_to_14d]).to eq 1
- expect(subject[:projects_with_expiration_policy_enabled_with_older_than_set_to_30d]).to eq 1
- expect(subject[:projects_with_expiration_policy_enabled_with_older_than_set_to_90d]).to eq 1
-
- expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_1d]).to eq 12
- expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_7d]).to eq 5
- expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_14d]).to eq 1
- expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_1month]).to eq 1
- expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_3month]).to eq 1
+ %i[keep_n cadence older_than].each do |attribute|
+ ContainerExpirationPolicy.send("#{attribute}_options").keys.each do |value|
+ let_it_be("container_expiration_policy_with_#{attribute}_set_to_#{value}") { create(:container_expiration_policy, attribute => value) }
end
end
- it 'works when queries time out' do
- allow_any_instance_of(ActiveRecord::Relation)
- .to receive(:count).and_raise(ActiveRecord::StatementInvalid.new(''))
+ let_it_be('container_expiration_policy_with_keep_n_set_to_null') { create(:container_expiration_policy, keep_n: nil) }
+ let_it_be('container_expiration_policy_with_older_than_set_to_null') { create(:container_expiration_policy, older_than: nil) }
- expect { subject }.not_to raise_error
- end
+ let(:inactive_policies) { ::ContainerExpirationPolicy.where(enabled: false) }
+ let(:active_policies) { ::ContainerExpirationPolicy.active }
- it 'jira usage works when queries time out' do
- allow_any_instance_of(ActiveRecord::Relation)
- .to receive(:find_in_batches).and_raise(ActiveRecord::StatementInvalid.new(''))
+ subject { described_class.data[:counts] }
- expect { described_class.jira_usage }.not_to raise_error
+ it 'gathers usage data' do
+ expect(subject[:projects_with_expiration_policy_enabled]).to eq 22
+ expect(subject[:projects_with_expiration_policy_disabled]).to eq 1
+
+ expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_unset]).to eq 1
+ expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_1]).to eq 1
+ expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_5]).to eq 1
+ expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_10]).to eq 16
+ expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_25]).to eq 1
+ expect(subject[:projects_with_expiration_policy_enabled_with_keep_n_set_to_50]).to eq 1
+
+ expect(subject[:projects_with_expiration_policy_enabled_with_older_than_unset]).to eq 1
+ expect(subject[:projects_with_expiration_policy_enabled_with_older_than_set_to_7d]).to eq 1
+ expect(subject[:projects_with_expiration_policy_enabled_with_older_than_set_to_14d]).to eq 1
+ expect(subject[:projects_with_expiration_policy_enabled_with_older_than_set_to_30d]).to eq 1
+ expect(subject[:projects_with_expiration_policy_enabled_with_older_than_set_to_90d]).to eq 18
+
+ expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_1d]).to eq 18
+ expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_7d]).to eq 1
+ expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_14d]).to eq 1
+ expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_1month]).to eq 1
+ expect(subject[:projects_with_expiration_policy_enabled_with_cadence_set_to_3month]).to eq 1
end
end
- describe '#usage_data_counters' do
- subject { described_class.usage_data_counters }
+ it 'works when queries time out' do
+ allow_any_instance_of(ActiveRecord::Relation)
+ .to receive(:count).and_raise(ActiveRecord::StatementInvalid.new(''))
- it { is_expected.to all(respond_to :totals) }
- it { is_expected.to all(respond_to :fallback_totals) }
+ expect { subject }.not_to raise_error
+ end
- describe 'the results of calling #totals on all objects in the array' do
- subject { described_class.usage_data_counters.map(&:totals) }
+ it 'jira usage works when queries time out' do
+ allow_any_instance_of(ActiveRecord::Relation)
+ .to receive(:find_in_batches).and_raise(ActiveRecord::StatementInvalid.new(''))
- it { is_expected.to all(be_a Hash) }
- it { is_expected.to all(have_attributes(keys: all(be_a Symbol), values: all(be_a Integer))) }
- end
+ expect { described_class.jira_usage }.not_to raise_error
+ end
+ end
- describe 'the results of calling #fallback_totals on all objects in the array' do
- subject { described_class.usage_data_counters.map(&:fallback_totals) }
+ describe '#usage_data_counters' do
+ subject { described_class.usage_data_counters }
- it { is_expected.to all(be_a Hash) }
- it { is_expected.to all(have_attributes(keys: all(be_a Symbol), values: all(eq(-1)))) }
- end
+ it { is_expected.to all(respond_to :totals) }
+ it { is_expected.to all(respond_to :fallback_totals) }
- it 'does not have any conflicts' do
- all_keys = subject.flat_map { |counter| counter.totals.keys }
+ describe 'the results of calling #totals on all objects in the array' do
+ subject { described_class.usage_data_counters.map(&:totals) }
- expect(all_keys.size).to eq all_keys.to_set.size
- end
+ it { is_expected.to all(be_a Hash) }
+ it { is_expected.to all(have_attributes(keys: all(be_a Symbol), values: all(be_a Integer))) }
end
- describe '#license_usage_data' do
- subject { described_class.license_usage_data }
+ describe 'the results of calling #fallback_totals on all objects in the array' do
+ subject { described_class.usage_data_counters.map(&:fallback_totals) }
- it 'gathers license data' do
- expect(subject[:uuid]).to eq(Gitlab::CurrentSettings.uuid)
- expect(subject[:version]).to eq(Gitlab::VERSION)
- expect(subject[:installation_type]).to eq('gitlab-development-kit')
- expect(subject[:active_user_count]).to eq(User.active.size)
- expect(subject[:recorded_at]).to be_a(Time)
- end
+ it { is_expected.to all(be_a Hash) }
+ it { is_expected.to all(have_attributes(keys: all(be_a Symbol), values: all(eq(-1)))) }
end
- describe '.recording_ce_finished_at' do
- subject { described_class.recording_ce_finish_data }
+ it 'does not have any conflicts' do
+ all_keys = subject.flat_map { |counter| counter.totals.keys }
- it 'gathers time ce recording finishes at' do
- expect(subject[:recording_ce_finished_at]).to be_a(Time)
- end
+ expect(all_keys.size).to eq all_keys.to_set.size
end
+ end
- context 'when not relying on database records' do
- describe '#features_usage_data_ce' do
- subject { described_class.features_usage_data_ce }
-
- it 'gathers feature usage data' do
- expect(subject[:mattermost_enabled]).to eq(Gitlab.config.mattermost.enabled)
- expect(subject[:signup_enabled]).to eq(Gitlab::CurrentSettings.allow_signup?)
- expect(subject[:ldap_enabled]).to eq(Gitlab.config.ldap.enabled)
- expect(subject[:gravatar_enabled]).to eq(Gitlab::CurrentSettings.gravatar_enabled?)
- expect(subject[:omniauth_enabled]).to eq(Gitlab::Auth.omniauth_enabled?)
- expect(subject[:reply_by_email_enabled]).to eq(Gitlab::IncomingEmail.enabled?)
- expect(subject[:container_registry_enabled]).to eq(Gitlab.config.registry.enabled)
- expect(subject[:dependency_proxy_enabled]).to eq(Gitlab.config.dependency_proxy.enabled)
- expect(subject[:gitlab_shared_runners_enabled]).to eq(Gitlab.config.gitlab_ci.shared_runners_enabled)
- expect(subject[:web_ide_clientside_preview_enabled]).to eq(Gitlab::CurrentSettings.web_ide_clientside_preview_enabled?)
- expect(subject[:grafana_link_enabled]).to eq(Gitlab::CurrentSettings.grafana_enabled?)
- end
+ describe '#license_usage_data' do
+ subject { described_class.license_usage_data }
- context 'with embedded grafana' do
- it 'returns true when embedded grafana is enabled' do
- stub_application_setting(grafana_enabled: true)
+ it 'gathers license data' do
+ expect(subject[:uuid]).to eq(Gitlab::CurrentSettings.uuid)
+ expect(subject[:version]).to eq(Gitlab::VERSION)
+ expect(subject[:installation_type]).to eq('gitlab-development-kit')
+ expect(subject[:active_user_count]).to eq(User.active.size)
+ expect(subject[:recorded_at]).to be_a(Time)
+ end
+ end
- expect(subject[:grafana_link_enabled]).to eq(true)
- end
+ describe '.recording_ce_finished_at' do
+ subject { described_class.recording_ce_finish_data }
+
+ it 'gathers time ce recording finishes at' do
+ expect(subject[:recording_ce_finished_at]).to be_a(Time)
+ end
+ end
- it 'returns false when embedded grafana is disabled' do
- stub_application_setting(grafana_enabled: false)
+ context 'when not relying on database records' do
+ describe '#features_usage_data_ce' do
+ subject { described_class.features_usage_data_ce }
+
+ it 'gathers feature usage data', :aggregate_failures do
+ expect(subject[:instance_auto_devops_enabled]).to eq(Gitlab::CurrentSettings.auto_devops_enabled?)
+ expect(subject[:mattermost_enabled]).to eq(Gitlab.config.mattermost.enabled)
+ expect(subject[:signup_enabled]).to eq(Gitlab::CurrentSettings.allow_signup?)
+ expect(subject[:ldap_enabled]).to eq(Gitlab.config.ldap.enabled)
+ expect(subject[:gravatar_enabled]).to eq(Gitlab::CurrentSettings.gravatar_enabled?)
+ expect(subject[:omniauth_enabled]).to eq(Gitlab::Auth.omniauth_enabled?)
+ expect(subject[:reply_by_email_enabled]).to eq(Gitlab::IncomingEmail.enabled?)
+ expect(subject[:container_registry_enabled]).to eq(Gitlab.config.registry.enabled)
+ expect(subject[:dependency_proxy_enabled]).to eq(Gitlab.config.dependency_proxy.enabled)
+ expect(subject[:gitlab_shared_runners_enabled]).to eq(Gitlab.config.gitlab_ci.shared_runners_enabled)
+ expect(subject[:web_ide_clientside_preview_enabled]).to eq(Gitlab::CurrentSettings.web_ide_clientside_preview_enabled?)
+ expect(subject[:grafana_link_enabled]).to eq(Gitlab::CurrentSettings.grafana_enabled?)
+ end
- expect(subject[:grafana_link_enabled]).to eq(false)
- end
+ context 'with embedded grafana' do
+ it 'returns true when embedded grafana is enabled' do
+ stub_application_setting(grafana_enabled: true)
+
+ expect(subject[:grafana_link_enabled]).to eq(true)
end
- end
- describe '#components_usage_data' do
- subject { described_class.components_usage_data }
-
- it 'gathers components usage data' do
- expect(Gitlab::UsageData).to receive(:app_server_type).and_return('server_type')
- expect(subject[:app_server][:type]).to eq('server_type')
- expect(subject[:gitlab_pages][:enabled]).to eq(Gitlab.config.pages.enabled)
- expect(subject[:gitlab_pages][:version]).to eq(Gitlab::Pages::VERSION)
- expect(subject[:git][:version]).to eq(Gitlab::Git.version)
- expect(subject[:database][:adapter]).to eq(Gitlab::Database.adapter_name)
- expect(subject[:database][:version]).to eq(Gitlab::Database.version)
- expect(subject[:gitaly][:version]).to be_present
- expect(subject[:gitaly][:servers]).to be >= 1
- expect(subject[:gitaly][:filesystems]).to be_an(Array)
- expect(subject[:gitaly][:filesystems].first).to be_a(String)
+ it 'returns false when embedded grafana is disabled' do
+ stub_application_setting(grafana_enabled: false)
+
+ expect(subject[:grafana_link_enabled]).to eq(false)
end
end
+ end
- describe '#app_server_type' do
- subject { described_class.app_server_type }
+ describe '#components_usage_data' do
+ subject { described_class.components_usage_data }
+
+ it 'gathers basic components usage data' do
+ stub_runtime(:puma)
+
+ expect(subject[:app_server][:type]).to eq('puma')
+ expect(subject[:gitlab_pages][:enabled]).to eq(Gitlab.config.pages.enabled)
+ expect(subject[:gitlab_pages][:version]).to eq(Gitlab::Pages::VERSION)
+ expect(subject[:git][:version]).to eq(Gitlab::Git.version)
+ expect(subject[:database][:adapter]).to eq(Gitlab::Database.adapter_name)
+ expect(subject[:database][:version]).to eq(Gitlab::Database.version)
+ expect(subject[:gitaly][:version]).to be_present
+ expect(subject[:gitaly][:servers]).to be >= 1
+ expect(subject[:gitaly][:clusters]).to be >= 0
+ expect(subject[:gitaly][:filesystems]).to be_an(Array)
+ expect(subject[:gitaly][:filesystems].first).to be_a(String)
+ end
- it 'successfully identifies runtime and returns the identifier' do
- expect(Gitlab::Runtime).to receive(:identify).and_return(:runtime_identifier)
+ def stub_runtime(runtime)
+ allow(Gitlab::Runtime).to receive(:identify).and_return(runtime)
+ end
+ end
- is_expected.to eq('runtime_identifier')
- end
+ describe '#app_server_type' do
+ subject { described_class.app_server_type }
- context 'when runtime is not identified' do
- let(:exception) { Gitlab::Runtime::IdentificationError.new('exception message from runtime identify') }
+ it 'successfully identifies runtime and returns the identifier' do
+ expect(Gitlab::Runtime).to receive(:identify).and_return(:runtime_identifier)
- it 'logs the exception and returns unknown app server type' do
- expect(Gitlab::Runtime).to receive(:identify).and_raise(exception)
+ is_expected.to eq('runtime_identifier')
+ end
- expect(Gitlab::AppLogger).to receive(:error).with(exception.message)
- expect(Gitlab::ErrorTracking).to receive(:track_exception).with(exception)
- expect(subject).to eq('unknown_app_server_type')
- end
+ context 'when runtime is not identified' do
+ let(:exception) { Gitlab::Runtime::IdentificationError.new('exception message from runtime identify') }
+
+ it 'logs the exception and returns unknown app server type' do
+ expect(Gitlab::Runtime).to receive(:identify).and_raise(exception)
+
+ expect(Gitlab::AppLogger).to receive(:error).with(exception.message)
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(exception)
+ expect(subject).to eq('unknown_app_server_type')
end
end
+ end
- describe '#object_store_config' do
- let(:component) { 'lfs' }
+ describe '#object_store_config' do
+ let(:component) { 'lfs' }
- subject { described_class.object_store_config(component) }
+ subject { described_class.object_store_config(component) }
- context 'when object_store is not configured' do
- it 'returns component enable status only' do
- allow(Settings).to receive(:[]).with(component).and_return({ 'enabled' => false })
+ context 'when object_store is not configured' do
+ it 'returns component enable status only' do
+ allow(Settings).to receive(:[]).with(component).and_return({ 'enabled' => false })
- expect(subject).to eq({ enabled: false })
- end
+ expect(subject).to eq({ enabled: false })
end
+ end
- context 'when object_store is configured' do
- it 'returns filtered object store config' do
- allow(Settings).to receive(:[]).with(component)
- .and_return(
+ context 'when object_store is configured' do
+ it 'returns filtered object store config' do
+ allow(Settings).to receive(:[]).with(component)
+ .and_return(
+ { 'enabled' => true,
+ 'object_store' =>
{ 'enabled' => true,
- 'object_store' =>
- { 'enabled' => true,
- 'remote_directory' => component,
- 'direct_upload' => true,
- 'connection' =>
- { 'provider' => 'AWS', 'aws_access_key_id' => 'minio', 'aws_secret_access_key' => 'gdk-minio', 'region' => 'gdk', 'endpoint' => 'http://127.0.0.1:9000', 'path_style' => true },
- 'background_upload' => false,
- 'proxy_download' => false } })
-
- expect(subject).to eq(
- { enabled: true, object_store: { enabled: true, direct_upload: true, background_upload: false, provider: "AWS" } })
- end
+ 'remote_directory' => component,
+ 'direct_upload' => true,
+ 'connection' =>
+ { 'provider' => 'AWS', 'aws_access_key_id' => 'minio', 'aws_secret_access_key' => 'gdk-minio', 'region' => 'gdk', 'endpoint' => 'http://127.0.0.1:9000', 'path_style' => true },
+ 'background_upload' => false,
+ 'proxy_download' => false } })
+
+ expect(subject).to eq(
+ { enabled: true, object_store: { enabled: true, direct_upload: true, background_upload: false, provider: "AWS" } })
end
+ end
- context 'when retrieve component setting meets exception' do
- it 'returns -1 for component enable status' do
- allow(Settings).to receive(:[]).with(component).and_raise(StandardError)
+ context 'when retrieve component setting meets exception' do
+ it 'returns -1 for component enable status' do
+ allow(Settings).to receive(:[]).with(component).and_raise(StandardError)
- expect(subject).to eq({ enabled: -1 })
- end
+ expect(subject).to eq({ enabled: -1 })
end
end
+ end
- describe '#object_store_usage_data' do
- subject { described_class.object_store_usage_data }
-
- it 'fetches object store config of five components' do
- %w(artifacts external_diffs lfs uploads packages).each do |component|
- expect(described_class).to receive(:object_store_config).with(component).and_return("#{component}_object_store_config")
- end
+ describe '#object_store_usage_data' do
+ subject { described_class.object_store_usage_data }
- expect(subject).to eq(
- object_store: {
- artifacts: 'artifacts_object_store_config',
- external_diffs: 'external_diffs_object_store_config',
- lfs: 'lfs_object_store_config',
- uploads: 'uploads_object_store_config',
- packages: 'packages_object_store_config'
- })
+ it 'fetches object store config of five components' do
+ %w(artifacts external_diffs lfs uploads packages).each do |component|
+ expect(described_class).to receive(:object_store_config).with(component).and_return("#{component}_object_store_config")
end
+
+ expect(subject).to eq(
+ object_store: {
+ artifacts: 'artifacts_object_store_config',
+ external_diffs: 'external_diffs_object_store_config',
+ lfs: 'lfs_object_store_config',
+ uploads: 'uploads_object_store_config',
+ packages: 'packages_object_store_config'
+ })
end
+ end
- describe '#cycle_analytics_usage_data' do
- subject { described_class.cycle_analytics_usage_data }
+ describe '#cycle_analytics_usage_data' do
+ subject { described_class.cycle_analytics_usage_data }
- it 'works when queries time out in new' do
- allow(Gitlab::CycleAnalytics::UsageData)
- .to receive(:new).and_raise(ActiveRecord::StatementInvalid.new(''))
+ it 'works when queries time out in new' do
+ allow(Gitlab::CycleAnalytics::UsageData)
+ .to receive(:new).and_raise(ActiveRecord::StatementInvalid.new(''))
- expect { subject }.not_to raise_error
- end
+ expect { subject }.not_to raise_error
+ end
- it 'works when queries time out in to_json' do
- allow_any_instance_of(Gitlab::CycleAnalytics::UsageData)
- .to receive(:to_json).and_raise(ActiveRecord::StatementInvalid.new(''))
+ it 'works when queries time out in to_json' do
+ allow_any_instance_of(Gitlab::CycleAnalytics::UsageData)
+ .to receive(:to_json).and_raise(ActiveRecord::StatementInvalid.new(''))
- expect { subject }.not_to raise_error
+ expect { subject }.not_to raise_error
+ end
+ end
+
+ describe '#ingress_modsecurity_usage' do
+ subject { described_class.ingress_modsecurity_usage }
+
+ let(:environment) { create(:environment) }
+ let(:project) { environment.project }
+ let(:environment_scope) { '*' }
+ let(:deployment) { create(:deployment, :success, environment: environment, project: project, cluster: cluster) }
+ let(:cluster) { create(:cluster, environment_scope: environment_scope, projects: [project]) }
+ let(:ingress_mode) { :modsecurity_blocking }
+ let!(:ingress) { create(:clusters_applications_ingress, ingress_mode, cluster: cluster) }
+
+ context 'when cluster is disabled' do
+ let(:cluster) { create(:cluster, :disabled, projects: [project]) }
+
+ it 'gathers ingress data' do
+ expect(subject[:ingress_modsecurity_logging]).to eq(0)
+ expect(subject[:ingress_modsecurity_blocking]).to eq(0)
+ expect(subject[:ingress_modsecurity_disabled]).to eq(0)
+ expect(subject[:ingress_modsecurity_not_installed]).to eq(0)
end
end
- describe '#ingress_modsecurity_usage' do
- subject { described_class.ingress_modsecurity_usage }
+ context 'when deployment is unsuccessful' do
+ let!(:deployment) { create(:deployment, :failed, environment: environment, project: project, cluster: cluster) }
- let(:environment) { create(:environment) }
- let(:project) { environment.project }
- let(:environment_scope) { '*' }
- let(:deployment) { create(:deployment, :success, environment: environment, project: project, cluster: cluster) }
- let(:cluster) { create(:cluster, environment_scope: environment_scope, projects: [project]) }
- let(:ingress_mode) { :modsecurity_blocking }
- let!(:ingress) { create(:clusters_applications_ingress, ingress_mode, cluster: cluster) }
+ it 'gathers ingress data' do
+ expect(subject[:ingress_modsecurity_logging]).to eq(0)
+ expect(subject[:ingress_modsecurity_blocking]).to eq(0)
+ expect(subject[:ingress_modsecurity_disabled]).to eq(0)
+ expect(subject[:ingress_modsecurity_not_installed]).to eq(0)
+ end
+ end
- context 'when cluster is disabled' do
- let(:cluster) { create(:cluster, :disabled, projects: [project]) }
+ context 'when deployment is successful' do
+ let!(:deployment) { create(:deployment, :success, environment: environment, project: project, cluster: cluster) }
+ context 'when modsecurity is in blocking mode' do
it 'gathers ingress data' do
expect(subject[:ingress_modsecurity_logging]).to eq(0)
- expect(subject[:ingress_modsecurity_blocking]).to eq(0)
+ expect(subject[:ingress_modsecurity_blocking]).to eq(1)
expect(subject[:ingress_modsecurity_disabled]).to eq(0)
expect(subject[:ingress_modsecurity_not_installed]).to eq(0)
end
end
- context 'when deployment is unsuccessful' do
- let!(:deployment) { create(:deployment, :failed, environment: environment, project: project, cluster: cluster) }
+ context 'when modsecurity is in logging mode' do
+ let(:ingress_mode) { :modsecurity_logging }
it 'gathers ingress data' do
- expect(subject[:ingress_modsecurity_logging]).to eq(0)
+ expect(subject[:ingress_modsecurity_logging]).to eq(1)
expect(subject[:ingress_modsecurity_blocking]).to eq(0)
expect(subject[:ingress_modsecurity_disabled]).to eq(0)
expect(subject[:ingress_modsecurity_not_installed]).to eq(0)
end
end
- context 'when deployment is successful' do
- let!(:deployment) { create(:deployment, :success, environment: environment, project: project, cluster: cluster) }
+ context 'when modsecurity is disabled' do
+ let(:ingress_mode) { :modsecurity_disabled }
- context 'when modsecurity is in blocking mode' do
- it 'gathers ingress data' do
- expect(subject[:ingress_modsecurity_logging]).to eq(0)
- expect(subject[:ingress_modsecurity_blocking]).to eq(1)
- expect(subject[:ingress_modsecurity_disabled]).to eq(0)
- expect(subject[:ingress_modsecurity_not_installed]).to eq(0)
- end
+ it 'gathers ingress data' do
+ expect(subject[:ingress_modsecurity_logging]).to eq(0)
+ expect(subject[:ingress_modsecurity_blocking]).to eq(0)
+ expect(subject[:ingress_modsecurity_disabled]).to eq(1)
+ expect(subject[:ingress_modsecurity_not_installed]).to eq(0)
end
+ end
- context 'when modsecurity is in logging mode' do
- let(:ingress_mode) { :modsecurity_logging }
+ context 'when modsecurity is not installed' do
+ let(:ingress_mode) { :modsecurity_not_installed }
- it 'gathers ingress data' do
- expect(subject[:ingress_modsecurity_logging]).to eq(1)
- expect(subject[:ingress_modsecurity_blocking]).to eq(0)
- expect(subject[:ingress_modsecurity_disabled]).to eq(0)
- expect(subject[:ingress_modsecurity_not_installed]).to eq(0)
- end
+ it 'gathers ingress data' do
+ expect(subject[:ingress_modsecurity_logging]).to eq(0)
+ expect(subject[:ingress_modsecurity_blocking]).to eq(0)
+ expect(subject[:ingress_modsecurity_disabled]).to eq(0)
+ expect(subject[:ingress_modsecurity_not_installed]).to eq(1)
end
+ end
- context 'when modsecurity is disabled' do
- let(:ingress_mode) { :modsecurity_disabled }
+ context 'with multiple projects' do
+ let(:environment_2) { create(:environment) }
+ let(:project_2) { environment_2.project }
+ let(:cluster_2) { create(:cluster, environment_scope: environment_scope, projects: [project_2]) }
+ let!(:ingress_2) { create(:clusters_applications_ingress, :modsecurity_logging, cluster: cluster_2) }
+ let!(:deployment_2) { create(:deployment, :success, environment: environment_2, project: project_2, cluster: cluster_2) }
- it 'gathers ingress data' do
- expect(subject[:ingress_modsecurity_logging]).to eq(0)
- expect(subject[:ingress_modsecurity_blocking]).to eq(0)
- expect(subject[:ingress_modsecurity_disabled]).to eq(1)
- expect(subject[:ingress_modsecurity_not_installed]).to eq(0)
- end
+ it 'gathers non-duplicated ingress data' do
+ expect(subject[:ingress_modsecurity_logging]).to eq(1)
+ expect(subject[:ingress_modsecurity_blocking]).to eq(1)
+ expect(subject[:ingress_modsecurity_disabled]).to eq(0)
+ expect(subject[:ingress_modsecurity_not_installed]).to eq(0)
end
+ end
- context 'when modsecurity is not installed' do
- let(:ingress_mode) { :modsecurity_not_installed }
-
- it 'gathers ingress data' do
- expect(subject[:ingress_modsecurity_logging]).to eq(0)
- expect(subject[:ingress_modsecurity_blocking]).to eq(0)
- expect(subject[:ingress_modsecurity_disabled]).to eq(0)
- expect(subject[:ingress_modsecurity_not_installed]).to eq(1)
- end
- end
+ context 'with multiple deployments' do
+ let!(:deployment_2) { create(:deployment, :success, environment: environment, project: project, cluster: cluster) }
- context 'with multiple projects' do
- let(:environment_2) { create(:environment) }
- let(:project_2) { environment_2.project }
- let(:cluster_2) { create(:cluster, environment_scope: environment_scope, projects: [project_2]) }
- let!(:ingress_2) { create(:clusters_applications_ingress, :modsecurity_logging, cluster: cluster_2) }
- let!(:deployment_2) { create(:deployment, :success, environment: environment_2, project: project_2, cluster: cluster_2) }
-
- it 'gathers non-duplicated ingress data' do
- expect(subject[:ingress_modsecurity_logging]).to eq(1)
- expect(subject[:ingress_modsecurity_blocking]).to eq(1)
- expect(subject[:ingress_modsecurity_disabled]).to eq(0)
- expect(subject[:ingress_modsecurity_not_installed]).to eq(0)
- end
+ it 'gathers non-duplicated ingress data' do
+ expect(subject[:ingress_modsecurity_logging]).to eq(0)
+ expect(subject[:ingress_modsecurity_blocking]).to eq(1)
+ expect(subject[:ingress_modsecurity_disabled]).to eq(0)
+ expect(subject[:ingress_modsecurity_not_installed]).to eq(0)
end
+ end
- context 'with multiple deployments' do
- let!(:deployment_2) { create(:deployment, :success, environment: environment, project: project, cluster: cluster) }
-
- it 'gathers non-duplicated ingress data' do
- expect(subject[:ingress_modsecurity_logging]).to eq(0)
- expect(subject[:ingress_modsecurity_blocking]).to eq(1)
- expect(subject[:ingress_modsecurity_disabled]).to eq(0)
- expect(subject[:ingress_modsecurity_not_installed]).to eq(0)
- end
- end
+ context 'with multiple projects' do
+ let(:environment_2) { create(:environment) }
+ let(:project_2) { environment_2.project }
+ let!(:deployment_2) { create(:deployment, :success, environment: environment_2, project: project_2, cluster: cluster) }
+ let(:cluster) { create(:cluster, environment_scope: environment_scope, projects: [project, project_2]) }
- context 'with multiple projects' do
- let(:environment_2) { create(:environment) }
- let(:project_2) { environment_2.project }
- let!(:deployment_2) { create(:deployment, :success, environment: environment_2, project: project_2, cluster: cluster) }
- let(:cluster) { create(:cluster, environment_scope: environment_scope, projects: [project, project_2]) }
-
- it 'gathers ingress data' do
- expect(subject[:ingress_modsecurity_logging]).to eq(0)
- expect(subject[:ingress_modsecurity_blocking]).to eq(2)
- expect(subject[:ingress_modsecurity_disabled]).to eq(0)
- expect(subject[:ingress_modsecurity_not_installed]).to eq(0)
- end
+ it 'gathers ingress data' do
+ expect(subject[:ingress_modsecurity_logging]).to eq(0)
+ expect(subject[:ingress_modsecurity_blocking]).to eq(2)
+ expect(subject[:ingress_modsecurity_disabled]).to eq(0)
+ expect(subject[:ingress_modsecurity_not_installed]).to eq(0)
end
+ end
- context 'with multiple environments' do
- let!(:environment_2) { create(:environment, project: project) }
- let!(:deployment_2) { create(:deployment, :success, environment: environment_2, project: project, cluster: cluster) }
+ context 'with multiple environments' do
+ let!(:environment_2) { create(:environment, project: project) }
+ let!(:deployment_2) { create(:deployment, :success, environment: environment_2, project: project, cluster: cluster) }
- it 'gathers ingress data' do
- expect(subject[:ingress_modsecurity_logging]).to eq(0)
- expect(subject[:ingress_modsecurity_blocking]).to eq(2)
- expect(subject[:ingress_modsecurity_disabled]).to eq(0)
- expect(subject[:ingress_modsecurity_not_installed]).to eq(0)
- end
+ it 'gathers ingress data' do
+ expect(subject[:ingress_modsecurity_logging]).to eq(0)
+ expect(subject[:ingress_modsecurity_blocking]).to eq(2)
+ expect(subject[:ingress_modsecurity_disabled]).to eq(0)
+ expect(subject[:ingress_modsecurity_not_installed]).to eq(0)
end
end
end
+ end
- describe '#grafana_embed_usage_data' do
- subject { described_class.grafana_embed_usage_data }
+ describe '#grafana_embed_usage_data' do
+ subject { described_class.grafana_embed_usage_data }
- let(:project) { create(:project) }
- let(:description_with_embed) { "Some comment\n\nhttps://grafana.example.com/d/xvAk4q0Wk/go-processes?orgId=1&from=1573238522762&to=1573240322762&var-job=prometheus&var-interval=10m&panelId=1&fullscreen" }
- let(:description_with_unintegrated_embed) { "Some comment\n\nhttps://grafana.exp.com/d/xvAk4q0Wk/go-processes?orgId=1&from=1573238522762&to=1573240322762&var-job=prometheus&var-interval=10m&panelId=1&fullscreen" }
- let(:description_with_non_grafana_inline_metric) { "Some comment\n\n#{Gitlab::Routing.url_helpers.metrics_namespace_project_environment_url(*['foo', 'bar', 12])}" }
+ let(:project) { create(:project) }
+ let(:description_with_embed) { "Some comment\n\nhttps://grafana.example.com/d/xvAk4q0Wk/go-processes?orgId=1&from=1573238522762&to=1573240322762&var-job=prometheus&var-interval=10m&panelId=1&fullscreen" }
+ let(:description_with_unintegrated_embed) { "Some comment\n\nhttps://grafana.exp.com/d/xvAk4q0Wk/go-processes?orgId=1&from=1573238522762&to=1573240322762&var-job=prometheus&var-interval=10m&panelId=1&fullscreen" }
+ let(:description_with_non_grafana_inline_metric) { "Some comment\n\n#{Gitlab::Routing.url_helpers.metrics_namespace_project_environment_url(*['foo', 'bar', 12])}" }
- shared_examples "zero count" do
- it "does not count the issue" do
- expect(subject).to eq(0)
- end
+ shared_examples "zero count" do
+ it "does not count the issue" do
+ expect(subject).to eq(0)
end
+ end
- context 'with project grafana integration enabled' do
- before do
- create(:grafana_integration, project: project, enabled: true)
- end
-
- context 'with valid and invalid embeds' do
- before do
- # Valid
- create(:issue, project: project, description: description_with_embed)
- create(:issue, project: project, description: description_with_embed)
- # In-Valid
- create(:issue, project: project, description: description_with_unintegrated_embed)
- create(:issue, project: project, description: description_with_non_grafana_inline_metric)
- create(:issue, project: project, description: nil)
- create(:issue, project: project, description: '')
- create(:issue, project: project)
- end
-
- it 'counts only the issues with embeds' do
- expect(subject).to eq(2)
- end
- end
+ context 'with project grafana integration enabled' do
+ before do
+ create(:grafana_integration, project: project, enabled: true)
end
- context 'with project grafana integration disabled' do
+ context 'with valid and invalid embeds' do
before do
- create(:grafana_integration, project: project, enabled: false)
- end
-
- context 'with one issue having a grafana link in the description and one without' do
- before do
- create(:issue, project: project, description: description_with_embed)
- create(:issue, project: project)
- end
-
- it_behaves_like('zero count')
+ # Valid
+ create(:issue, project: project, description: description_with_embed)
+ create(:issue, project: project, description: description_with_embed)
+        # Invalid
+ create(:issue, project: project, description: description_with_unintegrated_embed)
+ create(:issue, project: project, description: description_with_non_grafana_inline_metric)
+ create(:issue, project: project, description: nil)
+ create(:issue, project: project, description: '')
+ create(:issue, project: project)
end
- end
-
- context 'with an un-integrated project' do
- context 'with one issue having a grafana link in the description and one without' do
- before do
- create(:issue, project: project, description: description_with_embed)
- create(:issue, project: project)
- end
- it_behaves_like('zero count')
+ it 'counts only the issues with embeds' do
+ expect(subject).to eq(2)
end
end
end
- describe '#count' do
- let(:relation) { double(:relation) }
-
- it 'returns the count when counting succeeds' do
- allow(relation).to receive(:count).and_return(1)
-
- expect(described_class.count(relation, batch: false)).to eq(1)
+ context 'with project grafana integration disabled' do
+ before do
+ create(:grafana_integration, project: project, enabled: false)
end
- it 'returns the fallback value when counting fails' do
- stub_const("Gitlab::UsageData::FALLBACK", 15)
- allow(relation).to receive(:count).and_raise(ActiveRecord::StatementInvalid.new(''))
+ context 'with one issue having a grafana link in the description and one without' do
+ before do
+ create(:issue, project: project, description: description_with_embed)
+ create(:issue, project: project)
+ end
- expect(described_class.count(relation, batch: false)).to eq(15)
+ it_behaves_like('zero count')
end
end
- describe '#distinct_count' do
- let(:relation) { double(:relation) }
-
- it 'returns the count when counting succeeds' do
- allow(relation).to receive(:distinct_count_by).and_return(1)
-
- expect(described_class.distinct_count(relation, batch: false)).to eq(1)
- end
-
- it 'returns the fallback value when counting fails' do
- stub_const("Gitlab::UsageData::FALLBACK", 15)
- allow(relation).to receive(:distinct_count_by).and_raise(ActiveRecord::StatementInvalid.new(''))
+ context 'with an un-integrated project' do
+ context 'with one issue having a grafana link in the description and one without' do
+ before do
+ create(:issue, project: project, description: description_with_embed)
+ create(:issue, project: project)
+ end
- expect(described_class.distinct_count(relation, batch: false)).to eq(15)
+ it_behaves_like('zero count')
end
end
end
end
- context 'when usage usage_ping_batch_counter is true' do
- before do
- stub_feature_flags(usage_ping_batch_counter: true)
- end
-
- it_behaves_like 'usage data execution'
- end
+ describe '#merge_requests_usage' do
+ let(:time_period) { { created_at: 2.days.ago..Time.current } }
+ let(:merge_request) { create(:merge_request) }
+ let(:other_user) { create(:user) }
+ let(:another_user) { create(:user) }
- context 'when usage usage_ping_batch_counter is false' do
before do
- stub_feature_flags(usage_ping_batch_counter: false)
- end
-
- it_behaves_like 'usage data execution'
- end
-
- describe '#alt_usage_data' do
- it 'returns the fallback when it gets an error' do
- expect(described_class.alt_usage_data { raise StandardError } ).to eq(-1)
- end
-
+    it 'returns the evaluated block when given' do
- expect(described_class.alt_usage_data { Gitlab::CurrentSettings.uuid } ).to eq(Gitlab::CurrentSettings.uuid)
+ create(:event, target: merge_request, author: merge_request.author, created_at: 1.day.ago)
+ create(:event, target: merge_request, author: merge_request.author, created_at: 1.hour.ago)
+ create(:event, target: merge_request, author: merge_request.author, created_at: 3.days.ago)
+ create(:event, target: merge_request, author: other_user, created_at: 1.day.ago)
+ create(:event, target: merge_request, author: other_user, created_at: 1.hour.ago)
+ create(:event, target: merge_request, author: other_user, created_at: 3.days.ago)
+ create(:event, target: merge_request, author: another_user, created_at: 4.days.ago)
end
- it 'returns the value when given' do
- expect(described_class.alt_usage_data(1)).to eq 1
- end
- end
-
- describe '#redis_usage_data' do
- context 'with block given' do
- it 'returns the fallback when it gets an error' do
- expect(described_class.redis_usage_data { raise ::Redis::CommandError } ).to eq(-1)
- end
-
- it 'returns the evaluated block when given' do
- expect(described_class.redis_usage_data { 1 }).to eq(1)
- end
- end
-
- context 'with counter given' do
-      it 'returns the fallback values for all counter keys when it gets an error' do
- allow(::Gitlab::UsageDataCounters::WikiPageCounter).to receive(:totals).and_raise(::Redis::CommandError)
- expect(described_class.redis_usage_data(::Gitlab::UsageDataCounters::WikiPageCounter)).to eql(::Gitlab::UsageDataCounters::WikiPageCounter.fallback_totals)
- end
-
-      it 'returns the totals when counter is given' do
- allow(::Gitlab::UsageDataCounters::WikiPageCounter).to receive(:totals).and_return({ wiki_pages_create: 2 })
- expect(described_class.redis_usage_data(::Gitlab::UsageDataCounters::WikiPageCounter)).to eql({ wiki_pages_create: 2 })
- end
+ it 'returns the distinct count of users using merge requests (via events table) within the specified time period' do
+ expect(described_class.merge_requests_usage(time_period)).to eq(
+ merge_requests_users: 2
+ )
end
end
end
diff --git a/spec/lib/gitlab/utils/usage_data_spec.rb b/spec/lib/gitlab/utils/usage_data_spec.rb
new file mode 100644
index 00000000000..7de615384c5
--- /dev/null
+++ b/spec/lib/gitlab/utils/usage_data_spec.rb
@@ -0,0 +1,111 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Utils::UsageData do
+ describe '#count' do
+ let(:relation) { double(:relation) }
+
+ it 'returns the count when counting succeeds' do
+ allow(relation).to receive(:count).and_return(1)
+
+ expect(described_class.count(relation, batch: false)).to eq(1)
+ end
+
+ it 'returns the fallback value when counting fails' do
+ stub_const("Gitlab::Utils::UsageData::FALLBACK", 15)
+ allow(relation).to receive(:count).and_raise(ActiveRecord::StatementInvalid.new(''))
+
+ expect(described_class.count(relation, batch: false)).to eq(15)
+ end
+ end
+
+ describe '#distinct_count' do
+ let(:relation) { double(:relation) }
+
+ it 'returns the count when counting succeeds' do
+ allow(relation).to receive(:distinct_count_by).and_return(1)
+
+ expect(described_class.distinct_count(relation, batch: false)).to eq(1)
+ end
+
+ it 'returns the fallback value when counting fails' do
+ stub_const("Gitlab::Utils::UsageData::FALLBACK", 15)
+ allow(relation).to receive(:distinct_count_by).and_raise(ActiveRecord::StatementInvalid.new(''))
+
+ expect(described_class.distinct_count(relation, batch: false)).to eq(15)
+ end
+ end
+
+ describe '#alt_usage_data' do
+ it 'returns the fallback when it gets an error' do
+ expect(described_class.alt_usage_data { raise StandardError } ).to eq(-1)
+ end
+
+    it 'returns the evaluated block when given' do
+ expect(described_class.alt_usage_data { Gitlab::CurrentSettings.uuid } ).to eq(Gitlab::CurrentSettings.uuid)
+ end
+
+ it 'returns the value when given' do
+ expect(described_class.alt_usage_data(1)).to eq 1
+ end
+ end
+
+ describe '#redis_usage_data' do
+ context 'with block given' do
+ it 'returns the fallback when it gets an error' do
+ expect(described_class.redis_usage_data { raise ::Redis::CommandError } ).to eq(-1)
+ end
+
+ it 'returns the evaluated block when given' do
+ expect(described_class.redis_usage_data { 1 }).to eq(1)
+ end
+ end
+
+ context 'with counter given' do
+      it 'returns the fallback values for all counter keys when it gets an error' do
+ allow(::Gitlab::UsageDataCounters::WikiPageCounter).to receive(:totals).and_raise(::Redis::CommandError)
+ expect(described_class.redis_usage_data(::Gitlab::UsageDataCounters::WikiPageCounter)).to eql(::Gitlab::UsageDataCounters::WikiPageCounter.fallback_totals)
+ end
+
+      it 'returns the totals when counter is given' do
+ allow(::Gitlab::UsageDataCounters::WikiPageCounter).to receive(:totals).and_return({ wiki_pages_create: 2 })
+ expect(described_class.redis_usage_data(::Gitlab::UsageDataCounters::WikiPageCounter)).to eql({ wiki_pages_create: 2 })
+ end
+ end
+ end
+
+ describe '#with_prometheus_client' do
+ context 'when Prometheus is enabled' do
+ it 'yields a client instance and returns the block result' do
+ expect(Gitlab::Prometheus::Internal).to receive(:prometheus_enabled?).and_return(true)
+ expect(Gitlab::Prometheus::Internal).to receive(:uri).and_return('http://prom:9090')
+
+ result = described_class.with_prometheus_client { |client| client }
+
+ expect(result).to be_an_instance_of(Gitlab::PrometheusClient)
+ end
+ end
+
+ context 'when Prometheus is disabled' do
+ it 'returns nil' do
+ expect(Gitlab::Prometheus::Internal).to receive(:prometheus_enabled?).and_return(false)
+
+ result = described_class.with_prometheus_client { |client| client }
+
+ expect(result).to be nil
+ end
+ end
+ end
+
+ describe '#measure_duration' do
+ it 'returns block result and execution duration' do
+ allow(Process).to receive(:clock_gettime).and_return(1, 3)
+
+ result, duration = described_class.measure_duration { 42 }
+
+ expect(result).to eq(42)
+ expect(duration).to eq(2)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/utils_spec.rb b/spec/lib/gitlab/utils_spec.rb
index 0f0d6a93c97..3a2430d1f2d 100644
--- a/spec/lib/gitlab/utils_spec.rb
+++ b/spec/lib/gitlab/utils_spec.rb
@@ -345,4 +345,17 @@ describe Gitlab::Utils do
expect(described_class.parse_url(1)).to be nil
end
end
+
+ describe 'multiple_key_invert' do
+ it 'invert keys with array values' do
+ hash = {
+ dast: [:vulnerabilities_count, :scanned_resources_count],
+ sast: [:vulnerabilities_count]
+ }
+ expect(described_class.multiple_key_invert(hash)).to eq({
+ vulnerabilities_count: [:dast, :sast],
+ scanned_resources_count: [:dast]
+ })
+ end
+ end
end
diff --git a/spec/lib/gitlab/web_ide/config/entry/global_spec.rb b/spec/lib/gitlab/web_ide/config/entry/global_spec.rb
new file mode 100644
index 00000000000..04b0752c6fe
--- /dev/null
+++ b/spec/lib/gitlab/web_ide/config/entry/global_spec.rb
@@ -0,0 +1,164 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::WebIde::Config::Entry::Global do
+ let(:global) { described_class.new(hash) }
+
+ describe '.nodes' do
+ it 'returns a hash' do
+ expect(described_class.nodes).to be_a(Hash)
+ end
+
+ context 'when filtering all the entry/node names' do
+ it 'contains the expected node names' do
+ expect(described_class.nodes.keys)
+ .to match_array(%i[terminal])
+ end
+ end
+ end
+
+ context 'when configuration is valid' do
+ context 'when some entries defined' do
+ let(:hash) do
+ { terminal: { before_script: ['ls'], variables: {}, script: 'sleep 10s', services: ['mysql'] } }
+ end
+
+ describe '#compose!' do
+ before do
+ global.compose!
+ end
+
+ it 'creates nodes hash' do
+ expect(global.descendants).to be_an Array
+ end
+
+ it 'creates node object for each entry' do
+ expect(global.descendants.count).to eq 1
+ end
+
+ it 'creates node object using valid class' do
+ expect(global.descendants.first)
+ .to be_an_instance_of Gitlab::WebIde::Config::Entry::Terminal
+ end
+
+ it 'sets correct description for nodes' do
+ expect(global.descendants.first.description)
+ .to eq 'Configuration of the webide terminal.'
+ end
+
+ describe '#leaf?' do
+ it 'is not leaf' do
+ expect(global).not_to be_leaf
+ end
+ end
+ end
+
+ context 'when not composed' do
+ describe '#terminal_value' do
+ it 'returns nil' do
+ expect(global.terminal_value).to be nil
+ end
+ end
+
+ describe '#leaf?' do
+ it 'is leaf' do
+ expect(global).to be_leaf
+ end
+ end
+ end
+
+ context 'when composed' do
+ before do
+ global.compose!
+ end
+
+ describe '#errors' do
+ it 'has no errors' do
+ expect(global.errors).to be_empty
+ end
+ end
+
+ describe '#terminal_value' do
+ it 'returns correct script' do
+ expect(global.terminal_value).to eq({
+ tag_list: [],
+ yaml_variables: [],
+ options: {
+ before_script: ['ls'],
+ script: ['sleep 10s'],
+ services: [{ name: "mysql" }]
+ }
+ })
+ end
+ end
+ end
+ end
+ end
+
+ context 'when configuration is not valid' do
+ before do
+ global.compose!
+ end
+
+ context 'when job does not have valid before script' do
+ let(:hash) do
+ { terminal: { before_script: 100 } }
+ end
+
+ describe '#errors' do
+ it 'reports errors about missing script' do
+ expect(global.errors)
+ .to include "terminal:before_script config should be an array containing strings and arrays of strings"
+ end
+ end
+ end
+ end
+
+ context 'when value is not a hash' do
+ let(:hash) { [] }
+
+ describe '#valid?' do
+ it 'is not valid' do
+ expect(global).not_to be_valid
+ end
+ end
+
+ describe '#errors' do
+ it 'returns error about invalid type' do
+ expect(global.errors.first).to match /should be a hash/
+ end
+ end
+ end
+
+ describe '#specified?' do
+ it 'is concrete entry that is defined' do
+ expect(global.specified?).to be true
+ end
+ end
+
+ describe '#[]' do
+ before do
+ global.compose!
+ end
+
+ let(:hash) do
+ { terminal: { before_script: ['ls'] } }
+ end
+
+ context 'when entry exists' do
+ it 'returns correct entry' do
+ expect(global[:terminal])
+ .to be_an_instance_of Gitlab::WebIde::Config::Entry::Terminal
+ expect(global[:terminal][:before_script].value).to eq ['ls']
+ end
+ end
+
+ context 'when entry does not exist' do
+ it 'always return unspecified node' do
+ expect(global[:some][:unknown][:node])
+ .not_to be_specified
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/web_ide/config/entry/terminal_spec.rb b/spec/lib/gitlab/web_ide/config/entry/terminal_spec.rb
new file mode 100644
index 00000000000..882e389e040
--- /dev/null
+++ b/spec/lib/gitlab/web_ide/config/entry/terminal_spec.rb
@@ -0,0 +1,156 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::WebIde::Config::Entry::Terminal do
+ let(:entry) { described_class.new(config, with_image_ports: true) }
+
+ describe '.nodes' do
+ context 'when filtering all the entry/node names' do
+ subject { described_class.nodes.keys }
+
+ let(:result) do
+ %i[before_script script image services variables]
+ end
+
+ it { is_expected.to match_array result }
+ end
+ end
+
+ describe 'validations' do
+ before do
+ entry.compose!
+ end
+
+ context 'when entry config value is correct' do
+ let(:config) { { script: 'rspec' } }
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+
+ context 'when the same port is not duplicated' do
+ let(:config) do
+ {
+ image: { name: "ruby", ports: [80] },
+ services: [{ name: "mysql", alias: "service1", ports: [81] }, { name: "mysql", alias: "service2", ports: [82] }]
+ }
+ end
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+ end
+
+ context 'when unknown port keys detected' do
+ let(:config) do
+ {
+ image: { name: "ruby", ports: [80] },
+ services: [{ name: "mysql", alias: "service2", ports: [{ number: 81, invalid_key: 'foobar' }] }]
+ }
+ end
+
+ it 'is not valid' do
+ expect(entry).not_to be_valid
+ expect(entry.errors.first)
+ .to match /port config contains unknown keys: invalid_key/
+ end
+ end
+ end
+
+ context 'when entry value is not correct' do
+ context 'incorrect config value type' do
+ let(:config) { ['incorrect'] }
+
+ describe '#errors' do
+ it 'reports error about a config type' do
+ expect(entry.errors)
+ .to include 'terminal config should be a hash'
+ end
+ end
+ end
+
+ context 'when config is empty' do
+ let(:config) { {} }
+
+ describe '#valid' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+ end
+
+ context 'when unknown keys detected' do
+ let(:config) { { unknown: true } }
+
+ describe '#valid' do
+ it 'is not valid' do
+ expect(entry).not_to be_valid
+ end
+ end
+ end
+
+ context 'when the same port is duplicated' do
+ let(:config) do
+ {
+ image: { name: "ruby", ports: [80] },
+ services: [{ name: "mysql", ports: [80] }, { name: "mysql", ports: [81] }]
+ }
+ end
+
+ describe '#valid?' do
+ it 'is invalid' do
+ expect(entry).not_to be_valid
+ expect(entry.errors.count).to eq 1
+ expect(entry.errors.first).to match "each port number can only be referenced once"
+ end
+ end
+ end
+ end
+ end
+
+ describe '#relevant?' do
+ it 'is a relevant entry' do
+ entry = described_class.new({ script: 'rspec' })
+
+ expect(entry).to be_relevant
+ end
+ end
+
+ context 'when composed' do
+ before do
+ entry.compose!
+ end
+
+ describe '#value' do
+ context 'when entry is correct' do
+ let(:config) do
+ { before_script: %w[ls pwd],
+ script: 'sleep 100',
+ tags: ['webide'],
+ image: 'ruby:2.5',
+ services: ['mysql'],
+ variables: { KEY: 'value' } }
+ end
+
+ it 'returns correct value' do
+ expect(entry.value)
+ .to eq(
+ tag_list: ['webide'],
+ yaml_variables: [{ key: 'KEY', value: 'value', public: true }],
+ options: {
+ image: { name: "ruby:2.5" },
+ services: [{ name: "mysql" }],
+ before_script: %w[ls pwd],
+ script: ['sleep 100']
+ }
+ )
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/web_ide/config_spec.rb b/spec/lib/gitlab/web_ide/config_spec.rb
new file mode 100644
index 00000000000..c1dafd01197
--- /dev/null
+++ b/spec/lib/gitlab/web_ide/config_spec.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::WebIde::Config do
+ let(:config) do
+ described_class.new(yml)
+ end
+
+ context 'when config is valid' do
+ let(:yml) do
+ <<-EOS
+ terminal:
+ image: ruby:2.7
+ before_script:
+ - gem install rspec
+ EOS
+ end
+
+ describe '#to_hash' do
+ it 'returns hash created from string' do
+ hash = {
+ terminal: {
+ image: 'ruby:2.7',
+ before_script: ['gem install rspec']
+ }
+ }
+
+ expect(config.to_hash).to eq hash
+ end
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(config).to be_valid
+ end
+
+ it 'has no errors' do
+ expect(config.errors).to be_empty
+ end
+ end
+ end
+ end
+
+ context 'when config is invalid' do
+ context 'when yml is incorrect' do
+ let(:yml) { '// invalid' }
+
+ describe '.new' do
+ it 'raises error' do
+ expect { config }.to raise_error(
+ described_class::ConfigError,
+ /Invalid configuration format/
+ )
+ end
+ end
+ end
+
+ context 'when config logic is incorrect' do
+ let(:yml) { 'terminal: { before_script: "ls" }' }
+
+ describe '#valid?' do
+ it 'is not valid' do
+ expect(config).not_to be_valid
+ end
+
+ it 'has errors' do
+ expect(config.errors).not_to be_empty
+ end
+ end
+
+ describe '#errors' do
+ it 'returns an array of strings' do
+ expect(config.errors).to all(be_an_instance_of(String))
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/wiki_pages/front_matter_parser_spec.rb b/spec/lib/gitlab/wiki_pages/front_matter_parser_spec.rb
index f9ed769f2d9..01701589e63 100644
--- a/spec/lib/gitlab/wiki_pages/front_matter_parser_spec.rb
+++ b/spec/lib/gitlab/wiki_pages/front_matter_parser_spec.rb
@@ -7,7 +7,7 @@ describe Gitlab::WikiPages::FrontMatterParser do
let(:content) { 'This is the content' }
let(:end_divider) { '---' }
- let(:gate) { double('Gate') }
+ let(:gate) { stub_feature_flag_gate('Gate') }
let(:with_front_matter) do
<<~MD
diff --git a/spec/lib/gitlab_spec.rb b/spec/lib/gitlab_spec.rb
index 9362ff72fbc..84d072a50ec 100644
--- a/spec/lib/gitlab_spec.rb
+++ b/spec/lib/gitlab_spec.rb
@@ -96,6 +96,28 @@ describe Gitlab do
end
end
+ describe '.staging?' do
+ subject { described_class.staging? }
+
+ it 'is false when on GitLab.com' do
+ stub_config_setting(url: 'https://gitlab.com')
+
+ expect(subject).to eq false
+ end
+
+ it 'is true when on staging' do
+ stub_config_setting(url: 'https://staging.gitlab.com')
+
+ expect(subject).to eq true
+ end
+
+ it 'is false when not on staging' do
+ stub_config_setting(url: 'https://example.gitlab.com')
+
+ expect(subject).to eq false
+ end
+ end
+
describe '.canary?' do
it 'is true when CANARY env var is set to true' do
stub_env('CANARY', '1')
@@ -186,6 +208,26 @@ describe Gitlab do
end
end
+ describe '.dev_or_test_env?' do
+ subject { described_class.dev_or_test_env? }
+
+ it 'is true when test env' do
+ expect(subject).to eq true
+ end
+
+ it 'is true when dev env' do
+ allow(Rails).to receive(:env).and_return(ActiveSupport::StringInquirer.new('development'))
+
+ expect(subject).to eq true
+ end
+
+ it 'is false when env is not dev or test' do
+ allow(Rails).to receive(:env).and_return(ActiveSupport::StringInquirer.new('production'))
+
+ expect(subject).to eq false
+ end
+ end
+
describe '.ee?' do
before do
stub_env('FOSS_ONLY', nil) # Make sure the ENV is clean
diff --git a/spec/lib/milestone_array_spec.rb b/spec/lib/milestone_array_spec.rb
deleted file mode 100644
index 375cb87dde6..00000000000
--- a/spec/lib/milestone_array_spec.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe MilestoneArray do
- let(:object1) { instance_double("BirdMilestone", due_date: Time.now, start_date: Time.now - 15.days, title: 'v2.0') }
- let(:object2) { instance_double("CatMilestone", due_date: Time.now - 1.day, start_date: nil, title: 'v1.0') }
- let(:object3) { instance_double("DogMilestone", due_date: nil, start_date: Time.now - 30.days, title: 'v3.0') }
- let(:array) { [object1, object3, object2] }
-
- describe '#sort' do
- it 'reorders array with due date in ascending order with nulls last' do
- expect(described_class.sort(array, 'due_date_asc')).to eq([object2, object1, object3])
- end
-
- it 'reorders array with due date in desc order with nulls last' do
- expect(described_class.sort(array, 'due_date_desc')).to eq([object1, object2, object3])
- end
-
- it 'reorders array with start date in ascending order with nulls last' do
- expect(described_class.sort(array, 'start_date_asc')).to eq([object3, object1, object2])
- end
-
- it 'reorders array with start date in descending order with nulls last' do
- expect(described_class.sort(array, 'start_date_desc')).to eq([object1, object3, object2])
- end
-
- it 'reorders array with title in ascending order' do
- expect(described_class.sort(array, 'name_asc')).to eq([object2, object1, object3])
- end
-
- it 'reorders array with title in descending order' do
- expect(described_class.sort(array, 'name_desc')).to eq([object3, object1, object2])
- end
- end
-end
diff --git a/spec/lib/object_storage/direct_upload_spec.rb b/spec/lib/object_storage/direct_upload_spec.rb
index fae0c636bdc..c3890c72852 100644
--- a/spec/lib/object_storage/direct_upload_spec.rb
+++ b/spec/lib/object_storage/direct_upload_spec.rb
@@ -3,11 +3,17 @@
require 'spec_helper'
describe ObjectStorage::DirectUpload do
+ let(:region) { 'us-east-1' }
+ let(:path_style) { false }
+ let(:use_iam_profile) { false }
let(:credentials) do
{
provider: 'AWS',
aws_access_key_id: 'AWS_ACCESS_KEY_ID',
- aws_secret_access_key: 'AWS_SECRET_ACCESS_KEY'
+ aws_secret_access_key: 'AWS_SECRET_ACCESS_KEY',
+ region: region,
+ path_style: path_style,
+ use_iam_profile: use_iam_profile
}
end
@@ -57,6 +63,62 @@ describe ObjectStorage::DirectUpload do
describe '#to_hash' do
subject { direct_upload.to_hash }
+ shared_examples 'a valid S3 upload' do
+ it_behaves_like 'a valid upload'
+
+ it 'sets Workhorse client data' do
+ expect(subject[:UseWorkhorseClient]).to eq(use_iam_profile)
+ expect(subject[:RemoteTempObjectID]).to eq(object_name)
+
+ object_store_config = subject[:ObjectStorage]
+ expect(object_store_config[:Provider]).to eq 'AWS'
+
+ s3_config = object_store_config[:S3Config]
+ expect(s3_config[:Bucket]).to eq(bucket_name)
+ expect(s3_config[:Region]).to eq(region)
+ expect(s3_config[:PathStyle]).to eq(path_style)
+ expect(s3_config[:UseIamProfile]).to eq(use_iam_profile)
+ end
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(use_workhorse_s3_client: false)
+ end
+
+ it 'does not enable Workhorse client' do
+ expect(subject[:UseWorkhorseClient]).to be false
+ end
+ end
+
+ context 'when V2 signatures are used' do
+ before do
+ credentials[:aws_signature_version] = 2
+ end
+
+ it 'does not enable Workhorse client' do
+ expect(subject[:UseWorkhorseClient]).to be false
+ end
+ end
+
+ context 'when V4 signatures are used' do
+ before do
+ credentials[:aws_signature_version] = 4
+ end
+
+ it 'enables the Workhorse client for instance profiles' do
+ expect(subject[:UseWorkhorseClient]).to eq(use_iam_profile)
+ end
+ end
+ end
+
+ shared_examples 'a valid Google upload' do
+ it_behaves_like 'a valid upload'
+
+ it 'does not set Workhorse client data' do
+ expect(subject.keys).not_to include(:UseWorkhorseClient, :RemoteTempObjectID, :ObjectStorage)
+ end
+ end
+
shared_examples 'a valid upload' do
it "returns valid structure" do
expect(subject).to have_key(:Timeout)
@@ -97,6 +159,16 @@ describe ObjectStorage::DirectUpload do
end
end
+ shared_examples 'a valid S3 upload without multipart data' do
+ it_behaves_like 'a valid S3 upload'
+ it_behaves_like 'a valid upload without multipart data'
+ end
+
+ shared_examples 'a valid S3 upload with multipart data' do
+ it_behaves_like 'a valid S3 upload'
+ it_behaves_like 'a valid upload with multipart data'
+ end
+
shared_examples 'a valid upload without multipart data' do
it_behaves_like 'a valid upload'
@@ -109,13 +181,50 @@ describe ObjectStorage::DirectUpload do
context 'when length is known' do
let(:has_length) { true }
- it_behaves_like 'a valid upload without multipart data'
+ it_behaves_like 'a valid S3 upload without multipart data'
+
+ context 'when path style is true' do
+ let(:path_style) { true }
+ let(:storage_url) { 'https://s3.amazonaws.com/uploads' }
+
+ before do
+ stub_object_storage_multipart_init(storage_url, "myUpload")
+ end
+
+ it_behaves_like 'a valid S3 upload without multipart data'
+ end
+
+ context 'when IAM profile is true' do
+ let(:use_iam_profile) { true }
+ let(:iam_credentials_url) { "http://169.254.169.254/latest/meta-data/iam/security-credentials/" }
+ let(:iam_credentials) do
+ {
+ 'AccessKeyId' => 'dummykey',
+ 'SecretAccessKey' => 'dummysecret',
+ 'Token' => 'dummytoken',
+ 'Expiration' => 1.day.from_now.xmlschema
+ }
+ end
+
+ before do
+ stub_request(:get, iam_credentials_url)
+ .to_return(status: 200, body: "somerole", headers: {})
+ stub_request(:get, "#{iam_credentials_url}somerole")
+ .to_return(status: 200, body: iam_credentials.to_json, headers: {})
+ end
+
+ it_behaves_like 'a valid S3 upload without multipart data'
+ end
end
context 'when length is unknown' do
let(:has_length) { false }
- it_behaves_like 'a valid upload with multipart data' do
+ it_behaves_like 'a valid S3 upload with multipart data' do
+ before do
+ stub_object_storage_multipart_init(storage_url, "myUpload")
+ end
+
context 'when maximum upload size is 10MB' do
let(:maximum_size) { 10.megabyte }
@@ -169,12 +278,14 @@ describe ObjectStorage::DirectUpload do
context 'when length is known' do
let(:has_length) { true }
+ it_behaves_like 'a valid Google upload'
it_behaves_like 'a valid upload without multipart data'
end
context 'when length is unknown' do
let(:has_length) { false }
+ it_behaves_like 'a valid Google upload'
it_behaves_like 'a valid upload without multipart data'
end
end
diff --git a/spec/lib/peek/views/bullet_detailed_spec.rb b/spec/lib/peek/views/bullet_detailed_spec.rb
new file mode 100644
index 00000000000..a482cadc7db
--- /dev/null
+++ b/spec/lib/peek/views/bullet_detailed_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Peek::Views::BulletDetailed do
+ subject { described_class.new }
+
+ before do
+ allow(Bullet).to receive(:enable?).and_return(bullet_enabled)
+ end
+
+ context 'bullet disabled' do
+ let(:bullet_enabled) { false }
+
+ it 'returns empty results' do
+ expect(subject.results).to eq({})
+ end
+ end
+
+ context 'bullet enabled' do
+ let(:bullet_enabled) { true }
+
+ before do
+ allow(Bullet).to receive_message_chain(:notification_collector, :collection).and_return(notifications)
+ end
+
+ context 'where there are no notifications' do
+ let(:notifications) { [] }
+
+ it 'returns empty results' do
+ expect(subject.results).to eq({})
+ end
+ end
+
+ context 'when notifications exist' do
+ let(:notifications) do
+ [
+ double(title: 'Title 1', body: 'Body 1', body_with_caller: "first\nsecond\n"),
+ double(title: 'Title 2', body: 'Body 2', body_with_caller: "first\nsecond\n")
+ ]
+ end
+
+      it 'returns the key, call count, warnings and notification details' do
+ expect(subject.key).to eq('bullet')
+ expect(subject.results[:calls]).to eq(2)
+ expect(subject.results[:warnings]).to eq([Peek::Views::BulletDetailed::WARNING_MESSAGE])
+ expect(subject.results[:details]).to eq([
+ { notification: 'Title 1: Body 1', backtrace: "first\nsecond\n" },
+ { notification: 'Title 2: Body 2', backtrace: "first\nsecond\n" }
+ ])
+ end
+ end
+ end
+end
diff --git a/spec/lib/peek/views/redis_detailed_spec.rb b/spec/lib/peek/views/redis_detailed_spec.rb
index fa9532226f2..a270c006a43 100644
--- a/spec/lib/peek/views/redis_detailed_spec.rb
+++ b/spec/lib/peek/views/redis_detailed_spec.rb
@@ -17,7 +17,7 @@ describe Peek::Views::RedisDetailed, :request_store do
with_them do
it 'scrubs Redis commands' do
- subject.detail_store << { cmd: cmd, duration: 1.second }
+ Gitlab::Instrumentation::Redis::SharedState.detail_store << { cmd: cmd, duration: 1.second }
expect(subject.results[:details].count).to eq(1)
expect(subject.results[:details].first)
@@ -29,11 +29,12 @@ describe Peek::Views::RedisDetailed, :request_store do
end
it 'returns aggregated results' do
- subject.detail_store << { cmd: [:get, 'test'], duration: 0.001 }
- subject.detail_store << { cmd: [:get, 'test'], duration: 1.second }
+ Gitlab::Instrumentation::Redis::Cache.detail_store << { cmd: [:get, 'test'], duration: 0.001 }
+ Gitlab::Instrumentation::Redis::Cache.detail_store << { cmd: [:get, 'test'], duration: 1.second }
+ Gitlab::Instrumentation::Redis::SharedState.detail_store << { cmd: [:get, 'test'], duration: 1.second }
- expect(subject.results[:calls]).to eq(2)
- expect(subject.results[:duration]).to eq('1001.00ms')
- expect(subject.results[:details].count).to eq(2)
+ expect(subject.results[:calls]).to eq(3)
+ expect(subject.results[:duration]).to eq('2001.00ms')
+ expect(subject.results[:details].count).to eq(3)
end
end
diff --git a/spec/lib/peek/views/rugged_spec.rb b/spec/lib/peek/views/rugged_spec.rb
index b9507f772d2..39968afed39 100644
--- a/spec/lib/peek/views/rugged_spec.rb
+++ b/spec/lib/peek/views/rugged_spec.rb
@@ -16,7 +16,7 @@ describe Peek::Views::Rugged, :request_store do
end
it 'returns aggregated results' do
- ::Gitlab::RuggedInstrumentation.query_time += 1.234
+ ::Gitlab::RuggedInstrumentation.add_query_time(1.234)
::Gitlab::RuggedInstrumentation.increment_query_count
::Gitlab::RuggedInstrumentation.increment_query_count
diff --git a/spec/lib/quality/test_level_spec.rb b/spec/lib/quality/test_level_spec.rb
index b784a92fa85..ad29c80b07a 100644
--- a/spec/lib/quality/test_level_spec.rb
+++ b/spec/lib/quality/test_level_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe Quality::TestLevel do
context 'when level is unit' do
it 'returns a pattern' do
expect(subject.pattern(:unit))
- .to eq("spec/{bin,channels,config,db,dependencies,factories,finders,frontend,graphql,haml_lint,helpers,initializers,javascripts,lib,models,policies,presenters,rack_servers,replicators,routing,rubocop,serializers,services,sidekiq,support_specs,tasks,uploaders,validators,views,workers,elastic_integration}{,/**/}*_spec.rb")
+ .to eq("spec/{bin,channels,config,db,dependencies,factories,finders,frontend,graphql,haml_lint,helpers,initializers,javascripts,lib,models,policies,presenters,rack_servers,replicators,routing,rubocop,serializers,services,sidekiq,support_specs,tasks,uploaders,validators,views,workers,elastic_integration,tooling}{,/**/}*_spec.rb")
end
end
@@ -89,7 +89,7 @@ RSpec.describe Quality::TestLevel do
context 'when level is unit' do
it 'returns a regexp' do
expect(subject.regexp(:unit))
- .to eq(%r{spec/(bin|channels|config|db|dependencies|factories|finders|frontend|graphql|haml_lint|helpers|initializers|javascripts|lib|models|policies|presenters|rack_servers|replicators|routing|rubocop|serializers|services|sidekiq|support_specs|tasks|uploaders|validators|views|workers|elastic_integration)})
+ .to eq(%r{spec/(bin|channels|config|db|dependencies|factories|finders|frontend|graphql|haml_lint|helpers|initializers|javascripts|lib|models|policies|presenters|rack_servers|replicators|routing|rubocop|serializers|services|sidekiq|support_specs|tasks|uploaders|validators|views|workers|elastic_integration|tooling)})
end
end
@@ -144,6 +144,10 @@ RSpec.describe Quality::TestLevel do
expect(subject.level_for('spec/models/abuse_report_spec.rb')).to eq(:unit)
end
+ it 'returns the correct level for a tooling test' do
+ expect(subject.level_for('spec/tooling/lib/tooling/test_file_finder_spec.rb')).to eq(:unit)
+ end
+
it 'returns the correct level for a migration test' do
expect(subject.level_for('spec/migrations/add_default_and_free_plans_spec.rb')).to eq(:migration)
end