author     GitLab Bot <gitlab-bot@gitlab.com>  2020-09-19 01:45:44 +0000
committer  GitLab Bot <gitlab-bot@gitlab.com>  2020-09-19 01:45:44 +0000
commit     85dc423f7090da0a52c73eb66faf22ddb20efff9 (patch)
tree       9160f299afd8c80c038f08e1545be119f5e3f1e1 /spec/lib/gitlab
parent     15c2c8c66dbe422588e5411eee7e68f1fa440bb8 (diff)
download   gitlab-ce-85dc423f7090da0a52c73eb66faf22ddb20efff9.tar.gz
Add latest changes from gitlab-org/gitlab@13-4-stable-ee
Diffstat (limited to 'spec/lib/gitlab')
-rw-r--r--  spec/lib/gitlab/alert_management/alert_params_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/alert_management/payload/base_spec.rb | 210
-rw-r--r--  spec/lib/gitlab/alert_management/payload/generic_spec.rb | 89
-rw-r--r--  spec/lib/gitlab/alert_management/payload/managed_prometheus_spec.rb | 167
-rw-r--r--  spec/lib/gitlab/alert_management/payload/prometheus_spec.rb | 240
-rw-r--r--  spec/lib/gitlab/alert_management/payload_spec.rb | 60
-rw-r--r--  spec/lib/gitlab/alerting/notification_payload_parser_spec.rb | 18
-rw-r--r--  spec/lib/gitlab/analytics/instance_statistics/workers_argument_builder_spec.rb | 46
-rw-r--r--  spec/lib/gitlab/anonymous_session_spec.rb | 43
-rw-r--r--  spec/lib/gitlab/app_text_logger_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/asciidoc_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/auth/atlassian/auth_hash_spec.rb | 50
-rw-r--r--  spec/lib/gitlab/auth/atlassian/identity_linker_spec.rb | 71
-rw-r--r--  spec/lib/gitlab/auth/atlassian/user_spec.rb | 60
-rw-r--r--  spec/lib/gitlab/auth/ldap/adapter_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/auth/ldap/config_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/auth/o_auth/provider_spec.rb | 44
-rw-r--r--  spec/lib/gitlab/auth/o_auth/user_spec.rb | 53
-rw-r--r--  spec/lib/gitlab/auth_spec.rb | 23
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/background_migration/migrate_to_hashed_storage_spec.rb | 43
-rw-r--r--  spec/lib/gitlab/background_migration/set_merge_request_diff_files_count_spec.rb | 19
-rw-r--r--  spec/lib/gitlab/badge/coverage/template_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/badge/pipeline/status_spec.rb | 28
-rw-r--r--  spec/lib/gitlab/badge/pipeline/template_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/bitbucket_server_import/importer_spec.rb | 356
-rw-r--r--  spec/lib/gitlab/checks/lfs_integrity_spec.rb | 20
-rw-r--r--  spec/lib/gitlab/checks/project_moved_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/checks/snippet_check_spec.rb | 14
-rw-r--r--  spec/lib/gitlab/ci/artifact_file_reader_spec.rb | 11
-rw-r--r--  spec/lib/gitlab/ci/config/entry/job_spec.rb | 18
-rw-r--r--  spec/lib/gitlab/ci/config/entry/jobs_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/ci/config/entry/root_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/ci/config/normalizer/matrix_strategy_spec.rb | 21
-rw-r--r--  spec/lib/gitlab/ci/config/normalizer_spec.rb | 26
-rw-r--r--  spec/lib/gitlab/ci/config_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/ci/jwt_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/ci/lint_spec.rb | 251
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/remove_unwanted_chat_jobs_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/ci/pipeline/expression/lexer_spec.rb | 19
-rw-r--r--  spec/lib/gitlab/ci/pipeline/expression/parser_spec.rb | 74
-rw-r--r--  spec/lib/gitlab/ci/pipeline/seed/build_spec.rb | 39
-rw-r--r--  spec/lib/gitlab/ci/pipeline_object_hierarchy_spec.rb | 111
-rw-r--r--  spec/lib/gitlab/ci/reports/test_case_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/ci/reports/test_suite_spec.rb | 31
-rw-r--r--  spec/lib/gitlab/ci/status/bridge/common_spec.rb | 49
-rw-r--r--  spec/lib/gitlab/ci/status/composite_spec.rb | 51
-rw-r--r--  spec/lib/gitlab/ci/templates/templates_spec.rb | 39
-rw-r--r--  spec/lib/gitlab/ci/trace/stream_spec.rb | 22
-rw-r--r--  spec/lib/gitlab/ci/trace_spec.rb | 23
-rw-r--r--  spec/lib/gitlab/ci/yaml_processor_spec.rb | 806
-rw-r--r--  spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb | 53
-rw-r--r--  spec/lib/gitlab/conan_token_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/consul/internal_spec.rb | 139
-rw-r--r--  spec/lib/gitlab/cycle_analytics/code_stage_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/cycle_analytics/events_spec.rb | 42
-rw-r--r--  spec/lib/gitlab/cycle_analytics/issue_stage_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/cycle_analytics/permissions_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/cycle_analytics/plan_stage_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/cycle_analytics/production_stage_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/cycle_analytics/review_stage_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/cycle_analytics/staging_stage_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/cycle_analytics/test_stage_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/danger/changelog_spec.rb | 72
-rw-r--r--  spec/lib/gitlab/danger/helper_spec.rb | 40
-rw-r--r--  spec/lib/gitlab/danger/teammate_spec.rb | 57
-rw-r--r--  spec/lib/gitlab/data_builder/deployment_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/database/background_migration_job_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/database/batch_count_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/database/concurrent_reindex_spec.rb | 207
-rw-r--r--  spec/lib/gitlab/database/custom_structure_spec.rb | 1
-rw-r--r--  spec/lib/gitlab/database/migration_helpers_spec.rb | 11
-rw-r--r--  spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/database/partitioning/partition_monitoring_spec.rb | 34
-rw-r--r--  spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb | 9
-rw-r--r--  spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb | 147
-rw-r--r--  spec/lib/gitlab/database/schema_cleaner_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/database_importers/common_metrics/prometheus_metric_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/discussions_diff/highlight_cache_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/email/handler/create_issue_handler_spec.rb | 22
-rw-r--r--  spec/lib/gitlab/email/handler/create_merge_request_handler_spec.rb | 24
-rw-r--r--  spec/lib/gitlab/email/handler/create_note_handler_spec.rb | 20
-rw-r--r--  spec/lib/gitlab/email/receiver_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb | 75
-rw-r--r--  spec/lib/gitlab/experimentation_spec.rb | 13
-rw-r--r--  spec/lib/gitlab/external_authorization/access_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/external_authorization/cache_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/file_type_detection_spec.rb | 39
-rw-r--r--  spec/lib/gitlab/git/base_error_spec.rb | 23
-rw-r--r--  spec/lib/gitlab/git/commit_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/github_import/importer/label_links_importer_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/github_import/importer/labels_importer_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/github_import/importer/repository_importer_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/github_import/label_finder_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/github_import/milestone_finder_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/gitpod_spec.rb | 66
-rw-r--r--  spec/lib/gitlab/gl_repository/repo_type_spec.rb | 14
-rw-r--r--  spec/lib/gitlab/gl_repository_spec.rb | 11
-rw-r--r--  spec/lib/gitlab/graphql/docs/renderer_spec.rb | 48
-rw-r--r--  spec/lib/gitlab/graphql/loaders/issuable_loader_spec.rb | 51
-rw-r--r--  spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb | 25
-rw-r--r--  spec/lib/gitlab/graphql/pagination/keyset/order_info_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/graphql/pagination/keyset/query_builder_spec.rb | 37
-rw-r--r--  spec/lib/gitlab/group_search_results_spec.rb | 74
-rw-r--r--  spec/lib/gitlab/hashed_storage/migrator_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/http_spec.rb | 11
-rw-r--r--  spec/lib/gitlab/i18n/po_linter_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/import_export/all_models.yml | 24
-rw-r--r--  spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/import_export/project/tree_saver_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/import_export/safe_model_attributes.yml | 1
-rw-r--r--  spec/lib/gitlab/incident_management/pager_duty/incident_issue_description_spec.rb | 3
-rw-r--r--  spec/lib/gitlab/jira/dvcs_spec.rb | 58
-rw-r--r--  spec/lib/gitlab/jira/middleware_spec.rb | 40
-rw-r--r--  spec/lib/gitlab/kas_spec.rb | 61
-rw-r--r--  spec/lib/gitlab/kubernetes/cilium_network_policy_spec.rb | 101
-rw-r--r--  spec/lib/gitlab/kubernetes/kube_client_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/kubernetes/network_policy_spec.rb | 59
-rw-r--r--  spec/lib/gitlab/lfs/client_spec.rb | 148
-rw-r--r--  spec/lib/gitlab/log_timestamp_formatter_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/metrics/dashboard/importer_spec.rb | 55
-rw-r--r--  spec/lib/gitlab/metrics/dashboard/importers/prometheus_metrics_spec.rb | 79
-rw-r--r--  spec/lib/gitlab/metrics/dashboard/stages/track_panel_type_spec.rb | 13
-rw-r--r--  spec/lib/gitlab/metrics/dashboard/transformers/yml/v1/prometheus_metrics_spec.rb | 99
-rw-r--r--  spec/lib/gitlab/metrics/dashboard/url_spec.rb | 103
-rw-r--r--  spec/lib/gitlab/metrics/dashboard/validator/errors_spec.rb | 13
-rw-r--r--  spec/lib/gitlab/metrics/dashboard/validator_spec.rb | 52
-rw-r--r--  spec/lib/gitlab/metrics/exporter/sidekiq_exporter_spec.rb | 24
-rw-r--r--  spec/lib/gitlab/metrics/instrumentation_spec.rb | 14
-rw-r--r--  spec/lib/gitlab/metrics/method_call_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/metrics/samplers/action_cable_sampler_spec.rb | 94
-rw-r--r--  spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/middleware/multipart/handler_for_jwt_params_spec.rb | 53
-rw-r--r--  spec/lib/gitlab/middleware/multipart/handler_spec.rb | 53
-rw-r--r--  spec/lib/gitlab/middleware/multipart_spec.rb | 313
-rw-r--r--  spec/lib/gitlab/middleware/multipart_with_handler_for_jwt_params_spec.rb | 171
-rw-r--r--  spec/lib/gitlab/middleware/multipart_with_handler_spec.rb | 144
-rw-r--r--  spec/lib/gitlab/middleware/same_site_cookies_spec.rb | 49
-rw-r--r--  spec/lib/gitlab/pages/settings_spec.rb | 32
-rw-r--r--  spec/lib/gitlab/pages_transfer_spec.rb | 137
-rw-r--r--  spec/lib/gitlab/phabricator_import/cache/map_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/project_authorizations_spec.rb | 60
-rw-r--r--  spec/lib/gitlab/project_search_results_spec.rb | 290
-rw-r--r--  spec/lib/gitlab/prometheus/internal_spec.rb | 28
-rw-r--r--  spec/lib/gitlab/prometheus/queries/additional_metrics_environment_query_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/prometheus/queries/validate_query_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/prometheus_client_spec.rb | 32
-rw-r--r--  spec/lib/gitlab/quick_actions/substitution_definition_spec.rb | 20
-rw-r--r--  spec/lib/gitlab/reference_counter_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/regex_spec.rb | 101
-rw-r--r--  spec/lib/gitlab/relative_positioning/item_context_spec.rb | 215
-rw-r--r--  spec/lib/gitlab/relative_positioning/mover_spec.rb | 487
-rw-r--r--  spec/lib/gitlab/relative_positioning/range_spec.rb | 162
-rw-r--r--  spec/lib/gitlab/repository_cache_adapter_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/robots_txt/parser_spec.rb | 71
-rw-r--r--  spec/lib/gitlab/search/recent_issues_spec.rb | 11
-rw-r--r--  spec/lib/gitlab/search/recent_merge_requests_spec.rb | 11
-rw-r--r--  spec/lib/gitlab/search_results_spec.rb | 54
-rw-r--r--  spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/none_spec.rb | 29
-rw-r--r--  spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing_spec.rb | 3
-rw-r--r--  spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/sql/except_spec.rb | 7
-rw-r--r--  spec/lib/gitlab/sql/intersect_spec.rb | 7
-rw-r--r--  spec/lib/gitlab/sql/union_spec.rb | 37
-rw-r--r--  spec/lib/gitlab/static_site_editor/config/file_config_spec.rb | 15
-rw-r--r--  spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb (renamed from spec/lib/gitlab/static_site_editor/config_spec.rb) | 48
-rw-r--r--  spec/lib/gitlab/submodule_links_spec.rb | 58
-rw-r--r--  spec/lib/gitlab/template/finders/global_template_finder_spec.rb | 19
-rw-r--r--  spec/lib/gitlab/template/gitlab_ci_yml_template_spec.rb | 6
-rw-r--r--  spec/lib/gitlab/tracking/incident_management_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/tracking_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/updated_notes_paginator_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/usage_data/topology_spec.rb | 110
-rw-r--r--  spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb | 89
-rw-r--r--  spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb | 277
-rw-r--r--  spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb | 111
-rw-r--r--  spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb | 23
-rw-r--r--  spec/lib/gitlab/usage_data_counters/redis_counter_spec.rb | 48
-rw-r--r--  spec/lib/gitlab/usage_data_counters/track_unique_events_spec.rb (renamed from spec/lib/gitlab/usage_data_counters/track_unique_actions_spec.rb) | 19
-rw-r--r--  spec/lib/gitlab/usage_data_queries_spec.rb | 41
-rw-r--r--  spec/lib/gitlab/usage_data_spec.rb | 277
-rw-r--r--  spec/lib/gitlab/utils/gzip_spec.rb | 58
-rw-r--r--  spec/lib/gitlab/utils/markdown_spec.rb | 32
-rw-r--r--  spec/lib/gitlab/utils/usage_data_spec.rb | 149
-rw-r--r--  spec/lib/gitlab/web_ide/config/entry/global_spec.rb | 5
-rw-r--r--  spec/lib/gitlab/workhorse_spec.rb | 8
191 files changed, 7864 insertions, 2035 deletions
diff --git a/spec/lib/gitlab/alert_management/alert_params_spec.rb b/spec/lib/gitlab/alert_management/alert_params_spec.rb
index 1fe27365c83..c3171be5e29 100644
--- a/spec/lib/gitlab/alert_management/alert_params_spec.rb
+++ b/spec/lib/gitlab/alert_management/alert_params_spec.rb
@@ -34,7 +34,9 @@ RSpec.describe Gitlab::AlertManagement::AlertParams do
hosts: ['gitlab.com'],
payload: payload,
started_at: started_at,
- fingerprint: nil
+ ended_at: nil,
+ fingerprint: nil,
+ environment: nil
)
end
diff --git a/spec/lib/gitlab/alert_management/payload/base_spec.rb b/spec/lib/gitlab/alert_management/payload/base_spec.rb
new file mode 100644
index 00000000000..e0f63bad05d
--- /dev/null
+++ b/spec/lib/gitlab/alert_management/payload/base_spec.rb
@@ -0,0 +1,210 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::AlertManagement::Payload::Base do
+ let_it_be(:project) { create(:project) }
+ let(:raw_payload) { {} }
+ let(:payload_class) { described_class }
+
+ subject(:parsed_payload) { payload_class.new(project: project, payload: raw_payload) }
+
+ describe '.attribute' do
+ subject { parsed_payload.test }
+
+ context 'with a single path provided' do
+ let(:payload_class) do
+ Class.new(described_class) do
+ attribute :test, paths: [['test']]
+ end
+ end
+
+ it { is_expected.to be_nil }
+
+ context 'and a matching value' do
+ let(:raw_payload) { { 'test' => 'value' } }
+
+ it { is_expected.to eq 'value' }
+ end
+ end
+
+ context 'with multiple paths provided' do
+ let(:payload_class) do
+ Class.new(described_class) do
+ attribute :test, paths: [['test'], %w(alt test)]
+ end
+ end
+
+ it { is_expected.to be_nil }
+
+ context 'and a matching value' do
+ let(:raw_payload) { { 'alt' => { 'test' => 'value' } } }
+
+ it { is_expected.to eq 'value' }
+ end
+ end
+
+ context 'with a fallback provided' do
+ let(:payload_class) do
+ Class.new(described_class) do
+ attribute :test, paths: [['test']], fallback: -> { 'fallback' }
+ end
+ end
+
+ it { is_expected.to eq('fallback') }
+
+ context 'and a matching value' do
+ let(:raw_payload) { { 'test' => 'value' } }
+
+ it { is_expected.to eq 'value' }
+ end
+ end
+
+ context 'with a time type provided' do
+ let(:test_time) { Time.current.change(usec: 0) }
+
+ let(:payload_class) do
+ Class.new(described_class) do
+ attribute :test, paths: [['test']], type: :time
+ end
+ end
+
+ it { is_expected.to be_nil }
+
+ context 'with a compatible matching value' do
+ let(:raw_payload) { { 'test' => test_time.to_s } }
+
+ it { is_expected.to eq test_time }
+ end
+
+ context 'with a value in rfc3339 format' do
+ let(:raw_payload) { { 'test' => test_time.rfc3339 } }
+
+ it { is_expected.to eq test_time }
+ end
+
+ context 'with an incompatible matching value' do
+ let(:raw_payload) { { 'test' => 'bad time' } }
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ context 'with an integer type provided' do
+ let(:payload_class) do
+ Class.new(described_class) do
+ attribute :test, paths: [['test']], type: :integer
+ end
+ end
+
+ it { is_expected.to be_nil }
+
+ context 'with a compatible matching value' do
+ let(:raw_payload) { { 'test' => '15' } }
+
+ it { is_expected.to eq 15 }
+ end
+
+ context 'with an incompatible matching value' do
+ let(:raw_payload) { { 'test' => String } }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'with an incompatible matching value' do
+ let(:raw_payload) { { 'test' => 'apple' } }
+
+ it { is_expected.to be_nil }
+ end
+ end
+ end
+
+ describe '#alert_params' do
+ before do
+ allow(parsed_payload).to receive(:title).and_return('title')
+ allow(parsed_payload).to receive(:description).and_return('description')
+ end
+
+ subject { parsed_payload.alert_params }
+
+ it { is_expected.to eq({ description: 'description', project_id: project.id, title: 'title' }) }
+ end
+
+ describe '#gitlab_fingerprint' do
+ subject { parsed_payload.gitlab_fingerprint }
+
+ it { is_expected.to be_nil }
+
+ context 'when plain_gitlab_fingerprint is defined' do
+ before do
+ allow(parsed_payload)
+ .to receive(:plain_gitlab_fingerprint)
+ .and_return('fingerprint')
+ end
+
+ it 'returns a fingerprint' do
+ is_expected.to eq(Digest::SHA1.hexdigest('fingerprint'))
+ end
+ end
+ end
+
+ describe '#environment' do
+ let_it_be(:environment) { create(:environment, project: project, name: 'production') }
+
+ subject { parsed_payload.environment }
+
+ before do
+ allow(parsed_payload).to receive(:environment_name).and_return(environment_name)
+ end
+
+ context 'without an environment name' do
+ let(:environment_name) { nil }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'with a non-matching environment name' do
+ let(:environment_name) { 'other_environment' }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'with a matching environment name' do
+ let(:environment_name) { 'production' }
+
+ it { is_expected.to eq(environment) }
+ end
+ end
+
+ describe '#resolved?' do
+ before do
+ allow(parsed_payload).to receive(:status).and_return(status)
+ end
+
+ subject { parsed_payload.resolved? }
+
+ context 'when status is not defined' do
+ let(:status) { nil }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when status is not resolved' do
+ let(:status) { 'firing' }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when status is resolved' do
+ let(:status) { 'resolved' }
+
+ it { is_expected.to be_truthy }
+ end
+ end
+
+ describe '#has_required_attributes?' do
+ subject { parsed_payload.has_required_attributes? }
+
+ it { is_expected.to be(true) }
+ end
+end
diff --git a/spec/lib/gitlab/alert_management/payload/generic_spec.rb b/spec/lib/gitlab/alert_management/payload/generic_spec.rb
new file mode 100644
index 00000000000..538a822503e
--- /dev/null
+++ b/spec/lib/gitlab/alert_management/payload/generic_spec.rb
@@ -0,0 +1,89 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::AlertManagement::Payload::Generic do
+ let_it_be(:project) { build_stubbed(:project) }
+ let(:raw_payload) { {} }
+
+ let(:parsed_payload) { described_class.new(project: project, payload: raw_payload) }
+
+ it_behaves_like 'subclass has expected api'
+
+ describe '#title' do
+ subject { parsed_payload.title }
+
+ it_behaves_like 'parsable alert payload field with fallback', 'New: Incident', 'title'
+ end
+
+ describe '#severity' do
+ subject { parsed_payload.severity }
+
+ it_behaves_like 'parsable alert payload field with fallback', 'critical', 'severity'
+ end
+
+ describe '#monitoring_tool' do
+ subject { parsed_payload.monitoring_tool }
+
+ it_behaves_like 'parsable alert payload field', 'monitoring_tool'
+ end
+
+ describe '#service' do
+ subject { parsed_payload.service }
+
+ it_behaves_like 'parsable alert payload field', 'service'
+ end
+
+ describe '#hosts' do
+ subject { parsed_payload.hosts }
+
+ it_behaves_like 'parsable alert payload field', 'hosts'
+ end
+
+ describe '#starts_at' do
+ let(:current_time) { Time.current.change(usec: 0).utc }
+
+ subject { parsed_payload.starts_at }
+
+ around do |example|
+ Timecop.freeze(current_time) { example.run }
+ end
+
+ context 'without start_time' do
+ it { is_expected.to eq(current_time) }
+ end
+
+ context "with start_time" do
+ let(:value) { 10.minutes.ago.change(usec: 0).utc }
+
+ before do
+ raw_payload['start_time'] = value.to_s
+ end
+
+ it { is_expected.to eq(value) }
+ end
+ end
+
+ describe '#runbook' do
+ subject { parsed_payload.runbook }
+
+ it_behaves_like 'parsable alert payload field', 'runbook'
+ end
+
+ describe '#gitlab_fingerprint' do
+ let(:plain_fingerprint) { 'fingerprint' }
+ let(:raw_payload) { { 'fingerprint' => plain_fingerprint } }
+
+ subject { parsed_payload.gitlab_fingerprint }
+
+ it 'returns a fingerprint' do
+ is_expected.to eq(Digest::SHA1.hexdigest(plain_fingerprint))
+ end
+ end
+
+ describe '#environment_name' do
+ subject { parsed_payload.environment_name }
+
+ it_behaves_like 'parsable alert payload field', 'gitlab_environment_name'
+ end
+end
diff --git a/spec/lib/gitlab/alert_management/payload/managed_prometheus_spec.rb b/spec/lib/gitlab/alert_management/payload/managed_prometheus_spec.rb
new file mode 100644
index 00000000000..862b5b2bdc3
--- /dev/null
+++ b/spec/lib/gitlab/alert_management/payload/managed_prometheus_spec.rb
@@ -0,0 +1,167 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::AlertManagement::Payload::ManagedPrometheus do
+ let_it_be(:project) { create(:project) }
+ let(:raw_payload) { {} }
+
+ let(:parsed_payload) { described_class.new(project: project, payload: raw_payload) }
+
+ it_behaves_like 'subclass has expected api'
+
+ shared_context 'with gitlab alert' do
+ let_it_be(:gitlab_alert) { create(:prometheus_alert, project: project) }
+ let(:metric_id) { gitlab_alert.prometheus_metric_id.to_s }
+ let(:alert_id) { gitlab_alert.id.to_s }
+ end
+
+ describe '#metric_id' do
+ subject { parsed_payload.metric_id }
+
+ it { is_expected.to be_nil }
+
+ context 'with gitlab_alert_id' do
+ let(:raw_payload) { { 'labels' => { 'gitlab_alert_id' => '12' } } }
+
+ it { is_expected.to eq(12) }
+ end
+ end
+
+ describe '#gitlab_prometheus_alert_id' do
+ subject { parsed_payload.gitlab_prometheus_alert_id }
+
+ it { is_expected.to be_nil }
+
+ context 'with gitlab_alert_id' do
+ let(:raw_payload) { { 'labels' => { 'gitlab_prometheus_alert_id' => '12' } } }
+
+ it { is_expected.to eq(12) }
+ end
+ end
+
+ describe '#gitlab_alert' do
+ subject { parsed_payload.gitlab_alert }
+
+ context 'without alert info in payload' do
+ it { is_expected.to be_nil }
+ end
+
+ context 'with metric id in payload' do
+ let(:raw_payload) { { 'labels' => { 'gitlab_alert_id' => metric_id } } }
+ let(:metric_id) { '-1' }
+
+ context 'without matching alert' do
+ it { is_expected.to be_nil }
+ end
+
+ context 'with matching alert' do
+ include_context 'with gitlab alert'
+
+ it { is_expected.to eq(gitlab_alert) }
+
+ context 'when unclear which alert applies' do
+ # With multiple alerts for different environments,
+ # we can't be sure which prometheus alert the payload
+ # belongs to
+ let_it_be(:another_alert) do
+ create(:prometheus_alert,
+ prometheus_metric: gitlab_alert.prometheus_metric,
+ project: project)
+ end
+
+ it { is_expected.to be_nil }
+ end
+ end
+ end
+
+ context 'with alert id' do
+ # gitlab_prometheus_alert_id is a stronger identifier,
+ # but was added after gitlab_alert_id; we won't
+ # see it without gitlab_alert_id also present
+ let(:raw_payload) do
+ {
+ 'labels' => {
+ 'gitlab_alert_id' => metric_id,
+ 'gitlab_prometheus_alert_id' => alert_id
+ }
+ }
+ end
+
+ context 'without matching alert' do
+ let(:alert_id) { '-1' }
+ let(:metric_id) { '-1' }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'with matching alerts' do
+ include_context 'with gitlab alert'
+
+ it { is_expected.to eq(gitlab_alert) }
+ end
+ end
+ end
+
+ describe '#full_query' do
+ subject { parsed_payload.full_query }
+
+ it { is_expected.to be_nil }
+
+ context 'with gitlab alert' do
+ include_context 'with gitlab alert'
+
+ let(:raw_payload) { { 'labels' => { 'gitlab_alert_id' => metric_id } } }
+
+ it { is_expected.to eq(gitlab_alert.full_query) }
+ end
+
+ context 'with sufficient fallback info' do
+ let(:raw_payload) { { 'generatorURL' => 'http://localhost:9090/graph?g0.expr=vector%281%29' } }
+
+ it { is_expected.to eq('vector(1)') }
+ end
+ end
+
+ describe '#environment' do
+ subject { parsed_payload.environment }
+
+ context 'with gitlab alert' do
+ include_context 'with gitlab alert'
+
+ let(:raw_payload) { { 'labels' => { 'gitlab_alert_id' => metric_id } } }
+
+ it { is_expected.to eq(gitlab_alert.environment) }
+ end
+
+ context 'with sufficient fallback info' do
+ let_it_be(:environment) { create(:environment, project: project, name: 'production') }
+ let(:raw_payload) do
+ {
+ 'labels' => {
+ 'gitlab_alert_id' => '-1',
+ 'gitlab_environment_name' => 'production'
+ }
+ }
+ end
+
+ it { is_expected.to eq(environment) }
+ end
+ end
+
+ describe '#metrics_dashboard_url' do
+ subject { parsed_payload.metrics_dashboard_url }
+
+ context 'without alert' do
+ it { is_expected.to be_nil }
+ end
+
+ context 'with gitlab alert' do
+ include_context 'gitlab-managed prometheus alert attributes' do
+ let(:raw_payload) { payload }
+ end
+
+ it { is_expected.to eq(dashboard_url_for_alert) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/alert_management/payload/prometheus_spec.rb b/spec/lib/gitlab/alert_management/payload/prometheus_spec.rb
new file mode 100644
index 00000000000..457db58a28b
--- /dev/null
+++ b/spec/lib/gitlab/alert_management/payload/prometheus_spec.rb
@@ -0,0 +1,240 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::AlertManagement::Payload::Prometheus do
+ let_it_be(:project) { create(:project) }
+ let(:raw_payload) { {} }
+
+ let(:parsed_payload) { described_class.new(project: project, payload: raw_payload) }
+
+ it_behaves_like 'subclass has expected api'
+
+ shared_context 'with environment' do
+ let_it_be(:environment) { create(:environment, project: project, name: 'production') }
+ end
+
+ describe '#title' do
+ subject { parsed_payload.title }
+
+ it_behaves_like 'parsable alert payload field',
+ 'annotations/title',
+ 'annotations/summary',
+ 'labels/alertname'
+ end
+
+ describe '#description' do
+ subject { parsed_payload.description }
+
+ it_behaves_like 'parsable alert payload field', 'annotations/description'
+ end
+
+ describe '#annotations' do
+ subject { parsed_payload.annotations }
+
+ it_behaves_like 'parsable alert payload field', 'annotations'
+ end
+
+ describe '#status' do
+ subject { parsed_payload.status }
+
+ it_behaves_like 'parsable alert payload field', 'status'
+ end
+
+ describe '#starts_at' do
+ let(:current_time) { Time.current.utc }
+
+ around do |example|
+ freeze_time { example.run }
+ end
+
+ subject { parsed_payload.starts_at }
+
+ context 'without payload' do
+ it { is_expected.to eq(current_time) }
+ end
+
+ context "with startsAt" do
+ let(:value) { 10.minutes.ago.change(usec: 0).utc }
+ let(:raw_payload) { { 'startsAt' => value.rfc3339 } }
+
+ it { is_expected.to eq(value) }
+ end
+ end
+
+ describe '#ends_at' do
+ subject { parsed_payload.ends_at }
+
+ context 'without payload' do
+ it { is_expected.to be_nil }
+ end
+
+ context "with endsAt" do
+ let(:value) { Time.current.change(usec: 0).utc }
+ let(:raw_payload) { { 'endsAt' => value.rfc3339 } }
+
+ it { is_expected.to eq(value) }
+ end
+ end
+
+ describe '#generator_url' do
+ subject { parsed_payload.generator_url }
+
+ it_behaves_like 'parsable alert payload field', 'generatorURL'
+ end
+
+ describe '#runbook' do
+ subject { parsed_payload.runbook }
+
+ it_behaves_like 'parsable alert payload field', 'annotations/runbook'
+ end
+
+ describe '#alert_markdown' do
+ subject { parsed_payload.alert_markdown }
+
+ it_behaves_like 'parsable alert payload field', 'annotations/gitlab_incident_markdown'
+ end
+
+ describe '#environment_name' do
+ subject { parsed_payload.environment_name }
+
+ it_behaves_like 'parsable alert payload field', 'labels/gitlab_environment_name'
+ end
+
+ describe '#gitlab_y_label' do
+ subject { parsed_payload.gitlab_y_label }
+
+ it_behaves_like 'parsable alert payload field',
+ 'annotations/gitlab_y_label',
+ 'annotations/title',
+ 'annotations/summary',
+ 'labels/alertname'
+ end
+
+ describe '#monitoring_tool' do
+ subject { parsed_payload.monitoring_tool }
+
+ it { is_expected.to eq('Prometheus') }
+ end
+
+ describe '#full_query' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { parsed_payload.full_query }
+
+ where(:generator_url, :expected_query) do
+ nil | nil
+ 'http://localhost' | nil
+ 'invalid url' | nil
+ 'http://localhost:9090/graph?g1.expr=vector%281%29' | nil
+ 'http://localhost:9090/graph?g0.expr=vector%281%29' | 'vector(1)'
+ end
+
+ with_them do
+ let(:raw_payload) { { 'generatorURL' => generator_url } }
+
+ it { is_expected.to eq(expected_query) }
+ end
+ end
+
+ describe '#environment' do
+ subject { parsed_payload.environment }
+
+ it { is_expected.to be_nil }
+
+ context 'with environment_name' do
+ let(:raw_payload) { { 'labels' => { 'gitlab_environment_name' => 'production' } } }
+
+ it { is_expected.to be_nil }
+
+ context 'with matching environment' do
+ include_context 'with environment'
+
+ it { is_expected.to eq(environment) }
+ end
+ end
+ end
+
+ describe '#gitlab_fingerprint' do
+ subject { parsed_payload.gitlab_fingerprint }
+
+ let(:raw_payload) do
+ {
+ 'startsAt' => Time.current.to_s,
+ 'generatorURL' => 'http://localhost:9090/graph?g0.expr=vector%281%29',
+ 'annotations' => { 'title' => 'title' }
+ }
+ end
+
+ it 'returns a fingerprint' do
+ plain_fingerprint = [
+ parsed_payload.send(:starts_at_raw),
+ parsed_payload.title,
+ parsed_payload.full_query
+ ].join('/')
+
+ is_expected.to eq(Digest::SHA1.hexdigest(plain_fingerprint))
+ end
+ end
+
+ describe '#metrics_dashboard_url' do
+ include_context 'self-managed prometheus alert attributes' do
+ let(:raw_payload) { payload }
+ end
+
+ subject { parsed_payload.metrics_dashboard_url }
+
+ it { is_expected.to eq(dashboard_url_for_alert) }
+
+ context 'without environment' do
+ let(:raw_payload) { payload.except('labels') }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'without full query' do
+ let(:raw_payload) { payload.except('generatorURL') }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'without title' do
+ let(:raw_payload) { payload.except('annotations') }
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ describe '#has_required_attributes?' do
+ let(:starts_at) { Time.current.change(usec: 0).utc }
+ let(:raw_payload) { { 'annotations' => { 'title' => 'title' }, 'startsAt' => starts_at.rfc3339 } }
+
+ subject { parsed_payload.has_required_attributes? }
+
+ it { is_expected.to be_truthy }
+
+ context 'without project' do
+ let(:parsed_payload) { described_class.new(project: nil, payload: raw_payload) }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'without title' do
+ let(:raw_payload) { { 'startsAt' => starts_at.rfc3339 } }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'without startsAt' do
+ let(:raw_payload) { { 'annotations' => { 'title' => 'title' } } }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'without payload' do
+ let(:parsed_payload) { described_class.new(project: project, payload: nil) }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/alert_management/payload_spec.rb b/spec/lib/gitlab/alert_management/payload_spec.rb
new file mode 100644
index 00000000000..44b55e228c5
--- /dev/null
+++ b/spec/lib/gitlab/alert_management/payload_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::AlertManagement::Payload do
+ describe '.parse' do
+ let_it_be(:project) { build_stubbed(:project) }
+ let(:payload) { {} }
+
+ context 'without a monitoring_tool specified by caller' do
+ subject { described_class.parse(project, payload) }
+
+ context 'without a monitoring tool in the payload' do
+ it { is_expected.to be_a Gitlab::AlertManagement::Payload::Generic }
+ end
+
+ context 'with the payload specifying Prometheus' do
+ let(:payload) { { 'monitoring_tool' => 'Prometheus' } }
+
+ it { is_expected.to be_a Gitlab::AlertManagement::Payload::Prometheus }
+
+ context 'with gitlab-managed attributes' do
+ let(:payload) { { 'monitoring_tool' => 'Prometheus', 'labels' => { 'gitlab_alert_id' => '12' } } }
+
+ it { is_expected.to be_a Gitlab::AlertManagement::Payload::ManagedPrometheus }
+ end
+ end
+
+ context 'with the payload specifying an unknown tool' do
+ let(:payload) { { 'monitoring_tool' => 'Custom Tool' } }
+
+ it { is_expected.to be_a Gitlab::AlertManagement::Payload::Generic }
+ end
+ end
+
+ context 'with monitoring_tool specified by caller' do
+ subject { described_class.parse(project, payload, monitoring_tool: monitoring_tool) }
+
+ context 'as Prometheus' do
+ let(:monitoring_tool) { 'Prometheus' }
+
+ context 'with an externally managed prometheus payload' do
+ it { is_expected.to be_a Gitlab::AlertManagement::Payload::Prometheus }
+ end
+
+ context 'with a self-managed prometheus payload' do
+ let(:payload) { { 'labels' => { 'gitlab_alert_id' => '14' } } }
+
+ it { is_expected.to be_a Gitlab::AlertManagement::Payload::ManagedPrometheus }
+ end
+ end
+
+ context 'as an unknown tool' do
+ let(:monitoring_tool) { 'Custom Tool' }
+
+ it { is_expected.to be_a Gitlab::AlertManagement::Payload::Generic }
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/alerting/notification_payload_parser_spec.rb b/spec/lib/gitlab/alerting/notification_payload_parser_spec.rb
index 0489108b159..ff5ab1116fa 100644
--- a/spec/lib/gitlab/alerting/notification_payload_parser_spec.rb
+++ b/spec/lib/gitlab/alerting/notification_payload_parser_spec.rb
@@ -7,10 +7,12 @@ RSpec.describe Gitlab::Alerting::NotificationPayloadParser do
describe '.call' do
let(:starts_at) { Time.current.change(usec: 0) }
+ let(:ends_at) { Time.current.change(usec: 0) }
let(:payload) do
{
'title' => 'alert title',
'start_time' => starts_at.rfc3339,
+ 'end_time' => ends_at.rfc3339,
'description' => 'Description',
'monitoring_tool' => 'Monitoring tool name',
'service' => 'Service',
@@ -32,7 +34,8 @@ RSpec.describe Gitlab::Alerting::NotificationPayloadParser do
'hosts' => ['gitlab.com'],
'severity' => 'low'
},
- 'startsAt' => starts_at.rfc3339
+ 'startsAt' => starts_at.rfc3339,
+ 'endsAt' => ends_at.rfc3339
}
)
end
@@ -124,11 +127,24 @@ RSpec.describe Gitlab::Alerting::NotificationPayloadParser do
end
end
+ context 'with environment' do
+ let(:environment) { create(:environment, project: project) }
+
+ before do
+ payload[:gitlab_environment_name] = environment.name
+ end
+
+ it 'sets the environment' do
+ expect(subject.dig('annotations', 'environment')).to eq(environment)
+ end
+ end
+
context 'when payload attributes have blank lines' do
let(:payload) do
{
'title' => '',
'start_time' => '',
+ 'end_time' => '',
'description' => '',
'monitoring_tool' => '',
'service' => '',
diff --git a/spec/lib/gitlab/analytics/instance_statistics/workers_argument_builder_spec.rb b/spec/lib/gitlab/analytics/instance_statistics/workers_argument_builder_spec.rb
new file mode 100644
index 00000000000..d232e509e00
--- /dev/null
+++ b/spec/lib/gitlab/analytics/instance_statistics/workers_argument_builder_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Analytics::InstanceStatistics::WorkersArgumentBuilder do
+ context 'when no measurement identifiers are given' do
+ it 'returns empty array' do
+ expect(described_class.new(measurement_identifiers: []).execute).to be_empty
+ end
+ end
+
+ context 'when measurement identifiers are given' do
+ let_it_be(:user_1) { create(:user) }
+ let_it_be(:project_1) { create(:project, namespace: user_1.namespace, creator: user_1) }
+ let_it_be(:project_2) { create(:project, namespace: user_1.namespace, creator: user_1) }
+ let_it_be(:project_3) { create(:project, namespace: user_1.namespace, creator: user_1) }
+
+ let(:recorded_at) { 2.days.ago }
+ let(:projects_measurement_identifier) { ::Analytics::InstanceStatistics::Measurement.identifiers.fetch(:projects) }
+ let(:users_measurement_identifier) { ::Analytics::InstanceStatistics::Measurement.identifiers.fetch(:users) }
+ let(:measurement_identifiers) { [projects_measurement_identifier, users_measurement_identifier] }
+
+ subject { described_class.new(measurement_identifiers: measurement_identifiers, recorded_at: recorded_at).execute }
+
+ it 'returns worker arguments' do
+ expect(subject).to eq([
+ [projects_measurement_identifier, project_1.id, project_3.id, recorded_at],
+ [users_measurement_identifier, user_1.id, user_1.id, recorded_at]
+ ])
+ end
+
+ context 'when bogus measurement identifiers are given' do
+ before do
+ measurement_identifiers << 'bogus1'
+ measurement_identifiers << 'bogus2'
+ end
+
+ it 'skips bogus measurement identifiers' do
+ expect(subject).to eq([
+ [projects_measurement_identifier, project_1.id, project_3.id, recorded_at],
+ [users_measurement_identifier, user_1.id, user_1.id, recorded_at]
+ ])
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/anonymous_session_spec.rb b/spec/lib/gitlab/anonymous_session_spec.rb
index 0f0795cd9fc..671d452ad13 100644
--- a/spec/lib/gitlab/anonymous_session_spec.rb
+++ b/spec/lib/gitlab/anonymous_session_spec.rb
@@ -8,45 +8,36 @@ RSpec.describe Gitlab::AnonymousSession, :clean_gitlab_redis_shared_state do
subject { new_anonymous_session }
- def new_anonymous_session(session_id = default_session_id)
- described_class.new('127.0.0.1', session_id: session_id)
+ def new_anonymous_session
+ described_class.new('127.0.0.1')
end
- describe '#store_session_id_per_ip' do
+ describe '#store_session_ip' do
it 'adds session id to proper key' do
- subject.store_session_id_per_ip
+ subject.count_session_ip
Gitlab::Redis::SharedState.with do |redis|
- expect(redis.smembers("session:lookup:ip:gitlab:127.0.0.1")).to eq [default_session_id]
+ expect(redis.get("session:lookup:ip:gitlab2:127.0.0.1").to_i).to eq 1
end
end
it 'adds expiration time to key' do
Timecop.freeze do
- subject.store_session_id_per_ip
+ subject.count_session_ip
Gitlab::Redis::SharedState.with do |redis|
- expect(redis.ttl("session:lookup:ip:gitlab:127.0.0.1")).to eq(24.hours.to_i)
+ expect(redis.ttl("session:lookup:ip:gitlab2:127.0.0.1")).to eq(24.hours.to_i)
end
end
end
- it 'adds id only once' do
- subject.store_session_id_per_ip
- subject.store_session_id_per_ip
-
- Gitlab::Redis::SharedState.with do |redis|
- expect(redis.smembers("session:lookup:ip:gitlab:127.0.0.1")).to eq [default_session_id]
- end
- end
-
context 'when there is already one session' do
- it 'adds session id to proper key' do
- subject.store_session_id_per_ip
- new_anonymous_session(additional_session_id).store_session_id_per_ip
+ it 'increments the session count' do
+ subject.count_session_ip
+ new_anonymous_session.count_session_ip
Gitlab::Redis::SharedState.with do |redis|
- expect(redis.smembers("session:lookup:ip:gitlab:127.0.0.1")).to contain_exactly(default_session_id, additional_session_id)
+ expect(redis.get("session:lookup:ip:gitlab2:127.0.0.1").to_i).to eq(2)
end
end
end
@@ -55,24 +46,22 @@ RSpec.describe Gitlab::AnonymousSession, :clean_gitlab_redis_shared_state do
describe '#stored_sessions' do
it 'returns all anonymous sessions per ip' do
Gitlab::Redis::SharedState.with do |redis|
- redis.sadd("session:lookup:ip:gitlab:127.0.0.1", default_session_id)
- redis.sadd("session:lookup:ip:gitlab:127.0.0.1", additional_session_id)
+ redis.set("session:lookup:ip:gitlab2:127.0.0.1", 2)
end
- expect(subject.stored_sessions).to eq(2)
+ expect(subject.session_count).to eq(2)
end
end
it 'removes obsolete lookup through ip entries' do
Gitlab::Redis::SharedState.with do |redis|
- redis.sadd("session:lookup:ip:gitlab:127.0.0.1", default_session_id)
- redis.sadd("session:lookup:ip:gitlab:127.0.0.1", additional_session_id)
+ redis.set("session:lookup:ip:gitlab2:127.0.0.1", 2)
end
- subject.cleanup_session_per_ip_entries
+ subject.cleanup_session_per_ip_count
Gitlab::Redis::SharedState.with do |redis|
- expect(redis.smembers("session:lookup:ip:gitlab:127.0.0.1")).to eq [additional_session_id]
+ expect(redis.exists("session:lookup:ip:gitlab2:127.0.0.1")).to eq(false)
end
end
end
diff --git a/spec/lib/gitlab/app_text_logger_spec.rb b/spec/lib/gitlab/app_text_logger_spec.rb
index 04c2e946640..e8bee0f9903 100644
--- a/spec/lib/gitlab/app_text_logger_spec.rb
+++ b/spec/lib/gitlab/app_text_logger_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe Gitlab::AppTextLogger do
end
it 'logs time in UTC with ISO8601.3 standard' do
- Timecop.freeze do
+ freeze_time do
expect(subject.format_message('INFO', Time.now, nil, string_message))
.to include(Time.now.utc.iso8601(3))
end
diff --git a/spec/lib/gitlab/asciidoc_spec.rb b/spec/lib/gitlab/asciidoc_spec.rb
index 1b669e691e7..6b93634690c 100644
--- a/spec/lib/gitlab/asciidoc_spec.rb
+++ b/spec/lib/gitlab/asciidoc_spec.rb
@@ -404,7 +404,7 @@ module Gitlab
++++
stem:[2+2] is 4
- MD
+ MD
expect(render(input, context)).to include('<pre data-math-style="display" class="code math js-render-math"><code>eta_x gamma</code></pre>')
expect(render(input, context)).to include('<p><code data-math-style="inline" class="code math js-render-math">2+2</code> is 4</p>')
diff --git a/spec/lib/gitlab/auth/atlassian/auth_hash_spec.rb b/spec/lib/gitlab/auth/atlassian/auth_hash_spec.rb
new file mode 100644
index 00000000000..c57b15361c4
--- /dev/null
+++ b/spec/lib/gitlab/auth/atlassian/auth_hash_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Auth::Atlassian::AuthHash do
+ let(:auth_hash) do
+ described_class.new(
+ OmniAuth::AuthHash.new(uid: 'john', credentials: credentials)
+ )
+ end
+
+ let(:credentials) do
+ {
+ token: 'super_secret_token',
+ refresh_token: 'super_secret_refresh_token',
+ expires_at: 2.weeks.from_now.to_i,
+ expires: true
+ }
+ end
+
+ describe '#uid' do
+ it 'returns the correct uid' do
+ expect(auth_hash.uid).to eq('john')
+ end
+ end
+
+ describe '#token' do
+ it 'returns the correct token' do
+ expect(auth_hash.token).to eq(credentials[:token])
+ end
+ end
+
+ describe '#refresh_token' do
+ it 'returns the correct refresh token' do
+ expect(auth_hash.refresh_token).to eq(credentials[:refresh_token])
+ end
+ end
+
+ describe '#expires?' do
+ it 'returns the correct expires boolean' do
+ expect(auth_hash.expires?).to eq(credentials[:expires])
+ end
+ end
+
+ describe '#expires_at' do
+ it 'returns the correct expiration' do
+ expect(auth_hash.expires_at).to eq(credentials[:expires_at])
+ end
+ end
+end
diff --git a/spec/lib/gitlab/auth/atlassian/identity_linker_spec.rb b/spec/lib/gitlab/auth/atlassian/identity_linker_spec.rb
new file mode 100644
index 00000000000..ca6b91ac6f1
--- /dev/null
+++ b/spec/lib/gitlab/auth/atlassian/identity_linker_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Auth::Atlassian::IdentityLinker do
+ let(:user) { create(:user) }
+ let(:extern_uid) { generate(:username) }
+ let(:oauth) do
+ OmniAuth::AuthHash.new(
+ uid: extern_uid,
+ provider: 'atlassian_oauth2',
+ info: { name: 'John', email: 'john@mail.com' },
+ credentials: credentials
+ )
+ end
+
+ let(:credentials) do
+ {
+ token: SecureRandom.alphanumeric(1254),
+ refresh_token: SecureRandom.alphanumeric(45),
+ expires_at: 2.weeks.from_now.to_i,
+ expires: true
+ }
+ end
+
+ subject { described_class.new(user, oauth) }
+
+ context 'linked identity exists' do
+ let!(:identity) { create(:atlassian_identity, user: user, extern_uid: extern_uid) }
+
+ before do
+ subject.link
+ end
+
+ it 'sets #changed? to false' do
+ expect(subject).not_to be_changed
+ end
+
+ it 'does not mark as failed' do
+ expect(subject).not_to be_failed
+ end
+ end
+
+ context 'identity already linked to different user' do
+ let!(:identity) { create(:atlassian_identity, extern_uid: extern_uid) }
+
+ it 'sets #changed? to false' do
+ subject.link
+
+ expect(subject).not_to be_changed
+ end
+
+ it 'exposes error message' do
+ expect(subject.error_message).to eq 'Extern uid has already been taken'
+ end
+ end
+
+ context 'identity needs to be created' do
+ let(:identity) { user.atlassian_identity }
+
+ before do
+ subject.link
+ end
+
+ it_behaves_like 'an atlassian identity'
+
+ it 'sets #changed? to true' do
+ expect(subject).to be_changed
+ end
+ end
+end
diff --git a/spec/lib/gitlab/auth/atlassian/user_spec.rb b/spec/lib/gitlab/auth/atlassian/user_spec.rb
new file mode 100644
index 00000000000..1db01102bc2
--- /dev/null
+++ b/spec/lib/gitlab/auth/atlassian/user_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Auth::Atlassian::User do
+ let(:oauth_user) { described_class.new(oauth) }
+ let(:gl_user) { oauth_user.gl_user }
+ let(:extern_uid) { generate(:username) }
+ let(:oauth) do
+ OmniAuth::AuthHash.new(
+ uid: extern_uid,
+ provider: 'atlassian_oauth2',
+ info: { name: 'John', email: 'john@mail.com' },
+ credentials: credentials)
+ end
+
+ let(:credentials) do
+ {
+ token: SecureRandom.alphanumeric(1254),
+ refresh_token: SecureRandom.alphanumeric(45),
+ expires_at: 2.weeks.from_now.to_i,
+ expires: true
+ }
+ end
+
+ describe '.assign_identity_from_auth_hash!' do
+ let(:auth_hash) { ::Gitlab::Auth::Atlassian::AuthHash.new(oauth) }
+ let(:identity) { described_class.assign_identity_from_auth_hash!(Atlassian::Identity.new, auth_hash) }
+
+ it_behaves_like 'an atlassian identity'
+ end
+
+ describe '#save' do
+ context 'for an existing user' do
+ context 'with an existing Atlassian Identity' do
+ let!(:existing_user) { create(:atlassian_user, extern_uid: extern_uid) }
+ let(:identity) { gl_user.atlassian_identity }
+
+ before do
+ oauth_user.save # rubocop:disable Rails/SaveBang
+ end
+
+ it 'finds the existing user and identity' do
+ expect(gl_user.id).to eq(existing_user.id)
+ expect(identity.id).to eq(existing_user.atlassian_identity.id)
+ end
+
+ it_behaves_like 'an atlassian identity'
+ end
+
+ context 'for a new user' do
+ it 'creates the user and identity' do
+ oauth_user.save # rubocop:disable Rails/SaveBang
+
+ expect(gl_user).to be_valid
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/auth/ldap/adapter_spec.rb b/spec/lib/gitlab/auth/ldap/adapter_spec.rb
index 78970378b7f..8546d63cf77 100644
--- a/spec/lib/gitlab/auth/ldap/adapter_spec.rb
+++ b/spec/lib/gitlab/auth/ldap/adapter_spec.rb
@@ -128,7 +128,7 @@ RSpec.describe Gitlab::Auth::Ldap::Adapter do
before do
allow(adapter).to receive(:renew_connection_adapter).and_return(ldap)
allow(ldap).to receive(:search) { raise Net::LDAP::Error, "some error" }
- allow(Rails.logger).to receive(:warn)
+ allow(Gitlab::AppLogger).to receive(:warn)
end
context 'retries the operation' do
@@ -152,7 +152,7 @@ RSpec.describe Gitlab::Auth::Ldap::Adapter do
it 'logs the error' do
expect { subject }.to raise_error(Gitlab::Auth::Ldap::LdapConnectionError)
- expect(Rails.logger).to have_received(:warn).with(
+ expect(Gitlab::AppLogger).to have_received(:warn).with(
"LDAP search raised exception Net::LDAP::Error: some error")
end
end
diff --git a/spec/lib/gitlab/auth/ldap/config_spec.rb b/spec/lib/gitlab/auth/ldap/config_spec.rb
index 4287596af8f..e4c87a54365 100644
--- a/spec/lib/gitlab/auth/ldap/config_spec.rb
+++ b/spec/lib/gitlab/auth/ldap/config_spec.rb
@@ -168,7 +168,7 @@ AtlErSqafbECNDSwS5BX8yDpu5yRBJ4xegO/rNlmb8ICRYkuJapD1xXicFOsmfUK
end
it 'logs an error when an invalid key or cert are configured' do
- allow(Rails.logger).to receive(:error)
+ allow(Gitlab::AppLogger).to receive(:error)
stub_ldap_config(
options: {
'host' => 'ldap.example.com',
@@ -183,7 +183,7 @@ AtlErSqafbECNDSwS5BX8yDpu5yRBJ4xegO/rNlmb8ICRYkuJapD1xXicFOsmfUK
config.adapter_options
- expect(Rails.logger).to have_received(:error).with(/LDAP TLS Options/).twice
+ expect(Gitlab::AppLogger).to have_received(:error).with(/LDAP TLS Options/).twice
end
context 'when verify_certificates is enabled' do
diff --git a/spec/lib/gitlab/auth/o_auth/provider_spec.rb b/spec/lib/gitlab/auth/o_auth/provider_spec.rb
index 658a9976cc2..57f17365190 100644
--- a/spec/lib/gitlab/auth/o_auth/provider_spec.rb
+++ b/spec/lib/gitlab/auth/o_auth/provider_spec.rb
@@ -45,7 +45,7 @@ RSpec.describe Gitlab::Auth::OAuth::Provider do
end
end
- describe '#config_for' do
+ describe '.config_for' do
context 'for an LDAP provider' do
context 'when the provider exists' do
it 'returns the config' do
@@ -91,4 +91,46 @@ RSpec.describe Gitlab::Auth::OAuth::Provider do
end
end
end
+
+ describe '.label_for' do
+ subject { described_class.label_for(name) }
+
+ context 'when configuration specifies a custom label' do
+ let(:name) { 'google_oauth2' }
+ let(:label) { 'Custom Google Provider' }
+ let(:provider) { OpenStruct.new({ 'name' => name, 'label' => label }) }
+
+ before do
+ stub_omniauth_setting(providers: [provider])
+ end
+
+ it 'returns the custom label name' do
+ expect(subject).to eq(label)
+ end
+ end
+
+ context 'when configuration does not specify a custom label' do
+ let(:provider) { OpenStruct.new({ 'name' => name } ) }
+
+ before do
+ stub_omniauth_setting(providers: [provider])
+ end
+
+ context 'when the name does not correspond to a label mapping' do
+ let(:name) { 'twitter' }
+
+ it 'returns the titleized name' do
+ expect(subject).to eq(name.titleize)
+ end
+ end
+ end
+
+ context 'when the name corresponds to a label mapping' do
+ let(:name) { 'gitlab' }
+
+ it 'returns the mapped name' do
+ expect(subject).to eq('GitLab.com')
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/auth/o_auth/user_spec.rb b/spec/lib/gitlab/auth/o_auth/user_spec.rb
index 12e774ec1f8..243d0a4cb45 100644
--- a/spec/lib/gitlab/auth/o_auth/user_spec.rb
+++ b/spec/lib/gitlab/auth/o_auth/user_spec.rb
@@ -202,7 +202,56 @@ RSpec.describe Gitlab::Auth::OAuth::User do
include_examples "to verify compliance with allow_single_sign_on"
end
- context "with auto_link_user enabled" do
+ context "with auto_link_user enabled for a different provider" do
+ before do
+ stub_omniauth_config(auto_link_user: ['saml'])
+ end
+
+ context "and a current GitLab user with a matching email" do
+ let!(:existing_user) { create(:user, email: 'john@mail.com', username: 'john') }
+
+ it "adds the OmniAuth identity to the GitLab user account" do
+ oauth_user.save
+
+ expect(gl_user).not_to be_valid
+ end
+ end
+
+ context "and no current GitLab user with a matching email" do
+ include_examples "to verify compliance with allow_single_sign_on"
+ end
+ end
+
+ context "with auto_link_user enabled for the correct provider" do
+ before do
+ stub_omniauth_config(auto_link_user: ['twitter'])
+ end
+
+ context "and a current GitLab user with a matching email" do
+ let!(:existing_user) { create(:user, email: 'john@mail.com', username: 'john') }
+
+ it "adds the OmniAuth identity to the GitLab user account" do
+ oauth_user.save
+
+ expect(gl_user).to be_valid
+ expect(gl_user.username).to eql 'john'
+ expect(gl_user.email).to eql 'john@mail.com'
+ expect(gl_user.identities.length).to be 1
+ identities_as_hash = gl_user.identities.map { |id| { provider: id.provider, extern_uid: id.extern_uid } }
+ expect(identities_as_hash).to match_array(
+ [
+ { provider: 'twitter', extern_uid: uid }
+ ]
+ )
+ end
+ end
+
+ context "and no current GitLab user with a matching email" do
+ include_examples "to verify compliance with allow_single_sign_on"
+ end
+ end
+
+ context "with auto_link_user enabled for all providers" do
before do
stub_omniauth_config(auto_link_user: true)
end
@@ -421,7 +470,7 @@ RSpec.describe Gitlab::Auth::OAuth::User do
context "with both auto_link_user and auto_link_ldap_user enabled" do
before do
- stub_omniauth_config(auto_link_user: true, auto_link_ldap_user: true)
+ stub_omniauth_config(auto_link_user: ['twitter'], auto_link_ldap_user: true)
end
context "and at least one LDAP provider is defined" do
diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb
index b6a8ac31074..74360637897 100644
--- a/spec/lib/gitlab/auth_spec.rb
+++ b/spec/lib/gitlab/auth_spec.rb
@@ -358,6 +358,29 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
.to eq(Gitlab::Auth::Result.new(nil, nil, nil, nil))
end
end
+
+ context 'when using a project access token' do
+ let_it_be(:project_bot_user) { create(:user, :project_bot) }
+ let_it_be(:project_access_token) { create(:personal_access_token, user: project_bot_user) }
+
+ context 'with valid project access token' do
+ before_all do
+ project.add_maintainer(project_bot_user)
+ end
+
+ it 'succeeds' do
+ expect(gl_auth.find_for_git_client(project_bot_user.username, project_access_token.token, project: project, ip: 'ip'))
+ .to eq(Gitlab::Auth::Result.new(project_bot_user, nil, :personal_access_token, described_class.full_authentication_abilities))
+ end
+ end
+
+ context 'with invalid project access token' do
+ it 'fails' do
+ expect(gl_auth.find_for_git_client(project_bot_user.username, project_access_token.token, project: project, ip: 'ip'))
+ .to eq(Gitlab::Auth::Result.new(nil, nil, nil, nil))
+ end
+ end
+ end
end
context 'while using regular user and password' do
diff --git a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
index fad33265030..a23b74bcaca 100644
--- a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
@@ -327,6 +327,6 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migrat
end
def ls_files(snippet)
- raw_repository(snippet).ls_files(nil)
+ raw_repository(snippet).ls_files(snippet.default_branch)
end
end
diff --git a/spec/lib/gitlab/background_migration/migrate_to_hashed_storage_spec.rb b/spec/lib/gitlab/background_migration/migrate_to_hashed_storage_spec.rb
new file mode 100644
index 00000000000..0f7bb06e830
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/migrate_to_hashed_storage_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+# rubocop:disable RSpec/FactoriesInMigrationSpecs
+RSpec.describe Gitlab::BackgroundMigration::MigrateToHashedStorage, :sidekiq, :redis do
+ let(:migrator) { Gitlab::HashedStorage::Migrator.new }
+
+ subject(:background_migration) { described_class.new }
+
+ describe '#perform' do
+ let!(:project) { create(:project, :empty_repo, :legacy_storage) }
+
+ context 'with pending rollback' do
+ it 'aborts rollback operation' do
+ Sidekiq::Testing.disable! do
+ Sidekiq::Client.push(
+ 'queue' => ::HashedStorage::ProjectRollbackWorker.queue,
+ 'class' => ::HashedStorage::ProjectRollbackWorker,
+ 'args' => [project.id]
+ )
+
+ expect { background_migration.perform }.to change { migrator.rollback_pending? }.from(true).to(false)
+ end
+ end
+ end
+
+ it 'enqueues legacy projects to be migrated' do
+ Sidekiq::Testing.fake! do
+ expect { background_migration.perform }.to change { Sidekiq::Queues[::HashedStorage::MigratorWorker.queue].size }.by(1)
+ end
+ end
+
+ context 'when executing all jobs' do
+ it 'migrates legacy projects' do
+ Sidekiq::Testing.inline! do
+ expect { background_migration.perform }.to change { project.reload.legacy_storage? }.from(true).to(false)
+ end
+ end
+ end
+ end
+end
+# rubocop:enable RSpec/FactoriesInMigrationSpecs
diff --git a/spec/lib/gitlab/background_migration/set_merge_request_diff_files_count_spec.rb b/spec/lib/gitlab/background_migration/set_merge_request_diff_files_count_spec.rb
index 6e9f51f510a..f23518625e4 100644
--- a/spec/lib/gitlab/background_migration/set_merge_request_diff_files_count_spec.rb
+++ b/spec/lib/gitlab/background_migration/set_merge_request_diff_files_count_spec.rb
@@ -13,11 +13,11 @@ RSpec.describe Gitlab::BackgroundMigration::SetMergeRequestDiffFilesCount, schem
let(:project) { projects.create!(namespace_id: namespace.id) }
let(:merge_request) { merge_requests.create!(source_branch: 'x', target_branch: 'master', target_project_id: project.id) }
- it 'fills the files_count column' do
- empty_diff = merge_request_diffs.create!(merge_request_id: merge_request.id)
- filled_diff = merge_request_diffs.create!(merge_request_id: merge_request.id)
+ let!(:empty_diff) { merge_request_diffs.create!(merge_request_id: merge_request.id) }
+ let!(:filled_diff) { merge_request_diffs.create!(merge_request_id: merge_request.id) }
- 3.times do |n|
+ let!(:filled_diff_files) do
+ 1.upto(3).map do |n|
merge_request_diff_files.create!(
merge_request_diff_id: filled_diff.id,
relative_order: n,
@@ -31,10 +31,21 @@ RSpec.describe Gitlab::BackgroundMigration::SetMergeRequestDiffFilesCount, schem
new_path: ''
)
end
+ end
+ it 'fills the files_count column' do
described_class.new.perform(empty_diff.id, filled_diff.id)
expect(empty_diff.reload.files_count).to eq(0)
expect(filled_diff.reload.files_count).to eq(3)
end
+
+ it 'uses the sentinel value if the actual count is too high' do
+ stub_const("#{described_class}::FILES_COUNT_SENTINEL", filled_diff_files.size - 1)
+
+ described_class.new.perform(empty_diff.id, filled_diff.id)
+
+ expect(empty_diff.reload.files_count).to eq(0)
+ expect(filled_diff.reload.files_count).to eq(described_class::FILES_COUNT_SENTINEL)
+ end
end
diff --git a/spec/lib/gitlab/badge/coverage/template_spec.rb b/spec/lib/gitlab/badge/coverage/template_spec.rb
index 5a0adfd8e59..ba5c1b2ce6e 100644
--- a/spec/lib/gitlab/badge/coverage/template_spec.rb
+++ b/spec/lib/gitlab/badge/coverage/template_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe Gitlab::Badge::Coverage::Template do
context 'when its size is larger than the max allowed value' do
before do
- allow(badge).to receive(:customization).and_return({ key_text: 't' * 129 })
+ allow(badge).to receive(:customization).and_return({ key_text: 't' * 65 })
end
it 'returns default value' do
@@ -76,7 +76,7 @@ RSpec.describe Gitlab::Badge::Coverage::Template do
context 'when it is larger than the max allowed value' do
before do
- allow(badge).to receive(:customization).and_return({ key_width: 129 })
+ allow(badge).to receive(:customization).and_return({ key_width: 513 })
end
it 'returns default value' do
diff --git a/spec/lib/gitlab/badge/pipeline/status_spec.rb b/spec/lib/gitlab/badge/pipeline/status_spec.rb
index fcc0d4030fd..b5dabca0477 100644
--- a/spec/lib/gitlab/badge/pipeline/status_spec.rb
+++ b/spec/lib/gitlab/badge/pipeline/status_spec.rb
@@ -78,6 +78,34 @@ RSpec.describe Gitlab::Badge::Pipeline::Status do
expect(badge.status).to eq 'success'
end
end
+
+ context 'when ignore_skipped is set to true' do
+ let(:new_badge) { described_class.new(project, branch, opts: { ignore_skipped: true }) }
+
+ before do
+ pipeline.skip!
+ end
+
+ describe '#status' do
+ it 'uses latest non-skipped status' do
+ expect(new_badge.status).not_to eq 'skipped'
+ end
+ end
+ end
+
+ context 'when ignore_skipped is set to false' do
+ let(:new_badge) { described_class.new(project, branch, opts: { ignore_skipped: false }) }
+
+ before do
+ pipeline.skip!
+ end
+
+ describe '#status' do
+ it 'uses latest status' do
+ expect(new_badge.status).to eq 'skipped'
+ end
+ end
+ end
end
context 'build does not exist' do
diff --git a/spec/lib/gitlab/badge/pipeline/template_spec.rb b/spec/lib/gitlab/badge/pipeline/template_spec.rb
index 2f0d0782369..c78e95852f3 100644
--- a/spec/lib/gitlab/badge/pipeline/template_spec.rb
+++ b/spec/lib/gitlab/badge/pipeline/template_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe Gitlab::Badge::Pipeline::Template do
context 'when its size is larger than the max allowed value' do
before do
- allow(badge).to receive(:customization).and_return({ key_text: 't' * 129 })
+ allow(badge).to receive(:customization).and_return({ key_text: 't' * 65 })
end
it 'returns default value' do
@@ -54,7 +54,7 @@ RSpec.describe Gitlab::Badge::Pipeline::Template do
context 'when it is larger than the max allowed value' do
before do
- allow(badge).to receive(:customization).and_return({ key_width: 129 })
+ allow(badge).to receive(:customization).and_return({ key_width: 513 })
end
it 'returns default value' do
diff --git a/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb
index 5eb27c51f9e..80ec5ec1fc7 100644
--- a/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb
@@ -6,9 +6,10 @@ RSpec.describe Gitlab::BitbucketServerImport::Importer do
include ImportSpecHelper
let(:import_url) { 'http://my-bitbucket' }
- let(:user) { 'bitbucket' }
+ let(:bitbucket_user) { 'bitbucket' }
+ let(:project_creator) { create(:user, username: 'project_creator', email: 'project_creator@example.org') }
let(:password) { 'test' }
- let(:project) { create(:project, :repository, import_url: import_url) }
+ let(:project) { create(:project, :repository, import_url: import_url, creator: project_creator) }
let(:now) { Time.now.utc.change(usec: 0) }
let(:project_key) { 'TEST' }
let(:repo_slug) { 'rouge' }
@@ -19,7 +20,7 @@ RSpec.describe Gitlab::BitbucketServerImport::Importer do
before do
data = project.create_or_update_import_data(
data: { project_key: project_key, repo_slug: repo_slug },
- credentials: { base_uri: import_url, user: user, password: password }
+ credentials: { base_uri: import_url, user: bitbucket_user, password: password }
)
data.save
project.save
@@ -51,12 +52,11 @@ RSpec.describe Gitlab::BitbucketServerImport::Importer do
end
describe '#import_pull_requests' do
- before do
- allow(subject).to receive(:import_repository)
- allow(subject).to receive(:delete_temp_branches)
- allow(subject).to receive(:restore_branches)
+ let(:pull_request_author) { create(:user, username: 'pull_request_author', email: 'pull_request_author@example.org') }
+ let(:note_author) { create(:user, username: 'note_author', email: 'note_author@example.org') }
- pull_request = instance_double(
+ let(:pull_request) do
+ instance_double(
BitbucketServer::Representation::PullRequest,
iid: 10,
source_branch_sha: sample.commits.last,
@@ -67,65 +67,172 @@ RSpec.describe Gitlab::BitbucketServerImport::Importer do
description: 'This is a test pull request',
state: 'merged',
author: 'Test Author',
- author_email: project.owner.email,
+ author_email: pull_request_author.email,
+ author_username: pull_request_author.username,
created_at: Time.now,
updated_at: Time.now,
raw: {},
merged?: true)
+ end
- allow(subject.client).to receive(:pull_requests).and_return([pull_request])
-
- @merge_event = instance_double(
+ let(:merge_event) do
+ instance_double(
BitbucketServer::Representation::Activity,
comment?: false,
merge_event?: true,
- committer_email: project.owner.email,
+ committer_email: pull_request_author.email,
merge_timestamp: now,
merge_commit: '12345678'
)
+ end
- @pr_note = instance_double(
+ let(:pr_note) do
+ instance_double(
BitbucketServer::Representation::Comment,
note: 'Hello world',
- author_email: 'unknown@gmail.com',
- author_username: 'The Flash',
+ author_email: note_author.email,
+ author_username: note_author.username,
comments: [],
created_at: now,
updated_at: now,
parent_comment: nil)
+ end
- @pr_comment = instance_double(
+ let(:pr_comment) do
+ instance_double(
BitbucketServer::Representation::Activity,
comment?: true,
inline_comment?: false,
merge_event?: false,
- comment: @pr_note)
+ comment: pr_note)
+ end
+
+ before do
+ allow(subject).to receive(:import_repository)
+ allow(subject).to receive(:delete_temp_branches)
+ allow(subject).to receive(:restore_branches)
+
+ allow(subject.client).to receive(:pull_requests).and_return([pull_request])
end
it 'imports merge event' do
- expect(subject.client).to receive(:activities).and_return([@merge_event])
+ expect(subject.client).to receive(:activities).and_return([merge_event])
expect { subject.execute }.to change { MergeRequest.count }.by(1)
merge_request = MergeRequest.first
- expect(merge_request.metrics.merged_by).to eq(project.owner)
- expect(merge_request.metrics.merged_at).to eq(@merge_event.merge_timestamp)
+ expect(merge_request.metrics.merged_by).to eq(pull_request_author)
+ expect(merge_request.metrics.merged_at).to eq(merge_event.merge_timestamp)
expect(merge_request.merge_commit_sha).to eq('12345678')
expect(merge_request.state_id).to eq(3)
end
- it 'imports comments' do
- expect(subject.client).to receive(:activities).and_return([@pr_comment])
+ describe 'pull request author user mapping' do
+ before do
+ allow(subject.client).to receive(:activities).and_return([merge_event])
+ end
- expect { subject.execute }.to change { MergeRequest.count }.by(1)
+ shared_examples 'imports pull requests' do
+ it 'maps user' do
+ expect { subject.execute }.to change { MergeRequest.count }.by(1)
- merge_request = MergeRequest.first
- expect(merge_request.notes.count).to eq(1)
- note = merge_request.notes.first
- expect(note.note).to end_with(@pr_note.note)
- expect(note.author).to eq(project.owner)
- expect(note.created_at).to eq(@pr_note.created_at)
- expect(note.updated_at).to eq(@pr_note.created_at)
+ merge_request = MergeRequest.first
+ expect(merge_request.author).to eq(pull_request_author)
+ end
+ end
+
+ context 'when bitbucket_server_user_mapping_by_username feature flag is disabled' do
+ before do
+ stub_feature_flags(bitbucket_server_user_mapping_by_username: false)
+ end
+
+ include_examples 'imports pull requests'
+ end
+
+ context 'when bitbucket_server_user_mapping_by_username feature flag is enabled' do
+ before do
+ stub_feature_flags(bitbucket_server_user_mapping_by_username: true)
+ end
+
+ include_examples 'imports pull requests' do
+ context 'when username is not present' do
+ before do
+ allow(pull_request).to receive(:author_username).and_return(nil)
+ end
+
+ it 'maps by email' do
+ expect { subject.execute }.to change { MergeRequest.count }.by(1)
+
+ merge_request = MergeRequest.first
+ expect(merge_request.author).to eq(pull_request_author)
+ end
+ end
+ end
+ end
+
+ context 'when user is not found' do
+ before do
+ allow(pull_request).to receive(:author_username).and_return(nil)
+ allow(pull_request).to receive(:author_email).and_return(nil)
+ end
+
+ it 'maps importer user' do
+ expect { subject.execute }.to change { MergeRequest.count }.by(1)
+
+ merge_request = MergeRequest.first
+ expect(merge_request.author).to eq(project_creator)
+ end
+ end
+ end
+
+ describe 'comments' do
+ shared_examples 'imports comments' do
+ it 'imports comments' do
+ expect(subject.client).to receive(:activities).and_return([pr_comment])
+
+ expect { subject.execute }.to change { MergeRequest.count }.by(1)
+
+ merge_request = MergeRequest.first
+ expect(merge_request.notes.count).to eq(1)
+ note = merge_request.notes.first
+ expect(note.note).to end_with(pr_note.note)
+ expect(note.author).to eq(note_author)
+ expect(note.created_at).to eq(pr_note.created_at)
+ expect(note.updated_at).to eq(pr_note.created_at)
+ end
+ end
+
+ context 'when bitbucket_server_user_mapping_by_username feature flag is disabled' do
+ before do
+ stub_feature_flags(bitbucket_server_user_mapping_by_username: false)
+ end
+
+ include_examples 'imports comments'
+ end
+
+ context 'when bitbucket_server_user_mapping_by_username feature flag is enabled' do
+ before do
+ stub_feature_flags(bitbucket_server_user_mapping_by_username: true)
+ end
+
+ include_examples 'imports comments'
+
+ context 'when username is not present' do
+ before do
+ allow(pr_note).to receive(:author_username).and_return(nil)
+ allow(subject.client).to receive(:activities).and_return([pr_comment])
+ end
+
+ it 'maps by email' do
+ expect { subject.execute }.to change { MergeRequest.count }.by(1)
+
+ merge_request = MergeRequest.first
+ expect(merge_request.notes.count).to eq(1)
+ note = merge_request.notes.first
+ expect(note.author).to eq(note_author)
+ end
+ end
+ end
end
context 'metrics' do
@@ -135,7 +242,7 @@ RSpec.describe Gitlab::BitbucketServerImport::Importer do
before do
allow(Gitlab::Metrics).to receive(:counter) { counter }
allow(Gitlab::Metrics).to receive(:histogram) { histogram }
- allow(subject.client).to receive(:activities).and_return([@merge_event])
+ allow(subject.client).to receive(:activities).and_return([merge_event])
end
it 'counts and measures duration of imported projects' do
@@ -170,73 +277,137 @@ RSpec.describe Gitlab::BitbucketServerImport::Importer do
end
end
- it 'imports threaded discussions' do
- reply = instance_double(
- BitbucketServer::Representation::PullRequestComment,
- author_email: 'someuser@gitlab.com',
- author_username: 'Batman',
- note: 'I agree',
- created_at: now,
- updated_at: now)
+ describe 'threaded discussions' do
+ let(:reply_author) { create(:user, username: 'reply_author', email: 'reply_author@example.org') }
+ let(:inline_note_author) { create(:user, username: 'inline_note_author', email: 'inline_note_author@example.org') }
+
+ let(:reply) do
+ instance_double(
+ BitbucketServer::Representation::PullRequestComment,
+ author_email: reply_author.email,
+ author_username: reply_author.username,
+ note: 'I agree',
+ created_at: now,
+ updated_at: now)
+ end
# https://gitlab.com/gitlab-org/gitlab-test/compare/c1acaa58bbcbc3eafe538cb8274ba387047b69f8...5937ac0a7beb003549fc5fd26fc247ad
- inline_note = instance_double(
- BitbucketServer::Representation::PullRequestComment,
- file_type: 'ADDED',
- from_sha: sample.commits.first,
- to_sha: sample.commits.last,
- file_path: '.gitmodules',
- old_pos: nil,
- new_pos: 4,
- note: 'Hello world',
- author_email: 'unknown@gmail.com',
- author_username: 'Superman',
- comments: [reply],
- created_at: now,
- updated_at: now,
- parent_comment: nil)
+ let(:inline_note) do
+ instance_double(
+ BitbucketServer::Representation::PullRequestComment,
+ file_type: 'ADDED',
+ from_sha: sample.commits.first,
+ to_sha: sample.commits.last,
+ file_path: '.gitmodules',
+ old_pos: nil,
+ new_pos: 4,
+ note: 'Hello world',
+ author_email: inline_note_author.email,
+ author_username: inline_note_author.username,
+ comments: [reply],
+ created_at: now,
+ updated_at: now,
+ parent_comment: nil)
+ end
- allow(reply).to receive(:parent_comment).and_return(inline_note)
+ let(:inline_comment) do
+ instance_double(
+ BitbucketServer::Representation::Activity,
+ comment?: true,
+ inline_comment?: true,
+ merge_event?: false,
+ comment: inline_note)
+ end
- inline_comment = instance_double(
- BitbucketServer::Representation::Activity,
- comment?: true,
- inline_comment?: true,
- merge_event?: false,
- comment: inline_note)
+ before do
+ allow(reply).to receive(:parent_comment).and_return(inline_note)
+ allow(subject.client).to receive(:activities).and_return([inline_comment])
+ end
- expect(subject.client).to receive(:activities).and_return([inline_comment])
+ shared_examples 'imports threaded discussions' do
+ it 'imports threaded discussions' do
+ expect { subject.execute }.to change { MergeRequest.count }.by(1)
+
+ merge_request = MergeRequest.first
+ expect(merge_request.notes.count).to eq(2)
+ expect(merge_request.notes.map(&:discussion_id).uniq.count).to eq(1)
+
+ notes = merge_request.notes.order(:id).to_a
+ start_note = notes.first
+ expect(start_note.type).to eq('DiffNote')
+ expect(start_note.note).to end_with(inline_note.note)
+ expect(start_note.created_at).to eq(inline_note.created_at)
+ expect(start_note.updated_at).to eq(inline_note.updated_at)
+ expect(start_note.position.base_sha).to eq(inline_note.from_sha)
+ expect(start_note.position.start_sha).to eq(inline_note.from_sha)
+ expect(start_note.position.head_sha).to eq(inline_note.to_sha)
+ expect(start_note.position.old_line).to be_nil
+ expect(start_note.position.new_line).to eq(inline_note.new_pos)
+ expect(start_note.author).to eq(inline_note_author)
+
+ reply_note = notes.last
+ # Make sure author and reply context is included
+ expect(reply_note.note).to start_with("> #{inline_note.note}\n\n#{reply.note}")
+ expect(reply_note.author).to eq(reply_author)
+ expect(reply_note.created_at).to eq(reply.created_at)
+ expect(reply_note.updated_at).to eq(reply.created_at)
+ expect(reply_note.position.base_sha).to eq(inline_note.from_sha)
+ expect(reply_note.position.start_sha).to eq(inline_note.from_sha)
+ expect(reply_note.position.head_sha).to eq(inline_note.to_sha)
+ expect(reply_note.position.old_line).to be_nil
+ expect(reply_note.position.new_line).to eq(inline_note.new_pos)
+ end
+ end
- expect { subject.execute }.to change { MergeRequest.count }.by(1)
+ context 'when bitbucket_server_user_mapping_by_username feature flag is disabled' do
+ before do
+ stub_feature_flags(bitbucket_server_user_mapping_by_username: false)
+ end
- merge_request = MergeRequest.first
- expect(merge_request.notes.count).to eq(2)
- expect(merge_request.notes.map(&:discussion_id).uniq.count).to eq(1)
-
- notes = merge_request.notes.order(:id).to_a
- start_note = notes.first
- expect(start_note.type).to eq('DiffNote')
- expect(start_note.note).to end_with(inline_note.note)
- expect(start_note.created_at).to eq(inline_note.created_at)
- expect(start_note.updated_at).to eq(inline_note.updated_at)
- expect(start_note.position.base_sha).to eq(inline_note.from_sha)
- expect(start_note.position.start_sha).to eq(inline_note.from_sha)
- expect(start_note.position.head_sha).to eq(inline_note.to_sha)
- expect(start_note.position.old_line).to be_nil
- expect(start_note.position.new_line).to eq(inline_note.new_pos)
-
- reply_note = notes.last
- # Make sure author and reply context is included
- expect(reply_note.note).to start_with("*By #{reply.author_username} (#{reply.author_email})*\n\n")
- expect(reply_note.note).to end_with("> #{inline_note.note}\n\n#{reply.note}")
- expect(reply_note.author).to eq(project.owner)
- expect(reply_note.created_at).to eq(reply.created_at)
- expect(reply_note.updated_at).to eq(reply.created_at)
- expect(reply_note.position.base_sha).to eq(inline_note.from_sha)
- expect(reply_note.position.start_sha).to eq(inline_note.from_sha)
- expect(reply_note.position.head_sha).to eq(inline_note.to_sha)
- expect(reply_note.position.old_line).to be_nil
- expect(reply_note.position.new_line).to eq(inline_note.new_pos)
+ include_examples 'imports threaded discussions'
+ end
+
+ context 'when bitbucket_server_user_mapping_by_username feature flag is enabled' do
+ before do
+ stub_feature_flags(bitbucket_server_user_mapping_by_username: true)
+ end
+
+ include_examples 'imports threaded discussions' do
+ context 'when username is not present' do
+ before do
+ allow(reply).to receive(:author_username).and_return(nil)
+ allow(inline_note).to receive(:author_username).and_return(nil)
+ end
+
+ it 'maps by email' do
+ expect { subject.execute }.to change { MergeRequest.count }.by(1)
+
+ notes = MergeRequest.first.notes.order(:id).to_a
+
+ expect(notes.first.author).to eq(inline_note_author)
+ expect(notes.last.author).to eq(reply_author)
+ end
+ end
+ end
+ end
+
+ context 'when user is not found' do
+ before do
+ allow(reply).to receive(:author_username).and_return(nil)
+ allow(reply).to receive(:author_email).and_return(nil)
+ allow(inline_note).to receive(:author_username).and_return(nil)
+ allow(inline_note).to receive(:author_email).and_return(nil)
+ end
+
+ it 'maps importer user' do
+ expect { subject.execute }.to change { MergeRequest.count }.by(1)
+
+ notes = MergeRequest.first.notes.order(:id).to_a
+
+ expect(notes.first.author).to eq(project_creator)
+ expect(notes.last.author).to eq(project_creator)
+ end
+ end
end
it 'falls back to comments if diff comments fail to validate' do
@@ -312,6 +483,7 @@ RSpec.describe Gitlab::BitbucketServerImport::Importer do
state: 'merged',
author: 'Test Author',
author_email: project.owner.email,
+ author_username: 'author',
created_at: Time.now,
updated_at: Time.now,
merged?: true)
diff --git a/spec/lib/gitlab/checks/lfs_integrity_spec.rb b/spec/lib/gitlab/checks/lfs_integrity_spec.rb
index 8fec702790c..4583cd72cfd 100644
--- a/spec/lib/gitlab/checks/lfs_integrity_spec.rb
+++ b/spec/lib/gitlab/checks/lfs_integrity_spec.rb
@@ -57,25 +57,5 @@ RSpec.describe Gitlab::Checks::LfsIntegrity do
expect(subject.objects_missing?).to be_falsey
end
end
-
- context 'for forked project', :sidekiq_might_not_need_inline do
- let(:parent_project) { create(:project, :repository) }
- let(:project) { fork_project(parent_project, nil, repository: true) }
-
- before do
- allow(project).to receive(:lfs_enabled?).and_return(true)
- end
-
- it 'is true parent project is missing LFS objects' do
- expect(subject.objects_missing?).to be_truthy
- end
-
- it 'is false parent project already contains LFS objects for the fork' do
- lfs_object = create(:lfs_object, oid: blob_object.lfs_oid)
- create(:lfs_objects_project, project: parent_project, lfs_object: lfs_object)
-
- expect(subject.objects_missing?).to be_falsey
- end
- end
end
end
diff --git a/spec/lib/gitlab/checks/project_moved_spec.rb b/spec/lib/gitlab/checks/project_moved_spec.rb
index e15fa90443b..c7dad0a91d4 100644
--- a/spec/lib/gitlab/checks/project_moved_spec.rb
+++ b/spec/lib/gitlab/checks/project_moved_spec.rb
@@ -57,12 +57,12 @@ RSpec.describe Gitlab::Checks::ProjectMoved, :clean_gitlab_redis_shared_state do
shared_examples 'returns redirect message' do
it do
message = <<~MSG
- Project '#{redirect_path}' was moved to '#{project.full_path}'.
+ Project '#{redirect_path}' was moved to '#{project.full_path}'.
- Please update your Git remote:
+ Please update your Git remote:
- git remote set-url origin #{url_to_repo}
- MSG
+ git remote set-url origin #{url_to_repo}
+ MSG
expect(subject.message).to eq(message)
end
diff --git a/spec/lib/gitlab/checks/snippet_check_spec.rb b/spec/lib/gitlab/checks/snippet_check_spec.rb
index 2c027486bc9..037de8e9369 100644
--- a/spec/lib/gitlab/checks/snippet_check_spec.rb
+++ b/spec/lib/gitlab/checks/snippet_check_spec.rb
@@ -5,10 +5,12 @@ require 'spec_helper'
RSpec.describe Gitlab::Checks::SnippetCheck do
include_context 'change access checks context'
- let(:snippet) { create(:personal_snippet, :repository) }
+ let_it_be(:snippet) { create(:personal_snippet, :repository) }
+
let(:user_access) { Gitlab::UserAccessSnippet.new(user, snippet: snippet) }
+ let(:default_branch) { snippet.default_branch }
- subject { Gitlab::Checks::SnippetCheck.new(changes, logger: logger) }
+ subject { Gitlab::Checks::SnippetCheck.new(changes, default_branch: default_branch, logger: logger) }
describe '#validate!' do
it 'does not raise any error' do
@@ -39,5 +41,13 @@ RSpec.describe Gitlab::Checks::SnippetCheck do
end
end
end
+
+ context 'when default_branch is nil' do
+ let(:default_branch) { nil }
+
+ it 'raises an error' do
+ expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, 'You can not create or delete branches.')
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/artifact_file_reader_spec.rb b/spec/lib/gitlab/ci/artifact_file_reader_spec.rb
index e982f0eb015..83a37655ea9 100644
--- a/spec/lib/gitlab/ci/artifact_file_reader_spec.rb
+++ b/spec/lib/gitlab/ci/artifact_file_reader_spec.rb
@@ -18,6 +18,17 @@ RSpec.describe Gitlab::Ci::ArtifactFileReader do
expect(YAML.safe_load(subject).keys).to contain_exactly('rspec', 'time', 'custom')
end
+ context 'when FF ci_new_artifact_file_reader is disabled' do
+ before do
+ stub_feature_flags(ci_new_artifact_file_reader: false)
+ end
+
+ it 'returns the content at the path' do
+ is_expected.to be_present
+ expect(YAML.safe_load(subject).keys).to contain_exactly('rspec', 'time', 'custom')
+ end
+ end
+
context 'when path does not exist' do
let(:path) { 'file/does/not/exist.txt' }
let(:expected_error) do
diff --git a/spec/lib/gitlab/ci/config/entry/job_spec.rb b/spec/lib/gitlab/ci/config/entry/job_spec.rb
index ca02eaee0a0..ab760b107f8 100644
--- a/spec/lib/gitlab/ci/config/entry/job_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/job_spec.rb
@@ -74,16 +74,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
it { is_expected.to be_falsey }
end
- context 'when config does not contain script' do
- let(:name) { :build }
-
- let(:config) do
- { before_script: "cd ${PROJ_DIR} " }
- end
-
- it { is_expected.to be_truthy }
- end
-
context 'when using the default job without script' do
let(:name) { :default }
let(:config) do
@@ -104,14 +94,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
it { is_expected.to be_truthy }
end
-
- context 'there are no shared keys between jobs and bridges' do
- subject(:shared_values) do
- described_class::ALLOWED_KEYS & Gitlab::Ci::Config::Entry::Bridge::ALLOWED_KEYS
- end
-
- it { is_expected.to be_empty }
- end
end
describe 'validations' do
diff --git a/spec/lib/gitlab/ci/config/entry/jobs_spec.rb b/spec/lib/gitlab/ci/config/entry/jobs_spec.rb
index 8561bd330b7..ac6b589ec6b 100644
--- a/spec/lib/gitlab/ci/config/entry/jobs_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/jobs_spec.rb
@@ -68,7 +68,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Jobs do
let(:config) { { rspec: nil } }
it 'reports error' do
- expect(entry.errors).to include "jobs config should contain valid jobs"
+ expect(entry.errors).to include 'jobs rspec config should implement a script: or a trigger: keyword'
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/root_spec.rb b/spec/lib/gitlab/ci/config/entry/root_spec.rb
index 140b3c4f55b..252bda6461d 100644
--- a/spec/lib/gitlab/ci/config/entry/root_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/root_spec.rb
@@ -344,9 +344,9 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
end
describe '#errors' do
- it 'reports errors about missing script' do
+ it 'reports errors about missing script or trigger' do
expect(root.errors)
- .to include "root config contains unknown keys: rspec"
+ .to include 'jobs rspec config should implement a script: or a trigger: keyword'
end
end
end
diff --git a/spec/lib/gitlab/ci/config/normalizer/matrix_strategy_spec.rb b/spec/lib/gitlab/ci/config/normalizer/matrix_strategy_spec.rb
index bab604c4504..fbf86927bd9 100644
--- a/spec/lib/gitlab/ci/config/normalizer/matrix_strategy_spec.rb
+++ b/spec/lib/gitlab/ci/config/normalizer/matrix_strategy_spec.rb
@@ -43,7 +43,7 @@ RSpec.describe Gitlab::Ci::Config::Normalizer::MatrixStrategy do
expect(subject.map(&:attributes)).to match_array(
[
{
- name: 'test 1/4',
+ name: 'test: [aws, app1]',
instance: 1,
parallel: { total: 4 },
variables: {
@@ -52,7 +52,7 @@ RSpec.describe Gitlab::Ci::Config::Normalizer::MatrixStrategy do
}
},
{
- name: 'test 2/4',
+ name: 'test: [aws, app2]',
instance: 2,
parallel: { total: 4 },
variables: {
@@ -61,7 +61,7 @@ RSpec.describe Gitlab::Ci::Config::Normalizer::MatrixStrategy do
}
},
{
- name: 'test 3/4',
+ name: 'test: [ovh, app]',
instance: 3,
parallel: { total: 4 },
variables: {
@@ -70,7 +70,7 @@ RSpec.describe Gitlab::Ci::Config::Normalizer::MatrixStrategy do
}
},
{
- name: 'test 4/4',
+ name: 'test: [gcp, app]',
instance: 4,
parallel: { total: 4 },
variables: {
@@ -84,18 +84,7 @@ RSpec.describe Gitlab::Ci::Config::Normalizer::MatrixStrategy do
it 'has parallelized name' do
expect(subject.map(&:name)).to match_array(
- ['test 1/4', 'test 2/4', 'test 3/4', 'test 4/4']
- )
- end
-
- it 'has details' do
- expect(subject.map(&:name_with_details)).to match_array(
- [
- 'test (PROVIDER=aws; STACK=app1)',
- 'test (PROVIDER=aws; STACK=app2)',
- 'test (PROVIDER=gcp; STACK=app)',
- 'test (PROVIDER=ovh; STACK=app)'
- ]
+ ['test: [aws, app1]', 'test: [aws, app2]', 'test: [gcp, app]', 'test: [ovh, app]']
)
end
end
diff --git a/spec/lib/gitlab/ci/config/normalizer_spec.rb b/spec/lib/gitlab/ci/config/normalizer_spec.rb
index 949af8cdc4c..4c19657413c 100644
--- a/spec/lib/gitlab/ci/config/normalizer_spec.rb
+++ b/spec/lib/gitlab/ci/config/normalizer_spec.rb
@@ -178,8 +178,8 @@ RSpec.describe Gitlab::Ci::Config::Normalizer do
{
matrix: [
{
- VAR_1: [1],
- VAR_2: [2, 3]
+ VAR_1: ['A'],
+ VAR_2: %w[B C]
}
]
}
@@ -187,8 +187,8 @@ RSpec.describe Gitlab::Ci::Config::Normalizer do
let(:expanded_job_names) do
[
- 'rspec 1/2',
- 'rspec 2/2'
+ 'rspec: [A, B]',
+ 'rspec: [A, C]'
]
end
@@ -196,21 +196,17 @@ RSpec.describe Gitlab::Ci::Config::Normalizer do
is_expected.not_to include(job_name)
end
- it 'has parallelized jobs' do
- is_expected.to include(*expanded_job_names.map(&:to_sym))
- end
-
it 'sets job instance in options' do
expect(subject.values).to all(include(:instance))
end
it 'sets job variables', :aggregate_failures do
expect(subject.values[0]).to match(
- a_hash_including(variables: { VAR_1: 1, VAR_2: 2, USER_VARIABLE: 'user value' })
+ a_hash_including(variables: { VAR_1: 'A', VAR_2: 'B', USER_VARIABLE: 'user value' })
)
expect(subject.values[1]).to match(
- a_hash_including(variables: { VAR_1: 1, VAR_2: 3, USER_VARIABLE: 'user value' })
+ a_hash_including(variables: { VAR_1: 'A', VAR_2: 'C', USER_VARIABLE: 'user value' })
)
end
@@ -226,6 +222,10 @@ RSpec.describe Gitlab::Ci::Config::Normalizer do
expect(configs).to all(match(a_hash_including(original_config)))
end
+ it 'has parallelized jobs' do
+ is_expected.to include(*expanded_job_names.map(&:to_sym))
+ end
+
it_behaves_like 'parallel dependencies'
it_behaves_like 'parallel needs'
end
@@ -238,5 +238,11 @@ RSpec.describe Gitlab::Ci::Config::Normalizer do
is_expected.to match(config)
end
end
+
+ context 'when jobs config is nil' do
+ let(:config) { nil }
+
+ it { is_expected.to eq({}) }
+ end
end
end
diff --git a/spec/lib/gitlab/ci/config_spec.rb b/spec/lib/gitlab/ci/config_spec.rb
index 18be9558829..41a45fe4ab7 100644
--- a/spec/lib/gitlab/ci/config_spec.rb
+++ b/spec/lib/gitlab/ci/config_spec.rb
@@ -312,7 +312,7 @@ RSpec.describe Gitlab::Ci::Config do
HEREDOC
end
- it 'raises error YamlProcessor validationError' do
+ it 'raises ConfigError' do
expect { config }.to raise_error(
described_class::ConfigError,
"Included file `invalid` does not have YAML extension!"
@@ -329,7 +329,7 @@ RSpec.describe Gitlab::Ci::Config do
HEREDOC
end
- it 'raises error YamlProcessor validationError' do
+ it 'raises ConfigError' do
expect { config }.to raise_error(
described_class::ConfigError,
'Include `{"remote":"http://url","local":"/local/file.yml"}` needs to match exactly one accessor!'
diff --git a/spec/lib/gitlab/ci/jwt_spec.rb b/spec/lib/gitlab/ci/jwt_spec.rb
index a15f3310dab..9b133efad9c 100644
--- a/spec/lib/gitlab/ci/jwt_spec.rb
+++ b/spec/lib/gitlab/ci/jwt_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe Gitlab::Ci::Jwt do
subject(:payload) { described_class.new(build, ttl: 30).payload }
it 'has correct values for the standard JWT attributes' do
- Timecop.freeze do
+ freeze_time do
now = Time.now.to_i
aggregate_failures do
diff --git a/spec/lib/gitlab/ci/lint_spec.rb b/spec/lib/gitlab/ci/lint_spec.rb
new file mode 100644
index 00000000000..077c0fd3162
--- /dev/null
+++ b/spec/lib/gitlab/ci/lint_spec.rb
@@ -0,0 +1,251 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Lint do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+
+ let(:lint) { described_class.new(project: project, current_user: user) }
+
+ describe '#validate' do
+ subject { lint.validate(content, dry_run: dry_run) }
+
+ shared_examples 'content is valid' do
+ let(:content) do
+ <<~YAML
+ build:
+ stage: build
+ before_script:
+ - before_build
+ script: echo
+ environment: staging
+ when: manual
+ rspec:
+ stage: test
+ script: rspec
+ after_script:
+ - after_rspec
+ tags: [docker]
+ YAML
+ end
+
+ it 'returns a valid result', :aggregate_failures do
+ expect(subject).to be_valid
+
+ expect(subject.errors).to be_empty
+ expect(subject.warnings).to be_empty
+ expect(subject.jobs).to be_present
+
+ build_job = subject.jobs.first
+ expect(build_job[:name]).to eq('build')
+ expect(build_job[:stage]).to eq('build')
+ expect(build_job[:before_script]).to eq(['before_build'])
+ expect(build_job[:script]).to eq(['echo'])
+ expect(build_job.fetch(:after_script)).to eq([])
+ expect(build_job[:tag_list]).to eq([])
+ expect(build_job[:environment]).to eq('staging')
+ expect(build_job[:when]).to eq('manual')
+ expect(build_job[:allow_failure]).to eq(true)
+
+ rspec_job = subject.jobs.last
+ expect(rspec_job[:name]).to eq('rspec')
+ expect(rspec_job[:stage]).to eq('test')
+ expect(rspec_job.fetch(:before_script)).to eq([])
+ expect(rspec_job[:script]).to eq(['rspec'])
+ expect(rspec_job[:after_script]).to eq(['after_rspec'])
+ expect(rspec_job[:tag_list]).to eq(['docker'])
+ expect(rspec_job.fetch(:environment)).to be_nil
+ expect(rspec_job[:when]).to eq('on_success')
+ expect(rspec_job[:allow_failure]).to eq(false)
+ end
+ end
+
+ shared_examples 'content with errors and warnings' do
+ context 'when content has errors' do
+ let(:content) do
+ <<~YAML
+ build:
+ invalid: syntax
+ YAML
+ end
+
+ it 'returns a result with errors' do
+ expect(subject).not_to be_valid
+ expect(subject.errors).to include(/jobs build config should implement a script: or a trigger: keyword/)
+ end
+ end
+
+ context 'when content has warnings' do
+ let(:content) do
+ <<~YAML
+ rspec:
+ script: rspec
+ rules:
+ - when: always
+ YAML
+ end
+
+ it 'returns a result with warnings' do
+ expect(subject).to be_valid
+ expect(subject.warnings).to include(/rspec may allow multiple pipelines to run/)
+ end
+ end
+
+ context 'when content has more warnings than max limit' do
+ # content will result in 2 warnings
+ let(:content) do
+ <<~YAML
+ rspec:
+ script: rspec
+ rules:
+ - when: always
+ rspec2:
+ script: rspec
+ rules:
+ - when: always
+ YAML
+ end
+
+ before do
+ stub_const('Gitlab::Ci::Warnings::MAX_LIMIT', 1)
+ end
+
+ it 'returns a result with warnings' do
+ expect(subject).to be_valid
+ expect(subject.warnings.size).to eq(1)
+ end
+ end
+
+ context 'when content has errors and warnings' do
+ let(:content) do
+ <<~YAML
+ rspec:
+ script: rspec
+ rules:
+ - when: always
+ karma:
+ script: karma
+ unknown: key
+ YAML
+ end
+
+ it 'returns a result with errors and warnings' do
+ expect(subject).not_to be_valid
+ expect(subject.errors).to include(/karma config contains unknown keys/)
+ expect(subject.warnings).to include(/rspec may allow multiple pipelines to run/)
+ end
+ end
+ end
+
+ shared_context 'advanced validations' do
+ let(:content) do
+ <<~YAML
+ build:
+ stage: build
+ script: echo
+ rules:
+ - if: '$CI_MERGE_REQUEST_ID'
+ test:
+ stage: test
+ script: echo
+ needs: [build]
+ YAML
+ end
+ end
+
+ context 'when user has permissions to write the ref' do
+ before do
+ project.add_developer(user)
+ end
+
+ context 'when using default static mode' do
+ let(:dry_run) { false }
+
+ it_behaves_like 'content with errors and warnings'
+
+ it_behaves_like 'content is valid' do
+ it 'includes extra attributes' do
+ subject.jobs.each do |job|
+ expect(job[:only]).to eq(refs: %w[branches tags])
+ expect(job.fetch(:except)).to be_nil
+ end
+ end
+ end
+
+ include_context 'advanced validations' do
+ it 'does not catch advanced logical errors' do
+ expect(subject).to be_valid
+ expect(subject.errors).to be_empty
+ end
+ end
+
+ it 'uses YamlProcessor' do
+ expect(Gitlab::Ci::YamlProcessor)
+ .to receive(:new)
+ .and_call_original
+
+ subject
+ end
+ end
+
+ context 'when using dry run mode' do
+ let(:dry_run) { true }
+
+ it_behaves_like 'content with errors and warnings'
+
+ it_behaves_like 'content is valid' do
+ it 'does not include extra attributes' do
+ subject.jobs.each do |job|
+ expect(job.key?(:only)).to be_falsey
+ expect(job.key?(:except)).to be_falsey
+ end
+ end
+ end
+
+ include_context 'advanced validations' do
+ it 'runs advanced logical validations' do
+ expect(subject).not_to be_valid
+ expect(subject.errors).to eq(["test: needs 'build'"])
+ end
+ end
+
+ it 'uses Ci::CreatePipelineService' do
+ expect(::Ci::CreatePipelineService)
+ .to receive(:new)
+ .and_call_original
+
+ subject
+ end
+ end
+ end
+
+ context 'when user does not have permissions to write the ref' do
+ before do
+ project.add_reporter(user)
+ end
+
+ context 'when using default static mode' do
+ let(:dry_run) { false }
+
+ it_behaves_like 'content is valid'
+ end
+
+ context 'when using dry run mode' do
+ let(:dry_run) { true }
+
+ let(:content) do
+ <<~YAML
+ job:
+ script: echo
+ YAML
+ end
+
+ it 'does not allow validation' do
+ expect(subject).not_to be_valid
+ expect(subject.errors).to include('Insufficient permissions to create a new pipeline')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/remove_unwanted_chat_jobs_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/remove_unwanted_chat_jobs_spec.rb
index 8b9de16ce5f..11e3f32c7ce 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/remove_unwanted_chat_jobs_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/remove_unwanted_chat_jobs_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe ::Gitlab::Ci::Pipeline::Chain::RemoveUnwantedChatJobs do
let(:command) do
double(:command,
- config_processor: double(:processor,
+ yaml_processor_result: double(:processor,
jobs: { echo: double(:job_echo), rspec: double(:job_rspec) }),
project: project,
chat_data: { command: 'echo' })
@@ -25,7 +25,7 @@ RSpec.describe ::Gitlab::Ci::Pipeline::Chain::RemoveUnwantedChatJobs do
subject
- expect(command.config_processor.jobs.keys).to eq([:echo])
+ expect(command.yaml_processor_result.jobs.keys).to eq([:echo])
end
it 'does not remove any jobs for non chat-pipelines' do
@@ -33,7 +33,7 @@ RSpec.describe ::Gitlab::Ci::Pipeline::Chain::RemoveUnwantedChatJobs do
subject
- expect(command.config_processor.jobs.keys).to eq([:echo, :rspec])
+ expect(command.yaml_processor_result.jobs.keys).to eq([:echo, :rspec])
end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb
index de580d2e148..e55281f9705 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb
@@ -31,20 +31,20 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External do
CI_YAML
end
- let(:yaml_processor) do
+ let(:yaml_processor_result) do
::Gitlab::Ci::YamlProcessor.new(
ci_yaml, {
project: project,
sha: pipeline.sha,
user: user
}
- )
+ ).execute
end
let(:save_incompleted) { true }
let(:command) do
Gitlab::Ci::Pipeline::Chain::Command.new(
- project: project, current_user: user, config_processor: yaml_processor, save_incompleted: save_incompleted
+ project: project, current_user: user, yaml_processor_result: yaml_processor_result, save_incompleted: save_incompleted
)
end
@@ -128,7 +128,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External do
end
describe '#validation_service_payload' do
- subject(:validation_service_payload) { step.send(:validation_service_payload, pipeline, command.config_processor.stages_attributes) }
+ subject(:validation_service_payload) { step.send(:validation_service_payload, pipeline, command.yaml_processor_result.stages_attributes) }
it 'respects the defined schema' do
expect(validation_service_payload).to match_schema('/external_validation')
diff --git a/spec/lib/gitlab/ci/pipeline/expression/lexer_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/lexer_spec.rb
index 6e242faa885..fc5725a4d17 100644
--- a/spec/lib/gitlab/ci/pipeline/expression/lexer_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/expression/lexer_spec.rb
@@ -90,24 +90,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Expression::Lexer do
end
with_them do
- context 'when ci_if_parenthesis_enabled is enabled' do
- before do
- stub_feature_flags(ci_if_parenthesis_enabled: true)
- end
-
- it { is_expected.to eq(tokens) }
- end
-
- context 'when ci_if_parenthesis_enabled is disabled' do
- before do
- stub_feature_flags(ci_if_parenthesis_enabled: false)
- end
-
- it do
- expect { subject }
- .to raise_error described_class::SyntaxError
- end
- end
+ it { is_expected.to eq(tokens) }
end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/expression/parser_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/parser_spec.rb
index 3394a75ac0a..a02c247925e 100644
--- a/spec/lib/gitlab/ci/pipeline/expression/parser_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/expression/parser_spec.rb
@@ -3,10 +3,6 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Expression::Parser do
- before do
- stub_feature_flags(ci_if_parenthesis_enabled: true)
- end
-
describe '#tree' do
context 'validates simple operators' do
using RSpec::Parameterized::TableSyntax
@@ -31,36 +27,15 @@ RSpec.describe Gitlab::Ci::Pipeline::Expression::Parser do
context 'when combining && and OR operators' do
subject { described_class.seed('$VAR1 == "a" || $VAR2 == "b" && $VAR3 == "c" || $VAR4 == "d" && $VAR5 == "e"').tree }
- context 'when parenthesis engine is enabled' do
- before do
- stub_feature_flags(ci_if_parenthesis_enabled: true)
- end
-
- it 'returns operations in a correct order' do
- expect(subject.inspect)
- .to eq('or(or(equals($VAR1, "a"), and(equals($VAR2, "b"), equals($VAR3, "c"))), and(equals($VAR4, "d"), equals($VAR5, "e")))')
- end
- end
-
- context 'when parenthesis engine is disabled (legacy)' do
- before do
- stub_feature_flags(ci_if_parenthesis_enabled: false)
- end
-
- it 'returns operations in a invalid order' do
- expect(subject.inspect)
- .to eq('or(equals($VAR1, "a"), and(equals($VAR2, "b"), or(equals($VAR3, "c"), and(equals($VAR4, "d"), equals($VAR5, "e")))))')
- end
+ it 'returns operations in a correct order' do
+ expect(subject.inspect)
+ .to eq('or(or(equals($VAR1, "a"), and(equals($VAR2, "b"), equals($VAR3, "c"))), and(equals($VAR4, "d"), equals($VAR5, "e")))')
end
end
context 'when using parenthesis' do
subject { described_class.seed('(($VAR1 == "a" || $VAR2 == "b") && $VAR3 == "c" || $VAR4 == "d") && $VAR5 == "e"').tree }
- before do
- stub_feature_flags(ci_if_parenthesis_enabled: true)
- end
-
it 'returns operations in a correct order' do
expect(subject.inspect)
.to eq('and(or(and(or(equals($VAR1, "a"), equals($VAR2, "b")), equals($VAR3, "c")), equals($VAR4, "d")), equals($VAR5, "e"))')
@@ -96,38 +71,21 @@ RSpec.describe Gitlab::Ci::Pipeline::Expression::Parser do
end
context 'when parenthesis are unmatched' do
- context 'when parenthesis engine is enabled' do
- before do
- stub_feature_flags(ci_if_parenthesis_enabled: true)
- end
-
- where(:expression) do
- [
- '$VAR == (',
- '$VAR2 == ("aa"',
- '$VAR2 == ("aa"))',
- '$VAR2 == "aa")',
- '(($VAR2 == "aa")',
- '($VAR2 == "aa"))'
- ]
- end
-
- with_them do
- it 'raises a ParseError' do
- expect { described_class.seed(expression).tree }
- .to raise_error Gitlab::Ci::Pipeline::Expression::Parser::ParseError
- end
- end
+ where(:expression) do
+ [
+ '$VAR == (',
+ '$VAR2 == ("aa"',
+ '$VAR2 == ("aa"))',
+ '$VAR2 == "aa")',
+ '(($VAR2 == "aa")',
+ '($VAR2 == "aa"))'
+ ]
end
- context 'when parenthesis engine is disabled' do
- before do
- stub_feature_flags(ci_if_parenthesis_enabled: false)
- end
-
- it 'raises an SyntaxError' do
- expect { described_class.seed('$VAR == (').tree }
- .to raise_error Gitlab::Ci::Pipeline::Expression::Lexer::SyntaxError
+ with_them do
+ it 'raises a ParseError' do
+ expect { described_class.seed(expression).tree }
+ .to raise_error Gitlab::Ci::Pipeline::Expression::Parser::ParseError
end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
index 733ab30132d..34df0e86a18 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
@@ -931,47 +931,30 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
context 'when using 101 needs' do
let(:needs_count) { 101 }
- context 'when ci_plan_needs_size_limit is disabled' do
+ it "returns an error" do
+ expect(subject.errors).to contain_exactly(
+ "rspec: one job can only need 50 others, but you have listed 101. See needs keyword documentation for more details")
+ end
+
+ context 'when ci_needs_size_limit is set to 100' do
before do
- stub_feature_flags(ci_plan_needs_size_limit: false)
+ project.actual_limits.update!(ci_needs_size_limit: 100)
end
it "returns an error" do
expect(subject.errors).to contain_exactly(
- "rspec: one job can only need 10 others, but you have listed 101. See needs keyword documentation for more details")
+ "rspec: one job can only need 100 others, but you have listed 101. See needs keyword documentation for more details")
end
end
- context 'when ci_plan_needs_size_limit is enabled' do
+ context 'when ci_needs_size_limit is set to 0' do
before do
- stub_feature_flags(ci_plan_needs_size_limit: true)
+ project.actual_limits.update!(ci_needs_size_limit: 0)
end
it "returns an error" do
expect(subject.errors).to contain_exactly(
- "rspec: one job can only need 50 others, but you have listed 101. See needs keyword documentation for more details")
- end
-
- context 'when ci_needs_size_limit is set to 100' do
- before do
- project.actual_limits.update!(ci_needs_size_limit: 100)
- end
-
- it "returns an error" do
- expect(subject.errors).to contain_exactly(
- "rspec: one job can only need 100 others, but you have listed 101. See needs keyword documentation for more details")
- end
- end
-
- context 'when ci_needs_size_limit is set to 0' do
- before do
- project.actual_limits.update!(ci_needs_size_limit: 0)
- end
-
- it "returns an error" do
- expect(subject.errors).to contain_exactly(
- "rspec: one job can only need 0 others, but you have listed 101. See needs keyword documentation for more details")
- end
+ "rspec: one job can only need 0 others, but you have listed 101. See needs keyword documentation for more details")
end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline_object_hierarchy_spec.rb b/spec/lib/gitlab/ci/pipeline_object_hierarchy_spec.rb
new file mode 100644
index 00000000000..89602fe79d1
--- /dev/null
+++ b/spec/lib/gitlab/ci/pipeline_object_hierarchy_spec.rb
@@ -0,0 +1,111 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::PipelineObjectHierarchy do
+ include Ci::SourcePipelineHelpers
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:ancestor) { create(:ci_pipeline, project: project) }
+ let_it_be(:parent) { create(:ci_pipeline, project: project) }
+ let_it_be(:child) { create(:ci_pipeline, project: project) }
+ let_it_be(:cousin_parent) { create(:ci_pipeline, project: project) }
+ let_it_be(:cousin) { create(:ci_pipeline, project: project) }
+ let_it_be(:triggered_pipeline) { create(:ci_pipeline) }
+
+ before_all do
+ create_source_pipeline(ancestor, parent)
+ create_source_pipeline(ancestor, cousin_parent)
+ create_source_pipeline(parent, child)
+ create_source_pipeline(cousin_parent, cousin)
+ create_source_pipeline(child, triggered_pipeline)
+ end
+
+ describe '#base_and_ancestors' do
+ it 'includes the base and its ancestors' do
+ relation = described_class.new(::Ci::Pipeline.where(id: parent.id),
+ options: { same_project: true }).base_and_ancestors
+
+ expect(relation).to contain_exactly(ancestor, parent)
+ end
+
+ it 'can find ancestors up to a certain level' do
+ relation = described_class.new(::Ci::Pipeline.where(id: child.id),
+ options: { same_project: true }).base_and_ancestors(upto: ancestor.id)
+
+ expect(relation).to contain_exactly(parent, child)
+ end
+
+ describe 'hierarchy_order option' do
+ let(:relation) do
+ described_class.new(::Ci::Pipeline.where(id: child.id),
+ options: { same_project: true }).base_and_ancestors(hierarchy_order: hierarchy_order)
+ end
+
+ context ':asc' do
+ let(:hierarchy_order) { :asc }
+
+ it 'orders by child to ancestor' do
+ expect(relation).to eq([child, parent, ancestor])
+ end
+ end
+
+ context ':desc' do
+ let(:hierarchy_order) { :desc }
+
+ it 'orders by ancestor to child' do
+ expect(relation).to eq([ancestor, parent, child])
+ end
+ end
+ end
+ end
+
+ describe '#base_and_descendants' do
+ it 'includes the base and its descendants' do
+ relation = described_class.new(::Ci::Pipeline.where(id: parent.id),
+ options: { same_project: true }).base_and_descendants
+
+ expect(relation).to contain_exactly(parent, child)
+ end
+
+ context 'when with_depth is true' do
+ let(:relation) do
+ described_class.new(::Ci::Pipeline.where(id: ancestor.id),
+ options: { same_project: true }).base_and_descendants(with_depth: true)
+ end
+
+ it 'includes depth in the results' do
+ object_depths = {
+ ancestor.id => 1,
+ parent.id => 2,
+ cousin_parent.id => 2,
+ child.id => 3,
+ cousin.id => 3
+ }
+
+ relation.each do |object|
+ expect(object.depth).to eq(object_depths[object.id])
+ end
+ end
+ end
+ end
+
+ describe '#all_objects' do
+ it 'includes its ancestors and descendants' do
+ relation = described_class.new(::Ci::Pipeline.where(id: parent.id),
+ options: { same_project: true }).all_objects
+
+ expect(relation).to contain_exactly(ancestor, parent, child)
+ end
+
+ it 'returns the full family tree' do
+ relation = described_class.new(
+ ::Ci::Pipeline.where(id: child.id),
+ described_class.new(::Ci::Pipeline.where(id: child.id), options: { same_project: true }).base_and_ancestors,
+ options: { same_project: true }
+ ).all_objects
+
+ expect(relation).to contain_exactly(ancestor, parent, cousin_parent, child, cousin)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/reports/test_case_spec.rb b/spec/lib/gitlab/ci/reports/test_case_spec.rb
index 8882defbd9e..7fb208213c1 100644
--- a/spec/lib/gitlab/ci/reports/test_case_spec.rb
+++ b/spec/lib/gitlab/ci/reports/test_case_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Reports::TestCase do
describe '#initialize' do
- let(:test_case) { described_class.new(params)}
+ let(:test_case) { described_class.new(params) }
context 'when both classname and name are given' do
context 'when test case is passed' do
@@ -62,7 +62,9 @@ RSpec.describe Gitlab::Ci::Reports::TestCase do
end
context 'when attachment is present' do
- let(:attachment_test_case) { build(:test_case, :failed_with_attachment) }
+ let_it_be(:job) { create(:ci_build) }
+
+ let(:attachment_test_case) { build(:test_case, :failed_with_attachment, job: job) }
it "initializes the attachment if present" do
expect(attachment_test_case.attachment).to eq("some/path.png")
diff --git a/spec/lib/gitlab/ci/reports/test_suite_spec.rb b/spec/lib/gitlab/ci/reports/test_suite_spec.rb
index fbe3473f6b0..15fa78444e5 100644
--- a/spec/lib/gitlab/ci/reports/test_suite_spec.rb
+++ b/spec/lib/gitlab/ci/reports/test_suite_spec.rb
@@ -176,6 +176,37 @@ RSpec.describe Gitlab::Ci::Reports::TestSuite do
end
end
+ describe '#sorted' do
+ subject { test_suite.sorted }
+
+ context 'when there are multiple failed test cases' do
+ before do
+ test_suite.add_test_case(create_test_case_rspec_failed('test_spec_1', 1.11))
+ test_suite.add_test_case(create_test_case_rspec_failed('test_spec_2', 4.44))
+ end
+
+ it 'returns test cases sorted by execution time desc' do
+ expect(subject.test_cases['failed'].each_value.first.execution_time).to eq(4.44)
+ expect(subject.test_cases['failed'].values.second.execution_time).to eq(1.11)
+ end
+ end
+
+ context 'when there are multiple test cases' do
+ let(:status_ordered) { %w(error failed success skipped) }
+
+ before do
+ test_suite.add_test_case(test_case_success)
+ test_suite.add_test_case(test_case_failed)
+ test_suite.add_test_case(test_case_error)
+ test_suite.add_test_case(test_case_skipped)
+ end
+
+ it 'returns test cases sorted by status' do
+ expect(subject.test_cases.keys).to eq(status_ordered)
+ end
+ end
+ end
+
Gitlab::Ci::Reports::TestCase::STATUS_TYPES.each do |status_type|
describe "##{status_type}" do
subject { test_suite.public_send("#{status_type}") }
diff --git a/spec/lib/gitlab/ci/status/bridge/common_spec.rb b/spec/lib/gitlab/ci/status/bridge/common_spec.rb
new file mode 100644
index 00000000000..92600b21afc
--- /dev/null
+++ b/spec/lib/gitlab/ci/status/bridge/common_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Status::Bridge::Common do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:bridge) { create(:ci_bridge) }
+ let_it_be(:downstream_pipeline) { create(:ci_pipeline) }
+
+ before_all do
+ create(:ci_sources_pipeline,
+ source_pipeline: bridge.pipeline,
+ source_project: bridge.pipeline.project,
+ source_job: bridge,
+ pipeline: downstream_pipeline,
+ project: downstream_pipeline.project)
+ end
+
+ subject do
+ Gitlab::Ci::Status::Core
+ .new(bridge, user)
+ .extend(described_class)
+ end
+
+ describe '#details_path' do
+ context 'when user has access to read downstream pipeline' do
+ before do
+ downstream_pipeline.project.add_developer(user)
+ end
+
+ it { expect(subject).to have_details }
+ it { expect(subject.details_path).to include "pipelines/#{downstream_pipeline.id}" }
+
+ context 'when ci_bridge_pipeline_details is disabled' do
+ before do
+ stub_feature_flags(ci_bridge_pipeline_details: false)
+ end
+
+ it { expect(subject).not_to have_details }
+ it { expect(subject.details_path).to be_nil }
+ end
+ end
+
+ context 'when user does not have access to read downstream pipeline' do
+ it { expect(subject).not_to have_details }
+ it { expect(subject.details_path).to be_nil }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/status/composite_spec.rb b/spec/lib/gitlab/ci/status/composite_spec.rb
index e1dcd05373f..bcfb9f19792 100644
--- a/spec/lib/gitlab/ci/status/composite_spec.rb
+++ b/spec/lib/gitlab/ci/status/composite_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe Gitlab::Ci::Status::Composite do
shared_examples 'compares status and warnings' do
let(:composite_status) do
- described_class.new(all_statuses)
+ described_class.new(all_statuses, dag: dag)
end
it 'returns status and warnings?' do
@@ -30,21 +30,29 @@ RSpec.describe Gitlab::Ci::Status::Composite do
end
context 'allow_failure: false' do
- where(:build_statuses, :result, :has_warnings) do
- %i(skipped) | 'skipped' | false
- %i(skipped success) | 'success' | false
- %i(created) | 'created' | false
- %i(preparing) | 'preparing' | false
- %i(canceled success skipped) | 'canceled' | false
- %i(pending created skipped) | 'pending' | false
- %i(pending created skipped success) | 'running' | false
- %i(running created skipped success) | 'running' | false
- %i(success waiting_for_resource) | 'waiting_for_resource' | false
- %i(success manual) | 'manual' | false
- %i(success scheduled) | 'scheduled' | false
- %i(created preparing) | 'preparing' | false
- %i(created success pending) | 'running' | false
- %i(skipped success failed) | 'failed' | false
+ where(:build_statuses, :dag, :result, :has_warnings) do
+ %i(skipped) | false | 'skipped' | false
+ %i(skipped success) | false | 'success' | false
+ %i(skipped success) | true | 'skipped' | false
+ %i(created) | false | 'created' | false
+ %i(preparing) | false | 'preparing' | false
+ %i(canceled success skipped) | false | 'canceled' | false
+ %i(canceled success skipped) | true | 'skipped' | false
+ %i(pending created skipped) | false | 'pending' | false
+ %i(pending created skipped success) | false | 'running' | false
+ %i(running created skipped success) | false | 'running' | false
+ %i(pending created skipped) | true | 'skipped' | false
+ %i(pending created skipped success) | true | 'skipped' | false
+ %i(running created skipped success) | true | 'skipped' | false
+ %i(success waiting_for_resource) | false | 'waiting_for_resource' | false
+ %i(success manual) | false | 'manual' | false
+ %i(success scheduled) | false | 'scheduled' | false
+ %i(created preparing) | false | 'preparing' | false
+ %i(created success pending) | false | 'running' | false
+ %i(skipped success failed) | false | 'failed' | false
+ %i(skipped success failed) | true | 'skipped' | false
+ %i(success manual) | true | 'pending' | false
+ %i(success failed created) | true | 'pending' | false
end
with_them do
@@ -57,11 +65,12 @@ RSpec.describe Gitlab::Ci::Status::Composite do
end
context 'allow_failure: true' do
- where(:build_statuses, :result, :has_warnings) do
- %i(manual) | 'skipped' | false
- %i(skipped failed) | 'success' | true
- %i(created failed) | 'created' | true
- %i(preparing manual) | 'preparing' | false
+ where(:build_statuses, :dag, :result, :has_warnings) do
+ %i(manual) | false | 'skipped' | false
+ %i(skipped failed) | false | 'success' | true
+ %i(skipped failed) | true | 'skipped' | true
+ %i(created failed) | false | 'created' | true
+ %i(preparing manual) | false | 'preparing' | false
end
with_them do
diff --git a/spec/lib/gitlab/ci/templates/templates_spec.rb b/spec/lib/gitlab/ci/templates/templates_spec.rb
index def4d1b3bf6..768256ee6b3 100644
--- a/spec/lib/gitlab/ci/templates/templates_spec.rb
+++ b/spec/lib/gitlab/ci/templates/templates_spec.rb
@@ -3,21 +3,21 @@
require 'spec_helper'
RSpec.describe 'CI YML Templates' do
- subject { Gitlab::Ci::YamlProcessor.new(content) }
+ subject { Gitlab::Ci::YamlProcessor.new(content).execute }
let(:all_templates) { Gitlab::Template::GitlabCiYmlTemplate.all.map(&:full_name) }
- let(:disabled_templates) do
- Gitlab::Template::GitlabCiYmlTemplate.disabled_templates.map do |template|
- template + Gitlab::Template::GitlabCiYmlTemplate.extension
+ let(:excluded_templates) do
+ all_templates.select do |name|
+ Gitlab::Template::GitlabCiYmlTemplate.excluded_patterns.any? { |pattern| pattern.match?(name) }
end
end
- context 'included in a CI YAML configuration' do
+ context 'when including available templates in a CI YAML configuration' do
using RSpec::Parameterized::TableSyntax
where(:template_name) do
- all_templates - disabled_templates
+ all_templates - excluded_templates
end
with_them do
@@ -33,7 +33,7 @@ RSpec.describe 'CI YML Templates' do
end
it 'is valid' do
- expect { subject }.not_to raise_error
+ expect(subject).to be_valid
end
it 'require default stages to be included' do
@@ -41,4 +41,29 @@ RSpec.describe 'CI YML Templates' do
end
end
end
+
+ context 'when including unavailable templates in a CI YAML configuration' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:template_name) do
+ excluded_templates
+ end
+
+ with_them do
+ let(:content) do
+ <<~EOS
+ include:
+ - template: #{template_name}
+
+ concrete_build_implemented_by_a_user:
+ stage: test
+ script: do something
+ EOS
+ end
+
+ it 'is not valid' do
+ expect(subject).not_to be_valid
+ end
+ end
+ end
end
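The excluded_templates helper above replaces the old disabled_templates list with pattern matching. A condensed sketch of that partitioning, assuming Gitlab::Template::GitlabCiYmlTemplate.excluded_patterns returns regexps (as the pattern.match?(name) call implies):

all_templates = Gitlab::Template::GitlabCiYmlTemplate.all.map(&:full_name)

excluded_templates = all_templates.select do |name|
  Gitlab::Template::GitlabCiYmlTemplate.excluded_patterns.any? { |pattern| pattern.match?(name) }
end

available_templates = all_templates - excluded_templates
# available_templates must parse and validate; excluded_templates must not.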
diff --git a/spec/lib/gitlab/ci/trace/stream_spec.rb b/spec/lib/gitlab/ci/trace/stream_spec.rb
index e28469c9404..d65b6fb41f6 100644
--- a/spec/lib/gitlab/ci/trace/stream_spec.rb
+++ b/spec/lib/gitlab/ci/trace/stream_spec.rb
@@ -151,6 +151,28 @@ RSpec.describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
it_behaves_like 'appends'
end
+
+ describe 'metrics' do
+ let(:metrics) { spy('metrics') }
+ let(:io) { StringIO.new }
+ let(:stream) { described_class.new(metrics) { io } }
+
+ it 'increments trace streamed operation' do
+ stream.append(+'123456', 0)
+
+ expect(metrics)
+ .to have_received(:increment_trace_operation)
+ .with(operation: :streamed)
+ end
+
+ it 'increments trace bytes counter' do
+ stream.append(+'123456', 0)
+
+ expect(metrics)
+ .to have_received(:increment_trace_bytes)
+ .with(6)
+ end
+ end
end
describe '#set' do
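The new metrics examples above rely on an RSpec spy injected into the stream. A condensed sketch of that pattern, meant to be read inside an example block (spy, expect and have_received are RSpec DSL):

metrics = spy('metrics')
stream  = Gitlab::Ci::Trace::Stream.new(metrics) { StringIO.new }

stream.append(+'123456', 0)

expect(metrics).to have_received(:increment_trace_operation).with(operation: :streamed)
expect(metrics).to have_received(:increment_trace_bytes).with(6)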
diff --git a/spec/lib/gitlab/ci/trace_spec.rb b/spec/lib/gitlab/ci/trace_spec.rb
index 85edf27d3e7..171877dbaee 100644
--- a/spec/lib/gitlab/ci/trace_spec.rb
+++ b/spec/lib/gitlab/ci/trace_spec.rb
@@ -11,6 +11,29 @@ RSpec.describe Gitlab::Ci::Trace, :clean_gitlab_redis_shared_state do
it { expect(trace).to delegate_method(:old_trace).to(:job) }
end
+ context 'when trace is migrated to object storage' do
+ let!(:job) { create(:ci_build, :trace_artifact) }
+ let!(:artifact1) { job.job_artifacts_trace }
+ let!(:artifact2) { job.reload.job_artifacts_trace }
+ let(:test_data) { "hello world" }
+
+ before do
+ stub_artifacts_object_storage
+
+ artifact1.file.migrate!(ObjectStorage::Store::REMOTE)
+ end
+
+ it 'reloads the trace after it is migrated' do
+ stub_const('Gitlab::HttpIO::BUFFER_SIZE', test_data.length)
+
+ expect_next_instance_of(Gitlab::HttpIO) do |http_io|
+ expect(http_io).to receive(:get_chunk).and_return(test_data, "")
+ end
+
+ expect(artifact2.job.trace.raw).to eq(test_data)
+ end
+ end
+
context 'when live trace feature is disabled' do
before do
stub_feature_flags(ci_enable_live_trace: false)
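The object-storage example above works by shrinking the HTTP read buffer so the whole remote trace fits in one chunk and then stubbing that single read. A condensed sketch of the stubbing, assuming the usual spec helpers (stub_artifacts_object_storage, expect_next_instance_of) are in scope:

test_data = "hello world"

# Make one get_chunk call cover the entire trace.
stub_const('Gitlab::HttpIO::BUFFER_SIZE', test_data.length)

expect_next_instance_of(Gitlab::HttpIO) do |http_io|
  # The first call returns the payload, the second returns "" to signal EOF.
  expect(http_io).to receive(:get_chunk).and_return(test_data, "")
end

expect(artifact2.job.trace.raw).to eq(test_data)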
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index 1c81cc83cd1..d596494a987 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -7,10 +7,16 @@ module Gitlab
RSpec.describe YamlProcessor do
include StubRequests
- subject { described_class.new(config, user: nil) }
+ subject { described_class.new(config, user: nil).execute }
+
+ shared_examples 'returns errors' do |error_message|
+ it 'adds a message when an error is encountered' do
+ expect(subject.errors).to include(error_message)
+ end
+ end
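Most of the remaining hunks in this file are mechanical fallout from the interface change: Gitlab::Ci::YamlProcessor.new(...).execute now returns a result object instead of raising ValidationError. A hedged sketch of the usage pattern the updated assertions rely on (the result methods named here are the ones exercised by this spec, not a full API listing):

result = Gitlab::Ci::YamlProcessor.new(config, user: nil).execute

if result.valid?
  result.builds                            # array of job attribute hashes
  result.stage_builds_attributes('test')   # jobs grouped by stage
  result.build_attributes(:rspec)          # attributes for a single job
else
  result.errors    # e.g. ["jobs:rspec:tags config should be an array of strings"]
  result.warnings  # e.g. [/jobs:rspec may allow multiple pipelines to run/]
end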
describe '#build_attributes' do
- subject { described_class.new(config, user: nil).build_attributes(:rspec) }
+ subject { described_class.new(config, user: nil).execute.build_attributes(:rspec) }
describe 'attributes list' do
let(:config) do
@@ -92,7 +98,7 @@ module Gitlab
config = YAML.dump({ default: { tags: %w[A B] },
rspec: { script: "rspec" } })
- config_processor = Gitlab::Ci::YamlProcessor.new(config)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
expect(config_processor.stage_builds_attributes("test").first).to eq({
@@ -139,7 +145,7 @@ module Gitlab
config = YAML.dump({ default: { interruptible: true },
rspec: { script: "rspec" } })
- config_processor = Gitlab::Ci::YamlProcessor.new(config)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
expect(config_processor.stage_builds_attributes("test").first).to eq({
@@ -345,9 +351,7 @@ module Gitlab
EOYML
end
- it 'parses the workflow:rules configuration' do
- expect { subject }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'workflow config contains unknown keys: variables')
- end
+ it_behaves_like 'returns errors', 'workflow config contains unknown keys: variables'
end
context 'with rules and variables' do
@@ -470,12 +474,11 @@ module Gitlab
end
it 'is propagated all the way up into the raised exception' do
- expect { subject }.to raise_error do |error|
- expect(error).to be_a(described_class::ValidationError)
- expect(error.message).to eq('jobs:invalid:artifacts config should be a hash')
- expect(error.warnings).to contain_exactly(/jobs:rspec may allow multiple pipelines to run/)
- end
+ expect(subject).not_to be_valid
+ expect(subject.warnings).to contain_exactly(/jobs:rspec may allow multiple pipelines to run/)
end
+
+ it_behaves_like 'returns errors', 'jobs:invalid:artifacts config should be a hash'
end
context 'when error is raised before composing the config' do
@@ -489,23 +492,18 @@ module Gitlab
EOYML
end
- it 'raises an exception with empty warnings array' do
- expect { subject }.to raise_error do |error|
- expect(error).to be_a(described_class::ValidationError)
- expect(error.message).to eq('Local file `unknown/file.yml` does not have project!')
- expect(error.warnings).to be_empty
- end
+ it 'has empty warnings' do
+ expect(subject.warnings).to be_empty
end
+
+ it_behaves_like 'returns errors', 'Local file `unknown/file.yml` does not have project!'
end
context 'when error is raised after composing the config with warnings' do
shared_examples 'has warnings and expected error' do |error_message|
- it 'raises an exception including warnings' do
- expect { subject }.to raise_error do |error|
- expect(error).to be_a(described_class::ValidationError)
- expect(error.message).to match(error_message)
- expect(error.warnings).to be_present
- end
+ it 'returns errors and warnings', :aggregate_failures do
+ expect(subject.errors).to include(error_message)
+ expect(subject.warnings).to be_present
end
end
@@ -585,72 +583,56 @@ module Gitlab
describe 'only / except policies validations' do
context 'when `only` has an invalid value' do
let(:config) { { rspec: { script: "rspec", type: "test", only: only } } }
- let(:processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
+
+ subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)).execute }
context 'when it is integer' do
let(:only) { 1 }
- it do
- expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError,
- 'jobs:rspec:only has to be either an array of conditions or a hash')
- end
+ it_behaves_like 'returns errors', 'jobs:rspec:only has to be either an array of conditions or a hash'
end
context 'when it is an array of integers' do
let(:only) { [1, 1] }
- it do
- expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError,
- 'jobs:rspec:only config should be an array of strings or regexps')
- end
+ it_behaves_like 'returns errors', 'jobs:rspec:only config should be an array of strings or regexps'
end
context 'when it is invalid regex' do
let(:only) { ["/*invalid/"] }
- it do
- expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError,
- 'jobs:rspec:only config should be an array of strings or regexps')
- end
+ it_behaves_like 'returns errors', 'jobs:rspec:only config should be an array of strings or regexps'
end
end
context 'when `except` has an invalid value' do
let(:config) { { rspec: { script: "rspec", except: except } } }
- let(:processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
+
+ subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)).execute }
context 'when it is integer' do
let(:except) { 1 }
- it do
- expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError,
- 'jobs:rspec:except has to be either an array of conditions or a hash')
- end
+ it_behaves_like 'returns errors', 'jobs:rspec:except has to be either an array of conditions or a hash'
end
context 'when it is an array of integers' do
let(:except) { [1, 1] }
- it do
- expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError,
- 'jobs:rspec:except config should be an array of strings or regexps')
- end
+ it_behaves_like 'returns errors', 'jobs:rspec:except config should be an array of strings or regexps'
end
context 'when it is invalid regex' do
let(:except) { ["/*invalid/"] }
- it do
- expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError,
- 'jobs:rspec:except config should be an array of strings or regexps')
- end
+ it_behaves_like 'returns errors', 'jobs:rspec:except config should be an array of strings or regexps'
end
end
end
describe "Scripts handling" do
let(:config_data) { YAML.dump(config) }
- let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config_data) }
+ let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config_data).execute }
subject { config_processor.stage_builds_attributes('test').first }
@@ -819,7 +801,7 @@ module Gitlab
before_script: ["pwd"],
rspec: { script: "rspec" } })
- config_processor = Gitlab::Ci::YamlProcessor.new(config)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
expect(config_processor.stage_builds_attributes("test").first).to eq({
@@ -852,7 +834,7 @@ module Gitlab
command: ["/usr/local/bin/init", "run"] }, "docker:dind"],
script: "rspec" } })
- config_processor = Gitlab::Ci::YamlProcessor.new(config)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
expect(config_processor.stage_builds_attributes("test").first).to eq({
@@ -883,7 +865,7 @@ module Gitlab
before_script: ["pwd"],
rspec: { script: "rspec" } })
- config_processor = Gitlab::Ci::YamlProcessor.new(config)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
expect(config_processor.stage_builds_attributes("test").first).to eq({
@@ -910,7 +892,7 @@ module Gitlab
before_script: ["pwd"],
rspec: { image: "ruby:2.5", services: ["postgresql", "docker:dind"], script: "rspec" } })
- config_processor = Gitlab::Ci::YamlProcessor.new(config)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
expect(config_processor.stage_builds_attributes("test").first).to eq({
@@ -934,9 +916,9 @@ module Gitlab
end
describe 'Variables' do
- let(:config_processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
+ subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)).execute }
- subject { config_processor.builds.first[:yaml_variables] }
+ let(:build_variables) { subject.builds.first[:yaml_variables] }
context 'when global variables are defined' do
let(:variables) do
@@ -952,7 +934,7 @@ module Gitlab
end
it 'returns global variables' do
- expect(subject).to contain_exactly(
+ expect(build_variables).to contain_exactly(
{ key: 'VAR1', value: 'value1', public: true },
{ key: 'VAR2', value: 'value2', public: true }
)
@@ -980,7 +962,7 @@ module Gitlab
let(:inherit) { }
it 'returns all unique variables' do
- expect(subject).to contain_exactly(
+ expect(build_variables).to contain_exactly(
{ key: 'VAR4', value: 'global4', public: true },
{ key: 'VAR3', value: 'global3', public: true },
{ key: 'VAR1', value: 'value1', public: true },
@@ -993,7 +975,7 @@ module Gitlab
let(:inherit) { { variables: false } }
it 'does not inherit variables' do
- expect(subject).to contain_exactly(
+ expect(build_variables).to contain_exactly(
{ key: 'VAR1', value: 'value1', public: true },
{ key: 'VAR2', value: 'value2', public: true }
)
@@ -1004,7 +986,7 @@ module Gitlab
let(:inherit) { { variables: %w[VAR1 VAR4] } }
it 'returns all unique variables and inherits only specified variables' do
- expect(subject).to contain_exactly(
+ expect(build_variables).to contain_exactly(
{ key: 'VAR4', value: 'global4', public: true },
{ key: 'VAR1', value: 'value1', public: true },
{ key: 'VAR2', value: 'value2', public: true }
@@ -1027,7 +1009,7 @@ module Gitlab
end
it 'returns job variables' do
- expect(subject).to contain_exactly(
+ expect(build_variables).to contain_exactly(
{ key: 'VAR1', value: 'value1', public: true },
{ key: 'VAR2', value: 'value2', public: true }
)
@@ -1040,11 +1022,7 @@ module Gitlab
%w(VAR1 value1 VAR2 value2)
end
- it 'raises error' do
- expect { subject }
- .to raise_error(Gitlab::Ci::YamlProcessor::ValidationError,
- /jobs:rspec:variables config should be a hash of key value pairs/)
- end
+ it_behaves_like 'returns errors', /jobs:rspec:variables config should be a hash of key value pairs/
end
context 'when variables key defined but value not specified' do
@@ -1057,8 +1035,8 @@ module Gitlab
# When variables config is empty, we assume this is a valid
# configuration, see issue #18775
#
- expect(subject).to be_an_instance_of(Array)
- expect(subject).to be_empty
+ expect(build_variables).to be_an_instance_of(Array)
+ expect(build_variables).to be_empty
end
end
end
@@ -1073,14 +1051,14 @@ module Gitlab
end
it 'returns empty array' do
- expect(subject).to be_an_instance_of(Array)
- expect(subject).to be_empty
+ expect(build_variables).to be_an_instance_of(Array)
+ expect(build_variables).to be_empty
end
end
end
context 'when using `extends`' do
- let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config) }
+ let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config).execute }
subject { config_processor.builds.first }
@@ -1142,31 +1120,25 @@ module Gitlab
}
end
- subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config), opts) }
+ subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config), opts).execute }
context "when validating a ci config file with no project context" do
context "when a single string is provided" do
let(:include_content) { "/local.gitlab-ci.yml" }
- it "returns a validation error" do
- expect { subject }.to raise_error /does not have project/
- end
+ it_behaves_like 'returns errors', /does not have project/
end
context "when an array is provided" do
let(:include_content) { ["/local.gitlab-ci.yml"] }
- it "returns a validation error" do
- expect { subject }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, /does not have project/)
- end
+ it_behaves_like 'returns errors', /does not have project/
end
context "when an array of wrong keyed object is provided" do
let(:include_content) { [{ yolo: "/local.gitlab-ci.yml" }] }
- it "returns a validation error" do
- expect { subject }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError)
- end
+ it_behaves_like 'returns errors', /needs to match exactly one accessor/
end
context "when an array of mixed typed objects is provided" do
@@ -1185,17 +1157,13 @@ module Gitlab
body: 'prepare: { script: ls -al }')
end
- it "does not return any error" do
- expect { subject }.not_to raise_error
- end
+ it { is_expected.to be_valid }
end
context "when the include type is incorrect" do
let(:include_content) { { name: "/local.gitlab-ci.yml" } }
- it "returns an invalid configuration error" do
- expect { subject }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError)
- end
+ it_behaves_like 'returns errors', /needs to match exactly one accessor/
end
end
@@ -1210,18 +1178,11 @@ module Gitlab
.and_return(YAML.dump({ job1: { script: 'hello' } }))
end
- it "does not return an error" do
- expect { subject }.not_to raise_error
- end
+ it { is_expected.to be_valid }
end
context "when the included internal file is not present" do
- it "returns an error with missing file details" do
- expect { subject }.to raise_error(
- Gitlab::Ci::YamlProcessor::ValidationError,
- "Local file `#{include_content}` does not exist!"
- )
- end
+ it_behaves_like 'returns errors', "Local file `/local.gitlab-ci.yml` does not exist!"
end
end
end
@@ -1233,7 +1194,7 @@ module Gitlab
rspec: { script: 'rspec', when: when_state }
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
builds = config_processor.stage_builds_attributes("test")
expect(builds.size).to eq(1)
@@ -1243,13 +1204,14 @@ module Gitlab
context 'delayed' do
context 'with start_in' do
- it 'creates one build and sets when:' do
- config = YAML.dump({
+ let(:config) do
+ YAML.dump({
rspec: { script: 'rspec', when: 'delayed', start_in: '1 hour' }
})
+ end
- config_processor = Gitlab::Ci::YamlProcessor.new(config)
- builds = config_processor.stage_builds_attributes("test")
+ it 'creates one build and sets when:' do
+ builds = subject.stage_builds_attributes("test")
expect(builds.size).to eq(1)
expect(builds.first[:when]).to eq('delayed')
@@ -1258,15 +1220,13 @@ module Gitlab
end
context 'without start_in' do
- it 'raises an error' do
- config = YAML.dump({
+ let(:config) do
+ YAML.dump({
rspec: { script: 'rspec', when: 'delayed' }
})
-
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(YamlProcessor::ValidationError, /start in should be a duration/)
end
+
+ it_behaves_like 'returns errors', /start in should be a duration/
end
end
end
@@ -1278,7 +1238,7 @@ module Gitlab
variables: { 'VAR1' => 1 } })
end
- let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config) }
+ let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config).execute }
let(:builds) { config_processor.stage_builds_attributes('test') }
context 'when job is parallelized' do
@@ -1377,16 +1337,13 @@ module Gitlab
describe 'cache' do
context 'when cache definition has unknown keys' do
- it 'raises relevant validation error' do
- config = YAML.dump(
+ let(:config) do
+ YAML.dump(
{ cache: { untracked: true, invalid: 'key' },
rspec: { script: 'rspec' } })
-
- expect { Gitlab::Ci::YamlProcessor.new(config) }.to raise_error(
- Gitlab::Ci::YamlProcessor::ValidationError,
- 'cache config contains unknown keys: invalid'
- )
end
+
+ it_behaves_like 'returns errors', 'cache config contains unknown keys: invalid'
end
it "returns cache when defined globally" do
@@ -1397,7 +1354,7 @@ module Gitlab
}
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
expect(config_processor.stage_builds_attributes("test").first[:cache]).to eq(
@@ -1419,7 +1376,7 @@ module Gitlab
}
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
expect(config_processor.stage_builds_attributes("test").first[:cache]).to eq(
@@ -1438,7 +1395,7 @@ module Gitlab
}
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
expect(config_processor.stage_builds_attributes('test').size).to eq(1)
expect(config_processor.stage_builds_attributes('test').first[:cache]).to eq(
@@ -1461,7 +1418,7 @@ module Gitlab
}
)
- config_processor = Gitlab::Ci::YamlProcessor.new(config)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
expect(config_processor.stage_builds_attributes('test').size).to eq(1)
expect(config_processor.stage_builds_attributes('test').first[:cache]).to eq(
@@ -1484,7 +1441,7 @@ module Gitlab
}
)
- config_processor = Gitlab::Ci::YamlProcessor.new(config)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
expect(config_processor.stage_builds_attributes('test').size).to eq(1)
expect(config_processor.stage_builds_attributes('test').first[:cache]).to eq(
@@ -1504,7 +1461,7 @@ module Gitlab
}
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
expect(config_processor.stage_builds_attributes("test").first[:cache]).to eq(
@@ -1534,7 +1491,7 @@ module Gitlab
}
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
expect(config_processor.stage_builds_attributes("test").first).to eq({
@@ -1570,7 +1527,7 @@ module Gitlab
}
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
builds = config_processor.stage_builds_attributes("test")
expect(builds.size).to eq(1)
@@ -1586,7 +1543,7 @@ module Gitlab
}
})
- config_processor = Gitlab::Ci::YamlProcessor.new(config)
+ config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
builds = config_processor.stage_builds_attributes("test")
expect(builds.size).to eq(1)
@@ -1594,17 +1551,19 @@ module Gitlab
end
end
- it "gracefully handles errors in artifacts type" do
- config = <<~YAML
- test:
- script:
- - echo "Hello world"
- artifacts:
- - paths:
- - test/
- YAML
+ context 'when artifacts syntax is wrong' do
+ let(:config) do
+ <<~YAML
+ test:
+ script:
+ - echo "Hello world"
+ artifacts:
+ - paths:
+ - test/
+ YAML
+ end
- expect { described_class.new(config) }.to raise_error(described_class::ValidationError)
+ it_behaves_like 'returns errors', 'jobs:test:artifacts config should be a hash'
end
it 'populates a build options with complete artifacts configuration' do
@@ -1620,14 +1579,14 @@ module Gitlab
- my/test/something
YAML
- attributes = Gitlab::Ci::YamlProcessor.new(config).build_attributes('test')
+ attributes = Gitlab::Ci::YamlProcessor.new(config).execute.build_attributes('test')
expect(attributes.dig(*%i[options artifacts exclude])).to eq(%w[my/test/something])
end
end
describe "release" do
- let(:processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
+ let(:processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)).execute }
let(:config) do
{
stages: %w[build test release],
@@ -1672,8 +1631,9 @@ module Gitlab
}
end
- let(:processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
- let(:builds) { processor.stage_builds_attributes('deploy') }
+ subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)).execute }
+
+ let(:builds) { subject.stage_builds_attributes('deploy') }
context 'when a production environment is specified' do
let(:environment) { 'production' }
@@ -1723,18 +1683,13 @@ module Gitlab
context 'is not a string' do
let(:environment) { 1 }
- it 'raises error' do
- expect { builds }.to raise_error(
- 'jobs:deploy_to_production:environment config should be a hash or a string')
- end
+ it_behaves_like 'returns errors', 'jobs:deploy_to_production:environment config should be a hash or a string'
end
context 'is not a valid string' do
let(:environment) { 'production:staging' }
- it 'raises error' do
- expect { builds }.to raise_error("jobs:deploy_to_production:environment name #{Gitlab::Regex.environment_name_regex_message}")
- end
+ it_behaves_like 'returns errors', "jobs:deploy_to_production:environment name #{Gitlab::Regex.environment_name_regex_message}"
end
context 'when on_stop is specified' do
@@ -1753,33 +1708,25 @@ module Gitlab
context 'without matching job' do
let(:close_review) { nil }
- it 'raises error' do
- expect { builds }.to raise_error('review job: on_stop job close_review is not defined')
- end
+ it_behaves_like 'returns errors', 'review job: on_stop job close_review is not defined'
end
context 'with close job without environment' do
let(:close_review) { { stage: 'deploy', script: 'test' } }
- it 'raises error' do
- expect { builds }.to raise_error('review job: on_stop job close_review does not have environment defined')
- end
+ it_behaves_like 'returns errors', 'review job: on_stop job close_review does not have environment defined'
end
context 'with close job for different environment' do
let(:close_review) { { stage: 'deploy', script: 'test', environment: 'production' } }
- it 'raises error' do
- expect { builds }.to raise_error('review job: on_stop job close_review have different environment name')
- end
+ it_behaves_like 'returns errors', 'review job: on_stop job close_review have different environment name'
end
context 'with close job without stop action' do
let(:close_review) { { stage: 'deploy', script: 'test', environment: { name: 'review' } } }
- it 'raises error' do
- expect { builds }.to raise_error('review job: on_stop job close_review needs to have action stop defined')
- end
+ it_behaves_like 'returns errors', 'review job: on_stop job close_review needs to have action stop defined'
end
end
end
@@ -1794,8 +1741,9 @@ module Gitlab
}
end
- let(:processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
- let(:builds) { processor.stage_builds_attributes('deploy') }
+ subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)).execute }
+
+ let(:builds) { subject.stage_builds_attributes('deploy') }
context 'when no timeout was provided' do
it 'does not include job_timeout' do
@@ -1809,9 +1757,7 @@ module Gitlab
config[:deploy_to_production][:timeout] = 'not-a-number'
end
- it 'raises an error for invalid number' do
- expect { builds }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'jobs:deploy_to_production:timeout config should be a duration')
- end
+ it_behaves_like 'returns errors', 'jobs:deploy_to_production:timeout config should be a duration'
end
context 'when some valid timeout was provided' do
@@ -1837,36 +1783,36 @@ module Gitlab
}
end
- subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
+ subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)).execute }
context 'no dependencies' do
let(:dependencies) { }
- it { expect { subject }.not_to raise_error }
+ it { is_expected.to be_valid }
end
context 'dependencies to builds' do
let(:dependencies) { %w(build1 build2) }
- it { expect { subject }.not_to raise_error }
+ it { is_expected.to be_valid }
end
context 'dependencies to builds defined as symbols' do
let(:dependencies) { [:build1, :build2] }
- it { expect { subject }.not_to raise_error }
+ it { is_expected.to be_valid }
end
context 'undefined dependency' do
let(:dependencies) { ['undefined'] }
- it { expect { subject }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'test1 job: undefined dependency: undefined') }
+ it_behaves_like 'returns errors', 'test1 job: undefined dependency: undefined'
end
context 'dependencies to deploy' do
let(:dependencies) { ['deploy'] }
- it { expect { subject }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'test1 job: dependency deploy is not defined in prior stages') }
+ it_behaves_like 'returns errors', 'test1 job: dependency deploy is not defined in prior stages'
end
context 'when a job depends on another job that references a not-yet defined stage' do
@@ -1891,7 +1837,7 @@ module Gitlab
}
end
- it { expect { subject }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, /is not defined in prior stages/) }
+ it_behaves_like 'returns errors', /is not defined in prior stages/
end
end
@@ -1910,10 +1856,10 @@ module Gitlab
}
end
- subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
+ subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)).execute }
context 'no needs' do
- it { expect { subject }.not_to raise_error }
+ it { is_expected.to be_valid }
end
context 'needs two builds' do
@@ -2053,20 +1999,20 @@ module Gitlab
context 'undefined need' do
let(:needs) { ['undefined'] }
- it { expect { subject }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'test1 job: undefined need: undefined') }
+ it_behaves_like 'returns errors', 'test1 job: undefined need: undefined'
end
context 'needs to deploy' do
let(:needs) { ['deploy'] }
- it { expect { subject }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'test1 job: need deploy is not defined in prior stages') }
+ it_behaves_like 'returns errors', 'test1 job: need deploy is not defined in prior stages'
end
context 'needs and dependencies that are mismatching' do
let(:needs) { %w(build1) }
let(:dependencies) { %w(build2) }
- it { expect { subject }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'jobs:test1 dependencies the build2 should be part of needs') }
+ it_behaves_like 'returns errors', 'jobs:test1 dependencies the build2 should be part of needs'
end
context 'needs with a Hash type and dependencies with a string type that are mismatching' do
@@ -2079,33 +2025,33 @@ module Gitlab
let(:dependencies) { %w(build3) }
- it { expect { subject }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'jobs:test1 dependencies the build3 should be part of needs') }
+ it_behaves_like 'returns errors', 'jobs:test1 dependencies the build3 should be part of needs'
end
context 'needs with an array type and dependency with a string type' do
let(:needs) { %w(build1) }
let(:dependencies) { 'deploy' }
- it { expect { subject }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'jobs:test1 dependencies should be an array of strings') }
+ it_behaves_like 'returns errors', 'jobs:test1 dependencies should be an array of strings'
end
context 'needs with a string type and dependency with an array type' do
let(:needs) { 'build1' }
let(:dependencies) { %w(deploy) }
- it { expect { subject }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'jobs:test1:needs config can only be a hash or an array') }
+ it_behaves_like 'returns errors', 'jobs:test1:needs config can only be a hash or an array'
end
context 'needs with a Hash type and dependency with a string type' do
let(:needs) { { job: 'build1' } }
let(:dependencies) { 'deploy' }
- it { expect { subject }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'jobs:test1 dependencies should be an array of strings') }
+ it_behaves_like 'returns errors', 'jobs:test1 dependencies should be an array of strings'
end
end
context 'with when/rules conflict' do
- subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
+ subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)).execute }
let(:config) do
{
@@ -2121,9 +2067,7 @@ module Gitlab
}
end
- it 'raises no exceptions' do
- expect { subject }.not_to raise_error
- end
+ it { is_expected.to be_valid }
it 'returns all jobs regardless of their inclusion' do
expect(subject.builds.count).to eq(config.keys.count)
@@ -2141,9 +2085,7 @@ module Gitlab
}
end
- it 'raises a ValidationError' do
- expect { subject }.to raise_error(YamlProcessor::ValidationError, /may not be used with `rules`: when/)
- end
+ it_behaves_like 'returns errors', /may not be used with `rules`: when/
end
context 'used with job-level when:delayed' do
@@ -2159,14 +2101,12 @@ module Gitlab
}
end
- it 'raises a ValidationError' do
- expect { subject }.to raise_error(YamlProcessor::ValidationError, /may not be used with `rules`: when, start_in/)
- end
+ it_behaves_like 'returns errors', /may not be used with `rules`: when, start_in/
end
end
describe "Hidden jobs" do
- let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config) }
+ let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config).execute }
subject { config_processor.stage_builds_attributes("test") }
@@ -2213,7 +2153,7 @@ module Gitlab
end
describe "YAML Alias/Anchor" do
- let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config) }
+ let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config).execute }
subject { config_processor.stage_builds_attributes("build") }
@@ -2310,7 +2250,7 @@ module Gitlab
})
end
- it { expect { subject }.not_to raise_error }
+ it { is_expected.to be_valid }
end
context 'when job is not specified while artifact is' do
@@ -2323,11 +2263,7 @@ module Gitlab
})
end
- it do
- expect { subject }.to raise_error(
- described_class::ValidationError,
- /include config must specify the job where to fetch the artifact from/)
- end
+ it_behaves_like 'returns errors', /include config must specify the job where to fetch the artifact from/
end
context 'when include is a string' do
@@ -2343,376 +2279,323 @@ module Gitlab
})
end
- it { expect { subject }.not_to raise_error }
+ it { is_expected.to be_valid }
end
end
describe "Error handling" do
- it "fails to parse YAML" do
- expect do
- Gitlab::Ci::YamlProcessor.new("invalid: yaml: test")
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError)
+ subject { described_class.new(config).execute }
+
+ context 'when YAML syntax is invalid' do
+ let(:config) { 'invalid: yaml: test' }
+
+ it_behaves_like 'returns errors', /mapping values are not allowed/
end
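Parsing failures follow the same pattern: an unparseable document no longer raises from the constructor, it simply produces an invalid result. A small sketch, reusing the fixture from the context above:

result = Gitlab::Ci::YamlProcessor.new('invalid: yaml: test').execute

result.valid?  # => false
result.errors  # => contains a message matching /mapping values are not allowed/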
- it "indicates that object is invalid" do
- expect do
- Gitlab::Ci::YamlProcessor.new("invalid_yaml")
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError)
+ context 'when object is invalid' do
+ let(:config) { 'invalid_yaml' }
+
+ it_behaves_like 'returns errors', /Invalid configuration format/
end
- it "returns errors if tags parameter is invalid" do
- config = YAML.dump({ rspec: { script: "test", tags: "mysql" } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:tags config should be an array of strings")
+ context 'returns errors if tags parameter is invalid' do
+ let(:config) { YAML.dump({ rspec: { script: "test", tags: "mysql" } }) }
+
+ it_behaves_like 'returns errors', 'jobs:rspec:tags config should be an array of strings'
end
- it "returns errors if before_script parameter is invalid" do
- config = YAML.dump({ before_script: "bundle update", rspec: { script: "test" } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "before_script config should be an array containing strings and arrays of strings")
+ context 'returns errors if before_script parameter is invalid' do
+ let(:config) { YAML.dump({ before_script: "bundle update", rspec: { script: "test" } }) }
+
+ it_behaves_like 'returns errors', 'before_script config should be an array containing strings and arrays of strings'
end
- it "returns errors if job before_script parameter is not an array of strings" do
- config = YAML.dump({ rspec: { script: "test", before_script: [10, "test"] } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:before_script config should be an array containing strings and arrays of strings")
+ context 'returns errors if job before_script parameter is not an array of strings' do
+ let(:config) { YAML.dump({ rspec: { script: "test", before_script: [10, "test"] } }) }
+
+ it_behaves_like 'returns errors', 'jobs:rspec:before_script config should be an array containing strings and arrays of strings'
end
- it "returns errors if job before_script parameter is multi-level nested array of strings" do
- config = YAML.dump({ rspec: { script: "test", before_script: [["ls", ["pwd"]], "test"] } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:before_script config should be an array containing strings and arrays of strings")
+ context 'returns errors if job before_script parameter is multi-level nested array of strings' do
+ let(:config) { YAML.dump({ rspec: { script: "test", before_script: [["ls", ["pwd"]], "test"] } }) }
+
+ it_behaves_like 'returns errors', 'jobs:rspec:before_script config should be an array containing strings and arrays of strings'
end
- it "returns errors if after_script parameter is invalid" do
- config = YAML.dump({ after_script: "bundle update", rspec: { script: "test" } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "after_script config should be an array containing strings and arrays of strings")
+ context 'returns errors if after_script parameter is invalid' do
+ let(:config) { YAML.dump({ after_script: "bundle update", rspec: { script: "test" } }) }
+
+ it_behaves_like 'returns errors', 'after_script config should be an array containing strings and arrays of strings'
end
- it "returns errors if job after_script parameter is not an array of strings" do
- config = YAML.dump({ rspec: { script: "test", after_script: [10, "test"] } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:after_script config should be an array containing strings and arrays of strings")
+ context 'returns errors if job after_script parameter is not an array of strings' do
+ let(:config) { YAML.dump({ rspec: { script: "test", after_script: [10, "test"] } }) }
+
+ it_behaves_like 'returns errors', 'jobs:rspec:after_script config should be an array containing strings and arrays of strings'
end
- it "returns errors if job after_script parameter is multi-level nested array of strings" do
- config = YAML.dump({ rspec: { script: "test", after_script: [["ls", ["pwd"]], "test"] } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:after_script config should be an array containing strings and arrays of strings")
+ context 'returns errors if job after_script parameter is multi-level nested array of strings' do
+ let(:config) { YAML.dump({ rspec: { script: "test", after_script: [["ls", ["pwd"]], "test"] } }) }
+
+ it_behaves_like 'returns errors', 'jobs:rspec:after_script config should be an array containing strings and arrays of strings'
end
- it "returns errors if image parameter is invalid" do
- config = YAML.dump({ image: ["test"], rspec: { script: "test" } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "image config should be a hash or a string")
+ context 'returns errors if image parameter is invalid' do
+ let(:config) { YAML.dump({ image: ["test"], rspec: { script: "test" } }) }
+
+ it_behaves_like 'returns errors', 'image config should be a hash or a string'
end
- it "returns errors if job name is blank" do
- config = YAML.dump({ '' => { script: "test" } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:job name can't be blank")
+ context 'returns errors if job name is blank' do
+ let(:config) { YAML.dump({ '' => { script: "test" } }) }
+
+ it_behaves_like 'returns errors', "jobs:job name can't be blank"
end
- it "returns errors if job name is non-string" do
- config = YAML.dump({ 10 => { script: "test" } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:10 name should be a symbol")
+ context 'returns errors if job name is non-string' do
+ let(:config) { YAML.dump({ 10 => { script: "test" } }) }
+
+ it_behaves_like 'returns errors', 'jobs:10 name should be a symbol'
end
- it "returns errors if job image parameter is invalid" do
- config = YAML.dump({ rspec: { script: "test", image: ["test"] } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:image config should be a hash or a string")
+ context 'returns errors if job image parameter is invalid' do
+ let(:config) { YAML.dump({ rspec: { script: "test", image: ["test"] } }) }
+
+ it_behaves_like 'returns errors', 'jobs:rspec:image config should be a hash or a string'
end
- it "returns errors if services parameter is not an array" do
- config = YAML.dump({ services: "test", rspec: { script: "test" } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "services config should be a array")
+ context 'returns errors if services parameter is not an array' do
+ let(:config) { YAML.dump({ services: "test", rspec: { script: "test" } }) }
+
+ it_behaves_like 'returns errors', 'services config should be a array'
end
- it "returns errors if services parameter is not an array of strings" do
- config = YAML.dump({ services: [10, "test"], rspec: { script: "test" } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "services:service config should be a hash or a string")
+ context 'returns errors if services parameter is not an array of strings' do
+ let(:config) { YAML.dump({ services: [10, "test"], rspec: { script: "test" } }) }
+
+ it_behaves_like 'returns errors', 'services:service config should be a hash or a string'
end
- it "returns errors if job services parameter is not an array" do
- config = YAML.dump({ rspec: { script: "test", services: "test" } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:services config should be a array")
+ context 'returns errors if job services parameter is not an array' do
+ let(:config) { YAML.dump({ rspec: { script: "test", services: "test" } }) }
+
+ it_behaves_like 'returns errors', 'jobs:rspec:services config should be a array'
end
- it "returns errors if job services parameter is not an array of strings" do
- config = YAML.dump({ rspec: { script: "test", services: [10, "test"] } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:services:service config should be a hash or a string")
+ context 'returns errors if job services parameter is not an array of strings' do
+ let(:config) { YAML.dump({ rspec: { script: "test", services: [10, "test"] } }) }
+
+ it_behaves_like 'returns errors', 'jobs:rspec:services:service config should be a hash or a string'
end
- it "returns error if job configuration is invalid" do
- config = YAML.dump({ extra: "bundle update" })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "root config contains unknown keys: extra")
+ context 'returns error if job configuration is invalid' do
+ let(:config) { YAML.dump({ extra: "bundle update" }) }
+
+ it_behaves_like 'returns errors', 'jobs extra config should implement a script: or a trigger: keyword'
end
- it "returns errors if services configuration is not correct" do
- config = YAML.dump({ extra: { script: 'rspec', services: "test" } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:extra:services config should be a array")
+ context 'returns errors if services configuration is not correct' do
+ let(:config) { YAML.dump({ extra: { script: 'rspec', services: "test" } }) }
+
+ it_behaves_like 'returns errors', 'jobs:extra:services config should be a array'
end
- it "returns errors if there are no jobs defined" do
- config = YAML.dump({ before_script: ["bundle update"] })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs config should contain at least one visible job")
+ context 'returns errors if there are no jobs defined' do
+ let(:config) { YAML.dump({ before_script: ["bundle update"] }) }
+
+ it_behaves_like 'returns errors', 'jobs config should contain at least one visible job'
end
- it "returns errors if the job script is not defined" do
- config = YAML.dump({ rspec: { before_script: "test" } })
+ context 'returns errors if the job script is not defined' do
+ let(:config) { YAML.dump({ rspec: { before_script: "test" } }) }
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec script can't be blank")
+ it_behaves_like 'returns errors', 'jobs rspec config should implement a script: or a trigger: keyword'
end
- it "returns errors if there are no visible jobs defined" do
- config = YAML.dump({ before_script: ["bundle update"], '.hidden'.to_sym => { script: 'ls' } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs config should contain at least one visible job")
+ context 'returns errors if there are no visible jobs defined' do
+ let(:config) { YAML.dump({ before_script: ["bundle update"], '.hidden'.to_sym => { script: 'ls' } }) }
+
+ it_behaves_like 'returns errors', 'jobs config should contain at least one visible job'
end
- it "returns errors if job allow_failure parameter is not an boolean" do
- config = YAML.dump({ rspec: { script: "test", allow_failure: "string" } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec allow failure should be a boolean value")
+ context 'returns errors if job allow_failure parameter is not a boolean' do
+ let(:config) { YAML.dump({ rspec: { script: "test", allow_failure: "string" } }) }
+
+ it_behaves_like 'returns errors', 'jobs:rspec allow failure should be a boolean value'
end
- it "returns errors if job stage is not a string" do
- config = YAML.dump({ rspec: { script: "test", type: 1 } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:type config should be a string")
+ context 'returns errors if job stage is not a string' do
+ let(:config) { YAML.dump({ rspec: { script: "test", type: 1 } }) }
+
+ it_behaves_like 'returns errors', 'jobs:rspec:type config should be a string'
end
- it "returns errors if job stage is not a pre-defined stage" do
- config = YAML.dump({ rspec: { script: "test", type: "acceptance" } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "rspec job: chosen stage does not exist; available stages are .pre, build, test, deploy, .post")
+ context 'returns errors if job stage is not a pre-defined stage' do
+ let(:config) { YAML.dump({ rspec: { script: "test", type: "acceptance" } }) }
+
+ it_behaves_like 'returns errors', 'rspec job: chosen stage does not exist; available stages are .pre, build, test, deploy, .post'
end
- it "returns errors if job stage is not a defined stage" do
- config = YAML.dump({ types: %w(build test), rspec: { script: "test", type: "acceptance" } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "rspec job: chosen stage does not exist; available stages are .pre, build, test, .post")
+ context 'returns errors if job stage is not a defined stage' do
+ let(:config) { YAML.dump({ types: %w(build test), rspec: { script: "test", type: "acceptance" } }) }
+
+ it_behaves_like 'returns errors', 'rspec job: chosen stage does not exist; available stages are .pre, build, test, .post'
end
- it "returns errors if stages is not an array" do
- config = YAML.dump({ stages: "test", rspec: { script: "test" } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "stages config should be an array of strings")
+ context 'returns errors if stages is not an array' do
+ let(:config) { YAML.dump({ stages: "test", rspec: { script: "test" } }) }
+
+ it_behaves_like 'returns errors', 'stages config should be an array of strings'
end
- it "returns errors if stages is not an array of strings" do
- config = YAML.dump({ stages: [true, "test"], rspec: { script: "test" } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "stages config should be an array of strings")
+ context 'returns errors if stages is not an array of strings' do
+ let(:config) { YAML.dump({ stages: [true, "test"], rspec: { script: "test" } }) }
+
+ it_behaves_like 'returns errors', 'stages config should be an array of strings'
end
- it "returns errors if variables is not a map" do
- config = YAML.dump({ variables: "test", rspec: { script: "test" } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "variables config should be a hash of key value pairs")
+ context 'returns errors if variables is not a map' do
+ let(:config) { YAML.dump({ variables: "test", rspec: { script: "test" } }) }
+
+ it_behaves_like 'returns errors', 'variables config should be a hash of key value pairs'
end
- it "returns errors if variables is not a map of key-value strings" do
- config = YAML.dump({ variables: { test: false }, rspec: { script: "test" } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "variables config should be a hash of key value pairs")
+ context 'returns errors if variables is not a map of key-value strings' do
+ let(:config) { YAML.dump({ variables: { test: false }, rspec: { script: "test" } }) }
+
+ it_behaves_like 'returns errors', 'variables config should be a hash of key value pairs'
end
- it "returns errors if job when is not on_success, on_failure or always" do
- config = YAML.dump({ rspec: { script: "test", when: 1 } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec when should be one of: #{Gitlab::Ci::Config::Entry::Job::ALLOWED_WHEN.join(', ')}")
+ context 'returns errors if job when is not on_success, on_failure or always' do
+ let(:config) { YAML.dump({ rspec: { script: "test", when: 1 } }) }
+
+ it_behaves_like 'returns errors', "jobs:rspec when should be one of: #{Gitlab::Ci::Config::Entry::Job::ALLOWED_WHEN.join(', ')}"
end
- it "returns errors if job artifacts:name is not an a string" do
- config = YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { name: 1 } } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:artifacts name should be a string")
+ context 'returns errors if job artifacts:name is not a string' do
+ let(:config) { YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { name: 1 } } }) }
+
+ it_behaves_like 'returns errors', 'jobs:rspec:artifacts name should be a string'
end
- it "returns errors if job artifacts:when is not an a predefined value" do
- config = YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { when: 1 } } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:artifacts when should be on_success, on_failure or always")
+ context 'returns errors if job artifacts:when is not a predefined value' do
+ let(:config) { YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { when: 1 } } }) }
+
+ it_behaves_like 'returns errors', 'jobs:rspec:artifacts when should be on_success, on_failure or always'
end
- it "returns errors if job artifacts:expire_in is not an a string" do
- config = YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { expire_in: 1 } } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:artifacts expire in should be a duration")
+ context 'returns errors if job artifacts:expire_in is not a string' do
+ let(:config) { YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { expire_in: 1 } } }) }
+
+ it_behaves_like 'returns errors', 'jobs:rspec:artifacts expire in should be a duration'
end
- it "returns errors if job artifacts:expire_in is not an a valid duration" do
- config = YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { expire_in: "7 elephants" } } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:artifacts expire in should be a duration")
+ context 'returns errors if job artifacts:expire_in is not a valid duration' do
+ let(:config) { YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { expire_in: "7 elephants" } } }) }
+
+ it_behaves_like 'returns errors', 'jobs:rspec:artifacts expire in should be a duration'
end
- it "returns errors if job artifacts:untracked is not an array of strings" do
- config = YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { untracked: "string" } } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:artifacts untracked should be a boolean value")
+ context 'returns errors if job artifacts:untracked is not an array of strings' do
+ let(:config) { YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { untracked: "string" } } }) }
+
+ it_behaves_like 'returns errors', 'jobs:rspec:artifacts untracked should be a boolean value'
end
- it "returns errors if job artifacts:paths is not an array of strings" do
- config = YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { paths: "string" } } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:artifacts paths should be an array of strings")
+ context 'returns errors if job artifacts:paths is not an array of strings' do
+ let(:config) { YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { paths: "string" } } }) }
+
+ it_behaves_like 'returns errors', 'jobs:rspec:artifacts paths should be an array of strings'
end
- it "returns errors if cache:untracked is not an array of strings" do
- config = YAML.dump({ cache: { untracked: "string" }, rspec: { script: "test" } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "cache:untracked config should be a boolean value")
+ context 'returns errors if cache:untracked is not an array of strings' do
+ let(:config) { YAML.dump({ cache: { untracked: "string" }, rspec: { script: "test" } }) }
+
+ it_behaves_like 'returns errors', 'cache:untracked config should be a boolean value'
end
- it "returns errors if cache:paths is not an array of strings" do
- config = YAML.dump({ cache: { paths: "string" }, rspec: { script: "test" } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "cache:paths config should be an array of strings")
+ context 'returns errors if cache:paths is not an array of strings' do
+ let(:config) { YAML.dump({ cache: { paths: "string" }, rspec: { script: "test" } }) }
+
+ it_behaves_like 'returns errors', 'cache:paths config should be an array of strings'
end
- it "returns errors if cache:key is not a string" do
- config = YAML.dump({ cache: { key: 1 }, rspec: { script: "test" } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "cache:key should be a hash, a string or a symbol")
+ context 'returns errors if cache:key is not a string' do
+ let(:config) { YAML.dump({ cache: { key: 1 }, rspec: { script: "test" } }) }
+
+ it_behaves_like 'returns errors', "cache:key should be a hash, a string or a symbol"
end
- it "returns errors if job cache:key is not an a string" do
- config = YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { key: 1 } } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:cache:key should be a hash, a string or a symbol")
+ context 'returns errors if job cache:key is not a string' do
+ let(:config) { YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { key: 1 } } }) }
+
+ it_behaves_like 'returns errors', "jobs:rspec:cache:key should be a hash, a string or a symbol"
end
- it 'returns errors if job cache:key:files is not an array of strings' do
- config = YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { key: { files: [1] } } } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'jobs:rspec:cache:key:files config should be an array of strings')
+ context 'returns errors if job cache:key:files is not an array of strings' do
+ let(:config) { YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { key: { files: [1] } } } }) }
+
+ it_behaves_like 'returns errors', 'jobs:rspec:cache:key:files config should be an array of strings'
end
- it 'returns errors if job cache:key:files is an empty array' do
- config = YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { key: { files: [] } } } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'jobs:rspec:cache:key:files config requires at least 1 item')
+ context 'returns errors if job cache:key:files is an empty array' do
+ let(:config) { YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { key: { files: [] } } } }) }
+
+ it_behaves_like 'returns errors', 'jobs:rspec:cache:key:files config requires at least 1 item'
end
- it 'returns errors if job defines only cache:key:prefix' do
- config = YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { key: { prefix: 'prefix-key' } } } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'jobs:rspec:cache:key config missing required keys: files')
+ context 'returns errors if job defines only cache:key:prefix' do
+ let(:config) { YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { key: { prefix: 'prefix-key' } } } }) }
+
+ it_behaves_like 'returns errors', 'jobs:rspec:cache:key config missing required keys: files'
end
- it 'returns errors if job cache:key:prefix is not an a string' do
- config = YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { key: { prefix: 1, files: ['file'] } } } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'jobs:rspec:cache:key:prefix config should be a string or symbol')
+ context 'returns errors if job cache:key:prefix is not a string' do
+ let(:config) { YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { key: { prefix: 1, files: ['file'] } } } }) }
+
+ it_behaves_like 'returns errors', 'jobs:rspec:cache:key:prefix config should be a string or symbol'
end
- it "returns errors if job cache:untracked is not an array of strings" do
- config = YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { untracked: "string" } } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:cache:untracked config should be a boolean value")
+ context "returns errors if job cache:untracked is not an array of strings" do
+ let(:config) { YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { untracked: "string" } } }) }
+
+ it_behaves_like 'returns errors', "jobs:rspec:cache:untracked config should be a boolean value"
end
- it "returns errors if job cache:paths is not an array of strings" do
- config = YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { paths: "string" } } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:cache:paths config should be an array of strings")
+ context "returns errors if job cache:paths is not an array of strings" do
+ let(:config) { YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { paths: "string" } } }) }
+
+ it_behaves_like 'returns errors', "jobs:rspec:cache:paths config should be an array of strings"
end
- it "returns errors if job dependencies is not an array of strings" do
- config = YAML.dump({ types: %w(build test), rspec: { script: "test", dependencies: "string" } })
- expect do
- Gitlab::Ci::YamlProcessor.new(config)
- end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec dependencies should be an array of strings")
+ context "returns errors if job dependencies is not an array of strings" do
+ let(:config) { YAML.dump({ types: %w(build test), rspec: { script: "test", dependencies: "string" } }) }
+
+ it_behaves_like 'returns errors', "jobs:rspec dependencies should be an array of strings"
end
- it 'returns errors if pipeline variables expression policy is invalid' do
- config = YAML.dump({ rspec: { script: 'test', only: { variables: ['== null'] } } })
+ context 'returns errors if pipeline variables expression policy is invalid' do
+ let(:config) { YAML.dump({ rspec: { script: 'test', only: { variables: ['== null'] } } }) }
- expect { Gitlab::Ci::YamlProcessor.new(config) }
- .to raise_error(Gitlab::Ci::YamlProcessor::ValidationError,
- 'jobs:rspec:only variables invalid expression syntax')
+ it_behaves_like 'returns errors', 'jobs:rspec:only variables invalid expression syntax'
end
- it 'returns errors if pipeline changes policy is invalid' do
- config = YAML.dump({ rspec: { script: 'test', only: { changes: [1] } } })
+ context 'returns errors if pipeline changes policy is invalid' do
+ let(:config) { YAML.dump({ rspec: { script: 'test', only: { changes: [1] } } }) }
- expect { Gitlab::Ci::YamlProcessor.new(config) }
- .to raise_error(Gitlab::Ci::YamlProcessor::ValidationError,
- 'jobs:rspec:only changes should be an array of strings')
+ it_behaves_like 'returns errors', 'jobs:rspec:only changes should be an array of strings'
end
- it 'returns errors if extended hash configuration is invalid' do
- config = YAML.dump({ rspec: { extends: 'something', script: 'test' } })
+ context 'returns errors if extended hash configuration is invalid' do
+ let(:config) { YAML.dump({ rspec: { extends: 'something', script: 'test' } }) }
- expect { Gitlab::Ci::YamlProcessor.new(config) }
- .to raise_error(Gitlab::Ci::YamlProcessor::ValidationError,
- 'rspec: unknown keys in `extends` (something)')
+ it_behaves_like 'returns errors', 'rspec: unknown keys in `extends` (something)'
end
- it 'returns errors if parallel is invalid' do
- config = YAML.dump({ rspec: { parallel: 'test', script: 'test' } })
+ context 'returns errors if parallel is invalid' do
+ let(:config) { YAML.dump({ rspec: { parallel: 'test', script: 'test' } }) }
- expect { Gitlab::Ci::YamlProcessor.new(config) }
- .to raise_error(Gitlab::Ci::YamlProcessor::ValidationError,
- 'jobs:rspec:parallel should be an integer or a hash')
+ it_behaves_like 'returns errors', 'jobs:rspec:parallel should be an integer or a hash'
end
end
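
Note: the conversions above all lean on a 'returns errors' shared example. Its definition is not part of this hunk; the following is only a hedged sketch of what it presumably looks like, with the result accessors inferred from the '#execute' examples further down (result.valid?, result.errors):

# Hedged sketch only -- not the definition from the spec file itself.
RSpec.shared_examples 'returns errors' do |error_message|
  it 'reports the expected validation error' do
    result = Gitlab::Ci::YamlProcessor.new(config).execute

    expect(result.errors).to include(error_message)
  end
end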
@@ -2750,8 +2633,8 @@ module Gitlab
end
end
- describe '.new_with_validation_errors' do
- subject { Gitlab::Ci::YamlProcessor.new_with_validation_errors(content) }
+ describe '#execute' do
+ subject { Gitlab::Ci::YamlProcessor.new(content).execute }
context 'when the YAML could not be parsed' do
let(:content) { YAML.dump('invalid: yaml: test') }
@@ -2759,7 +2642,6 @@ module Gitlab
it 'returns errors and empty configuration' do
expect(subject.valid?).to eq(false)
expect(subject.errors).to eq(['Invalid configuration format'])
- expect(subject.config).to be_blank
end
end
@@ -2769,7 +2651,6 @@ module Gitlab
it 'returns errors and empty configuration' do
expect(subject.valid?).to eq(false)
expect(subject.errors).to eq(['jobs:rspec:tags config should be an array of strings'])
- expect(subject.config).to be_blank
end
end
@@ -2781,7 +2662,6 @@ module Gitlab
expect(subject.errors).to contain_exactly(
'jobs:rspec config contains unknown keys: bad_tags',
'jobs:rspec rules should be an array of hashes')
- expect(subject.config).to be_blank
end
end
@@ -2791,7 +2671,6 @@ module Gitlab
it 'returns errors and empty configuration' do
expect(subject.valid?).to eq(false)
expect(subject.errors).to eq(['Please provide content of .gitlab-ci.yml'])
- expect(subject.config).to be_blank
end
end
@@ -2801,7 +2680,6 @@ module Gitlab
it 'returns errors and empty configuration' do
expect(subject.valid?).to eq(false)
expect(subject.errors).to eq(['Unknown alias: bad_alias'])
- expect(subject.config).to be_blank
end
end
@@ -2811,7 +2689,7 @@ module Gitlab
it 'returns errors and empty configuration' do
expect(subject.valid?).to eq(true)
expect(subject.errors).to be_empty
- expect(subject.config).to be_present
+ expect(subject.builds).to be_present
end
end
end
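
For reference, the expectations above suggest the new entry point returns a result object instead of raising. A hedged usage sketch, with accessor behaviour inferred from these examples only:

# Hedged sketch of how callers consume the result object (not a confirmed public API).
result = Gitlab::Ci::YamlProcessor.new(content).execute

result.valid?  # => false when the YAML cannot be parsed or fails validation
result.errors  # => e.g. ['Invalid configuration format']
result.builds  # => present only for a valid configuration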
diff --git a/spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb b/spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb
index 47b2cf5dc4a..efdfc0a980b 100644
--- a/spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb
+++ b/spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb
@@ -3,12 +3,16 @@
require 'spec_helper'
RSpec.describe Gitlab::Cleanup::OrphanLfsFileReferences do
+ include ProjectForksHelper
+
let(:null_logger) { Logger.new('/dev/null') }
let(:project) { create(:project, :repository, lfs_enabled: true) }
let(:lfs_object) { create(:lfs_object) }
let!(:invalid_reference) { create(:lfs_objects_project, project: project, lfs_object: lfs_object) }
+ subject(:service) { described_class.new(project, logger: null_logger, dry_run: dry_run) }
+
before do
allow(null_logger).to receive(:info)
@@ -21,25 +25,66 @@ RSpec.describe Gitlab::Cleanup::OrphanLfsFileReferences do
end
context 'dry run' do
+ let(:dry_run) { true }
+
it 'prints messages and does not delete references' do
expect(null_logger).to receive(:info).with("[DRY RUN] Looking for orphan LFS files for project #{project.name_with_namespace}")
expect(null_logger).to receive(:info).with("[DRY RUN] Found invalid references: 1")
- expect { described_class.new(project, logger: null_logger).run! }
- .not_to change { project.lfs_objects.count }
+ expect { service.run! }.not_to change { project.lfs_objects.count }
end
end
context 'regular run' do
+ let(:dry_run) { false }
+
it 'prints messages and deletes invalid reference' do
expect(null_logger).to receive(:info).with("Looking for orphan LFS files for project #{project.name_with_namespace}")
expect(null_logger).to receive(:info).with("Removed invalid references: 1")
expect(ProjectCacheWorker).to receive(:perform_async).with(project.id, [], [:lfs_objects_size])
- expect { described_class.new(project, logger: null_logger, dry_run: false).run! }
- .to change { project.lfs_objects.count }.from(2).to(1)
+ expect { service.run! }.to change { project.lfs_objects.count }.from(2).to(1)
expect(LfsObjectsProject.exists?(invalid_reference.id)).to be_falsey
end
+
+ context 'LFS object is in design repository' do
+ before do
+ expect(project.design_repository).to receive(:exists?).and_return(true)
+
+ stub_lfs_pointers(project.design_repository, lfs_object.oid)
+ end
+
+ it 'is not removed' do
+ expect { service.run! }.not_to change { project.lfs_objects.count }
+ end
+ end
+
+ context 'LFS object is in wiki repository' do
+ before do
+ expect(project.wiki.repository).to receive(:exists?).and_return(true)
+
+ stub_lfs_pointers(project.wiki.repository, lfs_object.oid)
+ end
+
+ it 'is not removed' do
+ expect { service.run! }.not_to change { project.lfs_objects.count }
+ end
+ end
+ end
+
+ context 'LFS for project snippets' do
+ let(:snippet) { create(:project_snippet) }
+
+ it 'is disabled' do
+ # Support project snippets here before enabling LFS for them
+ expect(snippet.repository.lfs_enabled?).to be_falsy
+ end
+ end
+
+ def stub_lfs_pointers(repo, *oids)
+ expect(repo.gitaly_blob_client)
+ .to receive(:get_all_lfs_pointers)
+ .and_return(oids.map { |oid| OpenStruct.new(lfs_oid: oid) })
end
end
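
The stub helper above only populates #lfs_oid on each returned object, which suggests that is all the cleanup reads from Gitaly's pointer listing. A hedged alternative using a plain Struct double instead of OpenStruct:

# Hedged variant of the helper above; assumes only #lfs_oid is consumed.
LfsPointerStub = Struct.new(:lfs_oid)

def stub_lfs_pointers(repo, *oids)
  allow(repo.gitaly_blob_client)
    .to receive(:get_all_lfs_pointers)
    .and_return(oids.map { |oid| LfsPointerStub.new(oid) })
end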
diff --git a/spec/lib/gitlab/conan_token_spec.rb b/spec/lib/gitlab/conan_token_spec.rb
index b17f2eaa8d8..be1d3e757f5 100644
--- a/spec/lib/gitlab/conan_token_spec.rb
+++ b/spec/lib/gitlab/conan_token_spec.rb
@@ -85,7 +85,7 @@ RSpec.describe Gitlab::ConanToken do
it 'returns the encoded JWT' do
allow(SecureRandom).to receive(:uuid).and_return('u-u-i-d')
- Timecop.freeze do
+ freeze_time do
jwt = build_jwt(access_token_id: 123, user_id: 456)
token = described_class.new(access_token_id: 123, user_id: 456)
diff --git a/spec/lib/gitlab/consul/internal_spec.rb b/spec/lib/gitlab/consul/internal_spec.rb
new file mode 100644
index 00000000000..5889dd8b41d
--- /dev/null
+++ b/spec/lib/gitlab/consul/internal_spec.rb
@@ -0,0 +1,139 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Consul::Internal do
+ let(:api_url) { 'http://127.0.0.1:8500' }
+
+ let(:consul_settings) do
+ {
+ api_url: api_url
+ }
+ end
+
+ before do
+ stub_config(consul: consul_settings)
+ end
+
+ describe '.api_url' do
+ it 'returns correct value' do
+ expect(described_class.api_url).to eq(api_url)
+ end
+
+ context 'when consul setting is not present in gitlab.yml' do
+ before do
+ allow(Gitlab.config).to receive(:consul).and_raise(Settingslogic::MissingSetting)
+ end
+
+ it 'does not fail' do
+ expect(described_class.api_url).to be_nil
+ end
+ end
+ end
+
+ shared_examples 'handles failure response' do
+ it 'raises Gitlab::Consul::Internal::SocketError when SocketError is rescued' do
+ stub_consul_discover_prometheus.to_raise(::SocketError)
+
+ expect { subject }
+ .to raise_error(described_class::SocketError)
+ end
+
+ it 'raises Gitlab::Consul::Internal::SSLError when OpenSSL::SSL::SSLError is rescued' do
+ stub_consul_discover_prometheus.to_raise(OpenSSL::SSL::SSLError)
+
+ expect { subject }
+ .to raise_error(described_class::SSLError)
+ end
+
+ it 'raises Gitlab::Consul::Internal::ECONNREFUSED when Errno::ECONNREFUSED is rescued' do
+ stub_consul_discover_prometheus.to_raise(Errno::ECONNREFUSED)
+
+ expect { subject }
+ .to raise_error(described_class::ECONNREFUSED)
+ end
+
+ it 'raises Consul::Internal::UnexpectedResponseError when StandardError is rescued' do
+ stub_consul_discover_prometheus.to_raise(StandardError)
+
+ expect { subject }
+ .to raise_error(described_class::UnexpectedResponseError)
+ end
+
+ it 'raises Consul::Internal::UnexpectedResponseError when request returns 500' do
+ stub_consul_discover_prometheus.to_return(status: 500, body: '{ message: "FAIL!" }')
+
+ expect { subject }
+ .to raise_error(described_class::UnexpectedResponseError)
+ end
+
+ it 'raises Consul::Internal::UnexpectedResponseError when request returns non json data' do
+ stub_consul_discover_prometheus.to_return(status: 200, body: 'not json')
+
+ expect { subject }
+ .to raise_error(described_class::UnexpectedResponseError)
+ end
+ end
+
+ shared_examples 'returns nil given blank value of' do |input_symbol|
+ [nil, ''].each do |value|
+ let(input_symbol) { value }
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ describe '.discover_service' do
+ subject { described_class.discover_service(service_name: service_name) }
+
+ let(:service_name) { 'prometheus' }
+
+ it_behaves_like 'returns nil given blank value of', :api_url
+
+ it_behaves_like 'returns nil given blank value of', :service_name
+
+ context 'one service discovered' do
+ before do
+ stub_consul_discover_prometheus.to_return(status: 200, body: '[{"ServiceAddress":"prom.net","ServicePort":9090}]')
+ end
+
+ it 'returns the service address and port' do
+ is_expected.to eq(["prom.net", 9090])
+ end
+ end
+
+ context 'multiple services discovered' do
+ before do
+ stub_consul_discover_prometheus
+ .to_return(status: 200, body: '[{"ServiceAddress":"prom_1.net","ServicePort":9090},{"ServiceAddress":"prom.net","ServicePort":9090}]')
+ end
+
+ it 'uses the first service' do
+ is_expected.to eq(["prom_1.net", 9090])
+ end
+ end
+
+ it_behaves_like 'handles failure response'
+ end
+
+ describe '.discover_prometheus_server_address' do
+ subject { described_class.discover_prometheus_server_address }
+
+ before do
+ stub_consul_discover_prometheus
+ .to_return(status: 200, body: '[{"ServiceAddress":"prom.net","ServicePort":9090}]')
+ end
+
+ it 'returns the server address' do
+ is_expected.to eq('prom.net:9090')
+ end
+
+ it_behaves_like 'returns nil given blank value of', :api_url
+
+ it_behaves_like 'handles failure response'
+ end
+
+ def stub_consul_discover_prometheus
+ stub_request(:get, /v1\/catalog\/service\/prometheus/)
+ end
+end
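
Based on the stubbed Consul responses above, a hedged usage sketch of the discovery helpers (return shapes inferred from the expectations, not from the implementation):

# Hedged sketch only.
address, port = Gitlab::Consul::Internal.discover_service(service_name: 'prometheus')
# => ['prom.net', 9090] (the first service when several are registered)

Gitlab::Consul::Internal.discover_prometheus_server_address
# => 'prom.net:9090', or nil when consul.api_url is not configured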
diff --git a/spec/lib/gitlab/cycle_analytics/code_stage_spec.rb b/spec/lib/gitlab/cycle_analytics/code_stage_spec.rb
index afab19de2ab..17104715580 100644
--- a/spec/lib/gitlab/cycle_analytics/code_stage_spec.rb
+++ b/spec/lib/gitlab/cycle_analytics/code_stage_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe Gitlab::CycleAnalytics::CodeStage do
describe '#project_median' do
around do |example|
- Timecop.freeze { example.run }
+ freeze_time { example.run }
end
it 'counts median from issues with metrics' do
@@ -76,7 +76,7 @@ RSpec.describe Gitlab::CycleAnalytics::CodeStage do
describe '#group_median' do
around do |example|
- Timecop.freeze { example.run }
+ freeze_time { example.run }
end
it 'counts median from issues with metrics' do
diff --git a/spec/lib/gitlab/cycle_analytics/events_spec.rb b/spec/lib/gitlab/cycle_analytics/events_spec.rb
index 246003cde84..e0a8e2c17a3 100644
--- a/spec/lib/gitlab/cycle_analytics/events_spec.rb
+++ b/spec/lib/gitlab/cycle_analytics/events_spec.rb
@@ -306,48 +306,6 @@ RSpec.describe 'cycle analytics events' do
end
end
- describe '#production_events', :sidekiq_might_not_need_inline do
- let(:stage) { :production }
- let!(:context) { create(:issue, project: project, created_at: 2.days.ago) }
-
- before do
- merge_merge_requests_closing_issue(user, project, context)
- deploy_master(user, project)
- end
-
- it 'has the total time' do
- expect(events.first[:total_time]).not_to be_empty
- end
-
- it 'has a title' do
- expect(events.first[:title]).to eq(context.title)
- end
-
- it 'has the URL' do
- expect(events.first[:url]).not_to be_nil
- end
-
- it 'has an iid' do
- expect(events.first[:iid]).to eq(context.iid.to_s)
- end
-
- it 'has a created_at timestamp' do
- expect(events.first[:created_at]).to end_with('ago')
- end
-
- it "has the author's URL" do
- expect(events.first[:author][:web_url]).not_to be_nil
- end
-
- it "has the author's avatar URL" do
- expect(events.first[:author][:avatar_url]).not_to be_nil
- end
-
- it "has the author's name" do
- expect(events.first[:author][:name]).to eq(context.author.name)
- end
- end
-
def setup(context)
milestone = create(:milestone, project: project)
context.update(milestone: milestone)
diff --git a/spec/lib/gitlab/cycle_analytics/issue_stage_spec.rb b/spec/lib/gitlab/cycle_analytics/issue_stage_spec.rb
index 9ec71e6ed72..c7ab2b9b84b 100644
--- a/spec/lib/gitlab/cycle_analytics/issue_stage_spec.rb
+++ b/spec/lib/gitlab/cycle_analytics/issue_stage_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe Gitlab::CycleAnalytics::IssueStage do
describe '#median' do
around do |example|
- Timecop.freeze { example.run }
+ freeze_time { example.run }
end
it 'counts median from issues with metrics' do
@@ -65,7 +65,7 @@ RSpec.describe Gitlab::CycleAnalytics::IssueStage do
describe '#group_median' do
around do |example|
- Timecop.freeze { example.run }
+ freeze_time { example.run }
end
it 'counts median from issues with metrics' do
@@ -87,7 +87,7 @@ RSpec.describe Gitlab::CycleAnalytics::IssueStage do
describe '#group_median' do
around do |example|
- Timecop.freeze { example.run }
+ freeze_time { example.run }
end
it 'counts median from issues with metrics' do
diff --git a/spec/lib/gitlab/cycle_analytics/permissions_spec.rb b/spec/lib/gitlab/cycle_analytics/permissions_spec.rb
index 3fd48993e5f..7650ff3cace 100644
--- a/spec/lib/gitlab/cycle_analytics/permissions_spec.rb
+++ b/spec/lib/gitlab/cycle_analytics/permissions_spec.rb
@@ -21,10 +21,6 @@ RSpec.describe Gitlab::CycleAnalytics::Permissions do
expect(subject[:staging]).to eq(false)
end
- it 'has no permissions to production stage' do
- expect(subject[:production]).to eq(false)
- end
-
it 'has no permissions to code stage' do
expect(subject[:code]).to eq(false)
end
@@ -55,10 +51,6 @@ RSpec.describe Gitlab::CycleAnalytics::Permissions do
expect(subject[:staging]).to eq(true)
end
- it 'has permissions to production stage' do
- expect(subject[:production]).to eq(true)
- end
-
it 'has permissions to code stage' do
expect(subject[:code]).to eq(true)
end
@@ -121,9 +113,5 @@ RSpec.describe Gitlab::CycleAnalytics::Permissions do
it 'has no permissions to issue stage' do
expect(subject[:issue]).to eq(false)
end
-
- it 'has no permissions to production stage' do
- expect(subject[:production]).to eq(false)
- end
end
end
diff --git a/spec/lib/gitlab/cycle_analytics/plan_stage_spec.rb b/spec/lib/gitlab/cycle_analytics/plan_stage_spec.rb
index 66d00edacb7..2547c05c025 100644
--- a/spec/lib/gitlab/cycle_analytics/plan_stage_spec.rb
+++ b/spec/lib/gitlab/cycle_analytics/plan_stage_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe Gitlab::CycleAnalytics::PlanStage do
describe '#project_median' do
around do |example|
- Timecop.freeze { example.run }
+ freeze_time { example.run }
end
it 'counts median from issues with metrics' do
@@ -67,7 +67,7 @@ RSpec.describe Gitlab::CycleAnalytics::PlanStage do
describe '#group_median' do
around do |example|
- Timecop.freeze { example.run }
+ freeze_time { example.run }
end
it 'counts median from issues with metrics' do
diff --git a/spec/lib/gitlab/cycle_analytics/production_stage_spec.rb b/spec/lib/gitlab/cycle_analytics/production_stage_spec.rb
deleted file mode 100644
index 73b17194f72..00000000000
--- a/spec/lib/gitlab/cycle_analytics/production_stage_spec.rb
+++ /dev/null
@@ -1,9 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::CycleAnalytics::ProductionStage do
- let(:stage_name) { 'Total' }
-
- it_behaves_like 'base stage'
-end
diff --git a/spec/lib/gitlab/cycle_analytics/review_stage_spec.rb b/spec/lib/gitlab/cycle_analytics/review_stage_spec.rb
index cdd1cca6837..5593013740e 100644
--- a/spec/lib/gitlab/cycle_analytics/review_stage_spec.rb
+++ b/spec/lib/gitlab/cycle_analytics/review_stage_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe Gitlab::CycleAnalytics::ReviewStage do
describe '#project_median' do
around do |example|
- Timecop.freeze { example.run }
+ freeze_time { example.run }
end
it 'counts median from issues with metrics' do
@@ -70,7 +70,7 @@ RSpec.describe Gitlab::CycleAnalytics::ReviewStage do
describe '#group_median' do
around do |example|
- Timecop.freeze { example.run }
+ freeze_time { example.run }
end
it 'counts median from issues with metrics' do
diff --git a/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb b/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb
index 9ece24074e7..719d4a69985 100644
--- a/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb
+++ b/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb
@@ -231,7 +231,7 @@ RSpec.describe Gitlab::CycleAnalytics::StageSummary do
context 'when `from` and `to` are within a day' do
it 'returns the number of deployments made on that day' do
- Timecop.freeze(Time.now) do
+ freeze_time do
create(:deployment, :success, project: project)
options[:from] = options[:to] = Time.now
diff --git a/spec/lib/gitlab/cycle_analytics/staging_stage_spec.rb b/spec/lib/gitlab/cycle_analytics/staging_stage_spec.rb
index 69e42adb139..852f7041dc6 100644
--- a/spec/lib/gitlab/cycle_analytics/staging_stage_spec.rb
+++ b/spec/lib/gitlab/cycle_analytics/staging_stage_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe Gitlab::CycleAnalytics::StagingStage do
describe '#project_median' do
around do |example|
- Timecop.freeze { example.run }
+ freeze_time { example.run }
end
it 'counts median from issues with metrics' do
@@ -79,7 +79,7 @@ RSpec.describe Gitlab::CycleAnalytics::StagingStage do
describe '#group_median' do
around do |example|
- Timecop.freeze { example.run }
+ freeze_time { example.run }
end
it 'counts median from issues with metrics' do
diff --git a/spec/lib/gitlab/cycle_analytics/test_stage_spec.rb b/spec/lib/gitlab/cycle_analytics/test_stage_spec.rb
index 9a207d32167..49ee6624260 100644
--- a/spec/lib/gitlab/cycle_analytics/test_stage_spec.rb
+++ b/spec/lib/gitlab/cycle_analytics/test_stage_spec.rb
@@ -37,7 +37,7 @@ RSpec.describe Gitlab::CycleAnalytics::TestStage do
end
around do |example|
- Timecop.freeze { example.run }
+ freeze_time { example.run }
end
it 'counts median from issues with metrics' do
diff --git a/spec/lib/gitlab/danger/changelog_spec.rb b/spec/lib/gitlab/danger/changelog_spec.rb
index 3c67e9ca8ea..2da60f4f8bd 100644
--- a/spec/lib/gitlab/danger/changelog_spec.rb
+++ b/spec/lib/gitlab/danger/changelog_spec.rb
@@ -16,20 +16,47 @@ RSpec.describe Gitlab::Danger::Changelog do
let(:fake_gitlab) { double('fake-gitlab', mr_labels: mr_labels, mr_json: mr_json) }
let(:changes_by_category) { nil }
+ let(:sanitize_mr_title) { nil }
let(:ee?) { false }
- let(:fake_helper) { double('fake-helper', changes_by_category: changes_by_category, ee?: ee?) }
+ let(:fake_helper) { double('fake-helper', changes_by_category: changes_by_category, sanitize_mr_title: sanitize_mr_title, ee?: ee?) }
let(:fake_danger) { new_fake_danger.include(described_class) }
subject(:changelog) { fake_danger.new(git: fake_git, gitlab: fake_gitlab, helper: fake_helper) }
- describe '#needed?' do
+ describe '#required?' do
+ subject { changelog.required? }
+
+ context 'added files contain a migration' do
+ [
+ 'db/migrate/20200000000000_new_migration.rb',
+ 'db/post_migrate/20200000000000_new_migration.rb'
+ ].each do |file_path|
+ let(:added_files) { [file_path] }
+
+ it { is_expected.to be_truthy }
+ end
+ end
+
+ context 'added files do not contain a migration' do
+ [
+ 'app/models/model.rb',
+ 'app/assets/javascripts/file.js'
+ ].each do |file_path|
+ let(:added_files) { [file_path] }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+ end
+
+ describe '#optional?' do
let(:category_with_changelog) { :backend }
let(:label_with_changelog) { 'frontend' }
let(:category_without_changelog) { Gitlab::Danger::Changelog::NO_CHANGELOG_CATEGORIES.first }
let(:label_without_changelog) { Gitlab::Danger::Changelog::NO_CHANGELOG_LABELS.first }
- subject { changelog.needed? }
+ subject { changelog.optional? }
context 'when MR contains only categories requiring no changelog' do
let(:changes_by_category) { { category_without_changelog => nil } }
@@ -121,4 +148,43 @@ RSpec.describe Gitlab::Danger::Changelog do
it { is_expected.to be_falsy }
end
end
+
+ describe '#modified_text' do
+ let(:sanitize_mr_title) { 'Fake Title' }
+ let(:mr_json) { { "iid" => 1234, "title" => sanitize_mr_title } }
+
+ subject { changelog.modified_text }
+
+ it do
+ expect(subject).to include('CHANGELOG.md was edited')
+ expect(subject).to include('bin/changelog -m 1234 "Fake Title"')
+ expect(subject).to include('bin/changelog --ee -m 1234 "Fake Title"')
+ end
+ end
+
+ describe '#required_text' do
+ let(:sanitize_mr_title) { 'Fake Title' }
+ let(:mr_json) { { "iid" => 1234, "title" => sanitize_mr_title } }
+
+ subject { changelog.required_text }
+
+ it do
+ expect(subject).to include('CHANGELOG missing')
+ expect(subject).to include('bin/changelog -m 1234 "Fake Title"')
+ expect(subject).not_to include('--ee')
+ end
+ end
+
+ describe '#optional_text' do
+ let(:sanitize_mr_title) { 'Fake Title' }
+ let(:mr_json) { { "iid" => 1234, "title" => sanitize_mr_title } }
+
+ subject { changelog.optional_text }
+
+ it do
+ expect(subject).to include('CHANGELOG missing')
+ expect(subject).to include('bin/changelog -m 1234 "Fake Title"')
+ expect(subject).to include('bin/changelog --ee -m 1234 "Fake Title"')
+ end
+ end
end
diff --git a/spec/lib/gitlab/danger/helper_spec.rb b/spec/lib/gitlab/danger/helper_spec.rb
index e5018e46634..c7d55c396ef 100644
--- a/spec/lib/gitlab/danger/helper_spec.rb
+++ b/spec/lib/gitlab/danger/helper_spec.rb
@@ -76,6 +76,30 @@ RSpec.describe Gitlab::Danger::Helper do
end
end
+ describe '#changed_lines' do
+ subject { helper.changed_lines('changed_file.rb') }
+
+ before do
+ allow(fake_git).to receive(:diff_for_file).with('changed_file.rb').and_return(diff)
+ end
+
+ context 'when file has diff' do
+ let(:diff) { double(:diff, patch: "+ # New change here\n+ # New change there") }
+
+ it 'returns file changes' do
+ is_expected.to eq(['+ # New change here', '+ # New change there'])
+ end
+ end
+
+ context 'when file has no diff (renamed without changes)' do
+ let(:diff) { nil }
+
+ it 'returns a blank array' do
+ is_expected.to eq([])
+ end
+ end
+ end
+
describe "changed_files" do
it 'returns list of changed files matching given regex' do
expect(helper).to receive(:all_changed_files).and_return(%w[migration.rb usage_data.rb])
@@ -371,22 +395,6 @@ RSpec.describe Gitlab::Danger::Helper do
end
end
- describe '#missing_database_labels' do
- subject { helper.missing_database_labels(current_mr_labels) }
-
- context 'when current merge request has ~database::review pending' do
- let(:current_mr_labels) { ['database::review pending', 'feature'] }
-
- it { is_expected.to match_array(['database']) }
- end
-
- context 'when current merge request does not have ~database::review pending' do
- let(:current_mr_labels) { ['feature'] }
-
- it { is_expected.to match_array(['database', 'database::review pending']) }
- end
- end
-
describe '#sanitize_mr_title' do
where(:mr_title, :expected_mr_title) do
'My MR title' | 'My MR title'
diff --git a/spec/lib/gitlab/danger/teammate_spec.rb b/spec/lib/gitlab/danger/teammate_spec.rb
index 12819614fab..6fd32493d6b 100644
--- a/spec/lib/gitlab/danger/teammate_spec.rb
+++ b/spec/lib/gitlab/danger/teammate_spec.rb
@@ -170,47 +170,38 @@ RSpec.describe Gitlab::Danger::Teammate do
end
describe '#markdown_name' do
- context 'when timezone_experiment == false' do
- it 'returns markdown name as-is' do
- expect(subject.markdown_name).to eq(options['markdown_name'])
- expect(subject.markdown_name(timezone_experiment: false)).to eq(options['markdown_name'])
- end
+ it 'returns markdown name with timezone info' do
+ expect(subject.markdown_name).to eq("#{options['markdown_name']} (UTC+2)")
end
- context 'when timezone_experiment == true' do
- it 'returns markdown name with timezone info' do
- expect(subject.markdown_name(timezone_experiment: true)).to eq("#{options['markdown_name']} (UTC+2)")
- end
-
- context 'when offset is 1.5' do
- let(:tz_offset_hours) { 1.5 }
+ context 'when offset is 1.5' do
+ let(:tz_offset_hours) { 1.5 }
- it 'returns markdown name with timezone info, not truncated' do
- expect(subject.markdown_name(timezone_experiment: true)).to eq("#{options['markdown_name']} (UTC+1.5)")
- end
+ it 'returns markdown name with timezone info, not truncated' do
+ expect(subject.markdown_name).to eq("#{options['markdown_name']} (UTC+1.5)")
end
+ end
- context 'when author is given' do
- where(:tz_offset_hours, :author_offset, :diff_text) do
- -12 | -10 | "2 hours behind `@mario`"
- -10 | -12 | "2 hours ahead of `@mario`"
- -10 | 2 | "12 hours behind `@mario`"
- 2 | 4 | "2 hours behind `@mario`"
- 4 | 2 | "2 hours ahead of `@mario`"
- 2 | 3 | "1 hour behind `@mario`"
- 3 | 2 | "1 hour ahead of `@mario`"
- 2 | 2 | "same timezone as `@mario`"
- end
+ context 'when author is given' do
+ where(:tz_offset_hours, :author_offset, :diff_text) do
+ -12 | -10 | "2 hours behind `@mario`"
+ -10 | -12 | "2 hours ahead of `@mario`"
+ -10 | 2 | "12 hours behind `@mario`"
+ 2 | 4 | "2 hours behind `@mario`"
+ 4 | 2 | "2 hours ahead of `@mario`"
+ 2 | 3 | "1 hour behind `@mario`"
+ 3 | 2 | "1 hour ahead of `@mario`"
+ 2 | 2 | "same timezone as `@mario`"
+ end
- with_them do
- it 'returns markdown name with timezone info' do
- author = described_class.new(options.merge('username' => 'mario', 'tz_offset_hours' => author_offset))
+ with_them do
+ it 'returns markdown name with timezone info' do
+ author = described_class.new(options.merge('username' => 'mario', 'tz_offset_hours' => author_offset))
- floored_offset_hours = subject.__send__(:floored_offset_hours)
- utc_offset = floored_offset_hours >= 0 ? "+#{floored_offset_hours}" : floored_offset_hours
+ floored_offset_hours = subject.__send__(:floored_offset_hours)
+ utc_offset = floored_offset_hours >= 0 ? "+#{floored_offset_hours}" : floored_offset_hours
- expect(subject.markdown_name(timezone_experiment: true, author: author)).to eq("#{options['markdown_name']} (UTC#{utc_offset}, #{diff_text})")
- end
+ expect(subject.markdown_name(author: author)).to eq("#{options['markdown_name']} (UTC#{utc_offset}, #{diff_text})")
end
end
end
diff --git a/spec/lib/gitlab/data_builder/deployment_spec.rb b/spec/lib/gitlab/data_builder/deployment_spec.rb
index 57bde6262a9..155e66e2fcd 100644
--- a/spec/lib/gitlab/data_builder/deployment_spec.rb
+++ b/spec/lib/gitlab/data_builder/deployment_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::DataBuilder::Deployment do
describe '.build' do
it 'returns the object kind for a deployment' do
- deployment = build(:deployment)
+ deployment = build(:deployment, deployable: nil, environment: create(:environment))
data = described_class.build(deployment)
diff --git a/spec/lib/gitlab/database/background_migration_job_spec.rb b/spec/lib/gitlab/database/background_migration_job_spec.rb
index 40f47325be3..dd5bf8b512f 100644
--- a/spec/lib/gitlab/database/background_migration_job_spec.rb
+++ b/spec/lib/gitlab/database/background_migration_job_spec.rb
@@ -71,6 +71,15 @@ RSpec.describe Gitlab::Database::BackgroundMigrationJob do
expect(job4.reload).to be_pending
end
+ it 'returns the number of jobs updated' do
+ expect(described_class.succeeded.count).to eq(0)
+
+ jobs_updated = described_class.mark_all_as_succeeded('::TestJob', [1, 100])
+
+ expect(jobs_updated).to eq(2)
+ expect(described_class.succeeded.count).to eq(2)
+ end
+
context 'when previous matching jobs have already succeeded' do
let(:initial_time) { Time.now.round }
let!(:job1) { create(:background_migration_job, :succeeded, created_at: initial_time, updated_at: initial_time) }
diff --git a/spec/lib/gitlab/database/batch_count_spec.rb b/spec/lib/gitlab/database/batch_count_spec.rb
index 1f84a915cdc..71d3666602f 100644
--- a/spec/lib/gitlab/database/batch_count_spec.rb
+++ b/spec/lib/gitlab/database/batch_count_spec.rb
@@ -9,12 +9,16 @@ RSpec.describe Gitlab::Database::BatchCount do
let(:column) { :author_id }
let(:in_transaction) { false }
- let(:user) { create(:user) }
- let(:another_user) { create(:user) }
- before do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:another_user) { create(:user) }
+
+ before_all do
create_list(:issue, 3, author: user)
create_list(:issue, 2, author: another_user)
+ end
+
+ before do
allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(in_transaction)
end
diff --git a/spec/lib/gitlab/database/concurrent_reindex_spec.rb b/spec/lib/gitlab/database/concurrent_reindex_spec.rb
new file mode 100644
index 00000000000..4e2c3f547d4
--- /dev/null
+++ b/spec/lib/gitlab/database/concurrent_reindex_spec.rb
@@ -0,0 +1,207 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::ConcurrentReindex, '#execute' do
+ subject { described_class.new(index_name, logger: logger) }
+
+ let(:table_name) { '_test_reindex_table' }
+ let(:column_name) { '_test_column' }
+ let(:index_name) { '_test_reindex_index' }
+ let(:logger) { double('logger', debug: nil, info: nil, error: nil) }
+ let(:connection) { ActiveRecord::Base.connection }
+
+ before do
+ connection.execute(<<~SQL)
+ CREATE TABLE #{table_name} (
+ id serial NOT NULL PRIMARY KEY,
+ #{column_name} integer NOT NULL);
+
+ CREATE INDEX #{index_name} ON #{table_name} (#{column_name});
+ SQL
+ end
+
+ context 'when the index does not exist' do
+ before do
+ connection.execute(<<~SQL)
+ DROP INDEX #{index_name}
+ SQL
+ end
+
+ it 'raises an error' do
+ expect { subject.execute }.to raise_error(described_class::ReindexError, /does not exist/)
+ end
+ end
+
+ context 'when the index is unique' do
+ before do
+ connection.execute(<<~SQL)
+ DROP INDEX #{index_name};
+ CREATE UNIQUE INDEX #{index_name} ON #{table_name} (#{column_name})
+ SQL
+ end
+
+ it 'raises an error' do
+ expect do
+ subject.execute
+ end.to raise_error(described_class::ReindexError, /UNIQUE indexes are currently not supported/)
+ end
+ end
+
+ context 'replacing the original index with a rebuilt copy' do
+ let(:replacement_name) { 'tmp_reindex__test_reindex_index' }
+ let(:replaced_name) { 'old_reindex__test_reindex_index' }
+
+ let(:create_index) { "CREATE INDEX CONCURRENTLY #{replacement_name} ON public.#{table_name} USING btree (#{column_name})" }
+ let(:drop_index) { "DROP INDEX CONCURRENTLY IF EXISTS #{replacement_name}" }
+
+ let!(:original_index) { find_index_create_statement }
+
+ before do
+ allow(subject).to receive(:connection).and_return(connection)
+ allow(subject).to receive(:disable_statement_timeout).and_yield
+ end
+
+ it 'replaces the existing index with an identical index' do
+ expect(subject).to receive(:disable_statement_timeout).exactly(3).times.and_yield
+
+ expect_to_execute_concurrently_in_order(drop_index)
+ expect_to_execute_concurrently_in_order(create_index)
+
+ expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
+ expect(instance).to receive(:run).with(raise_on_exhaustion: true).and_yield
+ end
+
+ expect_to_execute_in_order("ALTER INDEX #{index_name} RENAME TO #{replaced_name}")
+ expect_to_execute_in_order("ALTER INDEX #{replacement_name} RENAME TO #{index_name}")
+ expect_to_execute_in_order("ALTER INDEX #{replaced_name} RENAME TO #{replacement_name}")
+
+ expect_to_execute_concurrently_in_order(drop_index)
+
+ subject.execute
+
+ check_index_exists
+ end
+
+ context 'when a dangling index is left from a previous run' do
+ before do
+ connection.execute("CREATE INDEX #{replacement_name} ON #{table_name} (#{column_name})")
+ end
+
+ it 'replaces the existing index with an identical index' do
+ expect(subject).to receive(:disable_statement_timeout).exactly(3).times.and_yield
+
+ expect_to_execute_concurrently_in_order(drop_index)
+ expect_to_execute_concurrently_in_order(create_index)
+
+ expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
+ expect(instance).to receive(:run).with(raise_on_exhaustion: true).and_yield
+ end
+
+ expect_to_execute_in_order("ALTER INDEX #{index_name} RENAME TO #{replaced_name}")
+ expect_to_execute_in_order("ALTER INDEX #{replacement_name} RENAME TO #{index_name}")
+ expect_to_execute_in_order("ALTER INDEX #{replaced_name} RENAME TO #{replacement_name}")
+
+ expect_to_execute_concurrently_in_order(drop_index)
+
+ subject.execute
+
+ check_index_exists
+ end
+ end
+
+ context 'when it fails to create the replacement index' do
+ it 'safely cleans up and signals the error' do
+ expect_to_execute_concurrently_in_order(drop_index)
+
+ expect(connection).to receive(:execute).with(create_index).ordered
+ .and_raise(ActiveRecord::ConnectionTimeoutError, 'connect timeout')
+
+ expect_to_execute_concurrently_in_order(drop_index)
+
+ expect { subject.execute }.to raise_error(described_class::ReindexError, /connect timeout/)
+
+ check_index_exists
+ end
+ end
+
+ context 'when the replacement index is not valid' do
+ it 'safely cleans up and signals the error' do
+ expect_to_execute_concurrently_in_order(drop_index)
+ expect_to_execute_concurrently_in_order(create_index)
+
+ expect(subject).to receive(:replacement_index_valid?).and_return(false)
+
+ expect_to_execute_concurrently_in_order(drop_index)
+
+ expect { subject.execute }.to raise_error(described_class::ReindexError, /replacement index was created as INVALID/)
+
+ check_index_exists
+ end
+ end
+
+ context 'when a database error occurs while swapping the indexes' do
+ it 'safely cleans up and signals the error' do
+ expect_to_execute_concurrently_in_order(drop_index)
+ expect_to_execute_concurrently_in_order(create_index)
+
+ expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
+ expect(instance).to receive(:run).with(raise_on_exhaustion: true).and_yield
+ end
+
+ expect(connection).to receive(:execute).ordered
+ .with("ALTER INDEX #{index_name} RENAME TO #{replaced_name}")
+ .and_raise(ActiveRecord::ConnectionTimeoutError, 'connect timeout')
+
+ expect_to_execute_concurrently_in_order(drop_index)
+
+ expect { subject.execute }.to raise_error(described_class::ReindexError, /connect timeout/)
+
+ check_index_exists
+ end
+ end
+
+ context 'when with_lock_retries fails to acquire the lock' do
+ it 'safely cleans up and signals the error' do
+ expect_to_execute_concurrently_in_order(drop_index)
+ expect_to_execute_concurrently_in_order(create_index)
+
+ expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
+ expect(instance).to receive(:run).with(raise_on_exhaustion: true)
+ .and_raise(::Gitlab::Database::WithLockRetries::AttemptsExhaustedError, 'exhausted')
+ end
+
+ expect_to_execute_concurrently_in_order(drop_index)
+
+ expect { subject.execute }.to raise_error(described_class::ReindexError, /exhausted/)
+
+ check_index_exists
+ end
+ end
+ end
+
+ def expect_to_execute_concurrently_in_order(sql)
+ # Indexes cannot be created CONCURRENTLY in a transaction. Since the tests are wrapped in transactions,
+ # verify the original call but pass through the non-concurrent form.
+ expect(connection).to receive(:execute).with(sql).ordered.and_wrap_original do |method, sql|
+ method.call(sql.sub(/CONCURRENTLY/, ''))
+ end
+ end
+
+ def expect_to_execute_in_order(sql)
+ expect(connection).to receive(:execute).with(sql).ordered.and_call_original
+ end
+
+ def find_index_create_statement
+ ActiveRecord::Base.connection.select_value(<<~SQL)
+ SELECT indexdef
+ FROM pg_indexes
+ WHERE schemaname = 'public'
+ AND indexname = #{ActiveRecord::Base.connection.quote(index_name)}
+ SQL
+ end
+
+ def check_index_exists
+ expect(find_index_create_statement).to eq(original_index)
+ end
+end
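
For readability, the happy-path example above asserts the following ordered statement sequence (identifiers taken from the lets in this spec; this is a summary of the expectations, not the production implementation):

# Ordered statements expected by the 'replaces the existing index' example.
expected_statements = [
  "DROP INDEX CONCURRENTLY IF EXISTS tmp_reindex__test_reindex_index",
  "CREATE INDEX CONCURRENTLY tmp_reindex__test_reindex_index ON public._test_reindex_table USING btree (_test_column)",
  "ALTER INDEX _test_reindex_index RENAME TO old_reindex__test_reindex_index",
  "ALTER INDEX tmp_reindex__test_reindex_index RENAME TO _test_reindex_index",
  "ALTER INDEX old_reindex__test_reindex_index RENAME TO tmp_reindex__test_reindex_index",
  "DROP INDEX CONCURRENTLY IF EXISTS tmp_reindex__test_reindex_index"
]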
diff --git a/spec/lib/gitlab/database/custom_structure_spec.rb b/spec/lib/gitlab/database/custom_structure_spec.rb
index b3bdca0acdd..04ce1e4ad9a 100644
--- a/spec/lib/gitlab/database/custom_structure_spec.rb
+++ b/spec/lib/gitlab/database/custom_structure_spec.rb
@@ -9,7 +9,6 @@ RSpec.describe Gitlab::Database::CustomStructure do
<<~DATA
-- this file tracks custom GitLab data, such as foreign keys referencing partitioned tables
-- more details can be found in the issue: https://gitlab.com/gitlab-org/gitlab/-/issues/201872
- SET search_path=public;
DATA
end
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 7d26fbb1132..0bdcca630aa 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -903,15 +903,22 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
describe '#change_column_type_concurrently' do
it 'changes the column type' do
expect(model).to receive(:rename_column_concurrently)
- .with('users', 'username', 'username_for_type_change', type: :text, type_cast_function: nil)
+ .with('users', 'username', 'username_for_type_change', type: :text, type_cast_function: nil, batch_column_name: :id)
model.change_column_type_concurrently('users', 'username', :text)
end
+ it 'passes the batch column name' do
+ expect(model).to receive(:rename_column_concurrently)
+ .with('users', 'username', 'username_for_type_change', type: :text, type_cast_function: nil, batch_column_name: :user_id)
+
+ model.change_column_type_concurrently('users', 'username', :text, batch_column_name: :user_id)
+ end
+
context 'with type cast' do
it 'changes the column type with casting the value to the new type' do
expect(model).to receive(:rename_column_concurrently)
- .with('users', 'username', 'username_for_type_change', type: :text, type_cast_function: 'JSON')
+ .with('users', 'username', 'username_for_type_change', type: :text, type_cast_function: 'JSON', batch_column_name: :id)
model.change_column_type_concurrently('users', 'username', :text, type_cast_function: 'JSON')
end
diff --git a/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb b/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
index 042ac498373..48132d68031 100644
--- a/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
@@ -86,7 +86,7 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do
let!(:id3) { create(:user).id }
around do |example|
- Timecop.freeze { example.run }
+ freeze_time { example.run }
end
before do
diff --git a/spec/lib/gitlab/database/partitioning/partition_monitoring_spec.rb b/spec/lib/gitlab/database/partitioning/partition_monitoring_spec.rb
new file mode 100644
index 00000000000..67596211f71
--- /dev/null
+++ b/spec/lib/gitlab/database/partitioning/partition_monitoring_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Partitioning::PartitionMonitoring do
+ describe '#report_metrics' do
+ subject { described_class.new(models).report_metrics }
+
+ let(:models) { [model] }
+ let(:model) { double(partitioning_strategy: partitioning_strategy, table_name: table) }
+ let(:partitioning_strategy) { double(missing_partitions: missing_partitions, current_partitions: current_partitions) }
+ let(:table) { "some_table" }
+
+ let(:missing_partitions) do
+ [double]
+ end
+
+ let(:current_partitions) do
+ [double, double]
+ end
+
+ it 'reports number of present partitions' do
+ subject
+
+ expect(Gitlab::Metrics.registry.get(:db_partitions_present).get({ table: table })).to eq(current_partitions.size)
+ end
+
+ it 'reports number of missing partitions' do
+ subject
+
+ expect(Gitlab::Metrics.registry.get(:db_partitions_missing).get({ table: table })).to eq(missing_partitions.size)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb
index 49f3f87fe61..ec3d0a6dbcb 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb
@@ -107,6 +107,15 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::BackfillPartition
end.to change { ::Gitlab::Database::BackgroundMigrationJob.succeeded.count }.from(0).to(1)
end
+ it 'returns the number of job records marked as succeeded' do
+ create(:background_migration_job, class_name: "::#{described_class.name}",
+ arguments: [source1.id, source3.id, source_table, destination_table, unique_key])
+
+ jobs_updated = subject.perform(source1.id, source3.id, source_table, destination_table, unique_key)
+
+ expect(jobs_updated).to eq(1)
+ end
+
context 'when the feature flag is disabled' do
let(:mock_connection) { double('connection') }
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
index 86f79b213ae..44ef0b307fe 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
@@ -480,6 +480,153 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
end
end
+ describe '#finalize_backfilling_partitioned_table' do
+ let(:source_table) { 'todos' }
+ let(:source_column) { 'id' }
+
+ context 'when the table is not allowed' do
+ let(:source_table) { :this_table_is_not_allowed }
+
+ it 'raises an error' do
+ expect(migration).to receive(:assert_table_is_allowed).with(source_table).and_call_original
+
+ expect do
+ migration.finalize_backfilling_partitioned_table source_table
+ end.to raise_error(/#{source_table} is not allowed for use/)
+ end
+ end
+
+ context 'when the partitioned table does not exist' do
+ it 'raises an error' do
+ expect(migration).to receive(:table_exists?).with(partitioned_table).and_return(false)
+
+ expect do
+ migration.finalize_backfilling_partitioned_table source_table
+ end.to raise_error(/could not find partitioned table for #{source_table}/)
+ end
+ end
+
+ context 'finishing pending background migration jobs' do
+ let(:source_table_double) { double('table name') }
+ let(:raw_arguments) { [1, 50_000, source_table_double, partitioned_table, source_column] }
+
+ before do
+ allow(migration).to receive(:table_exists?).with(partitioned_table).and_return(true)
+ allow(migration).to receive(:copy_missed_records)
+ allow(migration).to receive(:execute).with(/VACUUM/)
+ end
+
+ it 'finishes remaining jobs for the correct table' do
+ expect_next_instance_of(described_class::JobArguments) do |job_arguments|
+ expect(job_arguments).to receive(:source_table_name).and_call_original
+ end
+
+ expect(Gitlab::BackgroundMigration).to receive(:steal)
+ .with(described_class::MIGRATION_CLASS_NAME)
+ .and_yield(raw_arguments)
+
+ expect(source_table_double).to receive(:==).with(source_table.to_s)
+
+ migration.finalize_backfilling_partitioned_table source_table
+ end
+ end
+
+ context 'when there is missed data' do
+ let(:partitioned_model) { Class.new(ActiveRecord::Base) }
+ let(:timestamp) { Time.utc(2019, 12, 1, 12).round }
+ let!(:todo1) { create(:todo, created_at: timestamp, updated_at: timestamp) }
+ let!(:todo2) { create(:todo, created_at: timestamp, updated_at: timestamp) }
+ let!(:todo3) { create(:todo, created_at: timestamp, updated_at: timestamp) }
+ let!(:todo4) { create(:todo, created_at: timestamp, updated_at: timestamp) }
+
+ let!(:pending_job1) do
+ create(:background_migration_job,
+ class_name: described_class::MIGRATION_CLASS_NAME,
+ arguments: [todo1.id, todo2.id, source_table, partitioned_table, source_column])
+ end
+
+ let!(:pending_job2) do
+ create(:background_migration_job,
+ class_name: described_class::MIGRATION_CLASS_NAME,
+ arguments: [todo3.id, todo3.id, source_table, partitioned_table, source_column])
+ end
+
+ let!(:succeeded_job) do
+ create(:background_migration_job, :succeeded,
+ class_name: described_class::MIGRATION_CLASS_NAME,
+ arguments: [todo4.id, todo4.id, source_table, partitioned_table, source_column])
+ end
+
+ before do
+ partitioned_model.primary_key = :id
+ partitioned_model.table_name = partitioned_table
+
+ allow(migration).to receive(:queue_background_migration_jobs_by_range_at_intervals)
+
+ migration.partition_table_by_date source_table, partition_column, min_date: min_date, max_date: max_date
+
+ allow(Gitlab::BackgroundMigration).to receive(:steal)
+ allow(migration).to receive(:execute).with(/VACUUM/)
+ end
+
+ it 'idempotently cleans up after failed background migrations' do
+ expect(partitioned_model.count).to eq(0)
+
+ partitioned_model.insert!(todo2.attributes)
+
+ expect_next_instance_of(Gitlab::Database::PartitioningMigrationHelpers::BackfillPartitionedTable) do |backfill|
+ allow(backfill).to receive(:transaction_open?).and_return(false)
+
+ expect(backfill).to receive(:perform)
+ .with(todo1.id, todo2.id, source_table, partitioned_table, source_column)
+ .and_call_original
+
+ expect(backfill).to receive(:perform)
+ .with(todo3.id, todo3.id, source_table, partitioned_table, source_column)
+ .and_call_original
+ end
+
+ migration.finalize_backfilling_partitioned_table source_table
+
+ expect(partitioned_model.count).to eq(3)
+
+ [todo1, todo2, todo3].each do |original|
+ copy = partitioned_model.find(original.id)
+ expect(copy.attributes).to eq(original.attributes)
+ end
+
+ expect(partitioned_model.find_by_id(todo4.id)).to be_nil
+
+ [pending_job1, pending_job2].each do |job|
+ expect(job.reload).to be_succeeded
+ end
+ end
+
+ it 'raises an error if no job tracking records are marked as succeeded' do
+ expect_next_instance_of(Gitlab::Database::PartitioningMigrationHelpers::BackfillPartitionedTable) do |backfill|
+ allow(backfill).to receive(:transaction_open?).and_return(false)
+
+ expect(backfill).to receive(:perform).and_return(0)
+ end
+
+ expect do
+ migration.finalize_backfilling_partitioned_table source_table
+ end.to raise_error(/failed to update tracking record/)
+ end
+
+ it 'vacuums the table after loading is complete' do
+ expect_next_instance_of(Gitlab::Database::PartitioningMigrationHelpers::BackfillPartitionedTable) do |backfill|
+ allow(backfill).to receive(:perform).and_return(1)
+ end
+
+ expect(migration).to receive(:disable_statement_timeout).and_call_original
+ expect(migration).to receive(:execute).with("VACUUM FREEZE ANALYZE #{partitioned_table}")
+
+ migration.finalize_backfilling_partitioned_table source_table
+ end
+ end
+ end
+
def filter_columns_by_name(columns, names)
columns.reject { |c| names.include?(c.name) }
end
diff --git a/spec/lib/gitlab/database/schema_cleaner_spec.rb b/spec/lib/gitlab/database/schema_cleaner_spec.rb
index 1303ad7a311..950759c7f96 100644
--- a/spec/lib/gitlab/database/schema_cleaner_spec.rb
+++ b/spec/lib/gitlab/database/schema_cleaner_spec.rb
@@ -15,8 +15,8 @@ RSpec.describe Gitlab::Database::SchemaCleaner do
expect(subject).not_to include('COMMENT ON EXTENSION')
end
- it 'sets the search_path' do
- expect(subject.split("\n").first).to eq('SET search_path=public;')
+ it 'makes no assumption about public being the default schema' do
+ expect(subject).not_to match(/public\.\w+/)
end
it 'cleans up the full schema as expected (blackbox test with example)' do
diff --git a/spec/lib/gitlab/database_importers/common_metrics/prometheus_metric_spec.rb b/spec/lib/gitlab/database_importers/common_metrics/prometheus_metric_spec.rb
index 67da59d6477..98a8e144d16 100644
--- a/spec/lib/gitlab/database_importers/common_metrics/prometheus_metric_spec.rb
+++ b/spec/lib/gitlab/database_importers/common_metrics/prometheus_metric_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::DatabaseImporters::CommonMetrics::PrometheusMetric do
end
it '.group_titles equals ::PrometheusMetric' do
- existing_group_titles = ::PrometheusMetricEnums.group_details.transform_values do |value|
+ existing_group_titles = Enums::PrometheusMetric.group_details.transform_values do |value|
value[:group_title]
end
expect(Gitlab::DatabaseImporters::CommonMetrics::PrometheusMetricEnums.group_titles).to eq(existing_group_titles)
diff --git a/spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb b/spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb
index a3661bbe49a..39029322e25 100644
--- a/spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb
+++ b/spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb
@@ -65,8 +65,8 @@ RSpec.describe Gitlab::DatabaseImporters::InstanceAdministrators::CreateGroup do
it 'creates group' do
expect(result[:status]).to eq(:success)
expect(group).to be_persisted
- expect(group.name).to eq('GitLab Instance Administrators')
- expect(group.path).to start_with('gitlab-instance-administrators')
+ expect(group.name).to eq('GitLab Instance')
+ expect(group.path).to start_with('gitlab-instance')
expect(group.path.split('-').last.length).to eq(8)
expect(group.visibility_level).to eq(described_class::VISIBILITY_LEVEL)
end
diff --git a/spec/lib/gitlab/discussions_diff/highlight_cache_spec.rb b/spec/lib/gitlab/discussions_diff/highlight_cache_spec.rb
index 9f10811d765..30981e4bd7d 100644
--- a/spec/lib/gitlab/discussions_diff/highlight_cache_spec.rb
+++ b/spec/lib/gitlab/discussions_diff/highlight_cache_spec.rb
@@ -33,9 +33,9 @@ RSpec.describe Gitlab::DiscussionsDiff::HighlightCache, :clean_gitlab_redis_cach
mapping.each do |key, value|
full_key = described_class.cache_key_for(key)
- found = Gitlab::Redis::Cache.with { |r| r.get(full_key) }
+ found_key = Gitlab::Redis::Cache.with { |r| r.get(full_key) }
- expect(found).to eq(value.to_json)
+ expect(described_class.gzip_decompress(found_key)).to eq(value.to_json)
end
end
end
diff --git a/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb b/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb
index ee2173a9c8d..1a7d837af73 100644
--- a/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/create_issue_handler_spec.rb
@@ -4,17 +4,6 @@ require 'spec_helper'
RSpec.describe Gitlab::Email::Handler::CreateIssueHandler do
include_context :email_shared_context
- it_behaves_like :reply_processing_shared_examples
-
- before do
- stub_incoming_email_setting(enabled: true, address: "incoming+%{key}@appmail.adventuretime.ooo")
- stub_config_setting(host: 'localhost')
- end
-
- let(:email_raw) { email_fixture('emails/valid_new_issue.eml') }
- let(:namespace) { create(:namespace, path: 'gitlabhq') }
-
- let!(:project) { create(:project, :public, namespace: namespace, path: 'gitlabhq') }
let!(:user) do
create(
:user,
@@ -23,6 +12,17 @@ RSpec.describe Gitlab::Email::Handler::CreateIssueHandler do
)
end
+ let!(:project) { create(:project, :public, namespace: namespace, path: 'gitlabhq') }
+ let(:namespace) { create(:namespace, path: 'gitlabhq') }
+ let(:email_raw) { email_fixture('emails/valid_new_issue.eml') }
+
+ it_behaves_like :reply_processing_shared_examples
+
+ before do
+ stub_incoming_email_setting(enabled: true, address: "incoming+%{key}@appmail.adventuretime.ooo")
+ stub_config_setting(host: 'localhost')
+ end
+
context "when email key" do
let(:mail) { Mail::Message.new(email_raw) }
diff --git a/spec/lib/gitlab/email/handler/create_merge_request_handler_spec.rb b/spec/lib/gitlab/email/handler/create_merge_request_handler_spec.rb
index 75d5fc040cb..37ee4591db0 100644
--- a/spec/lib/gitlab/email/handler/create_merge_request_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/create_merge_request_handler_spec.rb
@@ -4,6 +4,18 @@ require 'spec_helper'
RSpec.describe Gitlab::Email::Handler::CreateMergeRequestHandler do
include_context :email_shared_context
+ let!(:user) do
+ create(
+ :user,
+ email: 'jake@adventuretime.ooo',
+ incoming_email_token: 'auth_token'
+ )
+ end
+
+ let!(:project) { create(:project, :public, :repository, namespace: namespace, path: 'gitlabhq') }
+ let(:namespace) { create(:namespace, path: 'gitlabhq') }
+ let(:email_raw) { email_fixture('emails/valid_new_merge_request.eml') }
+
it_behaves_like :reply_processing_shared_examples
before do
@@ -15,18 +27,6 @@ RSpec.describe Gitlab::Email::Handler::CreateMergeRequestHandler do
TestEnv.clean_test_path
end
- let(:email_raw) { email_fixture('emails/valid_new_merge_request.eml') }
- let(:namespace) { create(:namespace, path: 'gitlabhq') }
-
- let!(:project) { create(:project, :public, :repository, namespace: namespace, path: 'gitlabhq') }
- let!(:user) do
- create(
- :user,
- email: 'jake@adventuretime.ooo',
- incoming_email_token: 'auth_token'
- )
- end
-
context "when email key" do
let(:mail) { Mail::Message.new(email_raw) }
diff --git a/spec/lib/gitlab/email/handler/create_note_handler_spec.rb b/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
index e5598bbd10f..07b8070be30 100644
--- a/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
@@ -4,6 +4,16 @@ require 'spec_helper'
RSpec.describe Gitlab::Email::Handler::CreateNoteHandler do
include_context :email_shared_context
+ let!(:sent_notification) do
+ SentNotification.record_note(note, user.id, mail_key)
+ end
+
+ let(:noteable) { note.noteable }
+ let(:note) { create(:diff_note_on_merge_request, project: project) }
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :public, :repository) }
+ let(:email_raw) { fixture_file('emails/valid_reply.eml') }
+
it_behaves_like :reply_processing_shared_examples
before do
@@ -11,16 +21,6 @@ RSpec.describe Gitlab::Email::Handler::CreateNoteHandler do
stub_config_setting(host: 'localhost')
end
- let(:email_raw) { fixture_file('emails/valid_reply.eml') }
- let(:project) { create(:project, :public, :repository) }
- let(:user) { create(:user) }
- let(:note) { create(:diff_note_on_merge_request, project: project) }
- let(:noteable) { note.noteable }
-
- let!(:sent_notification) do
- SentNotification.record_note(note, user.id, mail_key)
- end
-
context "when the recipient address doesn't include a mail key" do
let(:email_raw) { fixture_file('emails/valid_reply.eml').gsub(mail_key, "") }
diff --git a/spec/lib/gitlab/email/receiver_spec.rb b/spec/lib/gitlab/email/receiver_spec.rb
index 592d3f3f0e4..ccff902d290 100644
--- a/spec/lib/gitlab/email/receiver_spec.rb
+++ b/spec/lib/gitlab/email/receiver_spec.rb
@@ -36,6 +36,12 @@ RSpec.describe Gitlab::Email::Receiver do
it_behaves_like 'correctly finds the mail key'
end
+ context 'when in an X-Envelope-To header' do
+ let(:email_raw) { fixture_file('emails/x_envelope_to_header.eml') }
+
+ it_behaves_like 'correctly finds the mail key'
+ end
+
context 'when enclosed with angle brackets in an Envelope-To header' do
let(:email_raw) { fixture_file('emails/envelope_to_header_with_angle_brackets.eml') }
diff --git a/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb b/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb
new file mode 100644
index 00000000000..797707114a1
--- /dev/null
+++ b/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor do
+ describe '#process' do
+ subject { described_class.new }
+
+ context 'when there is no GRPC exception' do
+ let(:data) { { fingerprint: ['ArgumentError', 'Missing arguments'] } }
+
+ it 'leaves data unchanged' do
+ expect(subject.process(data)).to eq(data)
+ end
+ end
+
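+ # The processor strips the `debug_error_string` payload from gRPC exception values and fingerprints, storing it under extra[:grpc_debug_error_string].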
+ context 'when there is a GRPC exception with a debug string' do
+ let(:data) do
+ {
+ exception: {
+ values: [
+ {
+ type: "GRPC::DeadlineExceeded",
+ value: "4:DeadlineExceeded. debug_error_string:{\"hello\":1}"
+ }
+ ]
+ },
+ extra: {
+ caller: 'test'
+ },
+ fingerprint: [
+ "GRPC::DeadlineExceeded",
+ "4:Deadline Exceeded. debug_error_string:{\"created\":\"@1598938192.005782000\",\"description\":\"Error received from peer unix:/home/git/gitalypraefect.socket\",\"file\":\"src/core/lib/surface/call.cc\",\"file_line\":1055,\"grpc_message\":\"Deadline Exceeded\",\"grpc_status\":4}"
+ ]
+ }
+ end
+
+ let(:expected) do
+ {
+ fingerprint: [
+ "GRPC::DeadlineExceeded",
+ "4:Deadline Exceeded."
+ ],
+ exception: {
+ values: [
+ {
+ type: "GRPC::DeadlineExceeded",
+ value: "4:DeadlineExceeded."
+ }
+ ]
+ },
+ extra: {
+ caller: 'test',
+ grpc_debug_error_string: "{\"hello\":1}"
+ }
+ }
+ end
+
+ it 'removes the debug error string and stores it as an extra field' do
+ expect(subject.process(data)).to eq(expected)
+ end
+
+ context 'with no custom fingerprint' do
+ before do
+ data.delete(:fingerprint)
+ expected.delete(:fingerprint)
+ end
+
+ it 'removes the debug error string and stores it as an extra field' do
+ expect(subject.process(data)).to eq(expected)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/experimentation_spec.rb b/spec/lib/gitlab/experimentation_spec.rb
index 2de5e1e20d6..9bc865f4d29 100644
--- a/spec/lib/gitlab/experimentation_spec.rb
+++ b/spec/lib/gitlab/experimentation_spec.rb
@@ -295,6 +295,19 @@ RSpec.describe Gitlab::Experimentation do
end
end
end
+
+ describe '#experiment_tracking_category_and_group' do
+ let_it_be(:experiment_key) { :test_something }
+
+ subject { controller.experiment_tracking_category_and_group(experiment_key) }
+
+ it 'returns a string with the experiment tracking category & group joined with a ":"' do
+ expect(controller).to receive(:tracking_category).with(experiment_key).and_return('Experiment::Category')
+ expect(controller).to receive(:tracking_group).with(experiment_key, '_group').and_return('experimental_group')
+
+ expect(subject).to eq('Experiment::Category:experimental_group')
+ end
+ end
end
describe '.enabled?' do
diff --git a/spec/lib/gitlab/external_authorization/access_spec.rb b/spec/lib/gitlab/external_authorization/access_spec.rb
index 4bb81230ac0..a6773cc19e1 100644
--- a/spec/lib/gitlab/external_authorization/access_spec.rb
+++ b/spec/lib/gitlab/external_authorization/access_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Gitlab::ExternalAuthorization::Access, :clean_gitlab_redis_cache
describe '#loaded?' do
it 'is `true` when it was loaded recently' do
- Timecop.freeze do
+ freeze_time do
allow(access).to receive(:loaded_at).and_return(5.minutes.ago)
expect(access).to be_loaded
@@ -19,7 +19,7 @@ RSpec.describe Gitlab::ExternalAuthorization::Access, :clean_gitlab_redis_cache
end
it 'is `false` when the result was loaded a long time ago' do
- Timecop.freeze do
+ freeze_time do
allow(access).to receive(:loaded_at).and_return(2.weeks.ago)
expect(access).not_to be_loaded
@@ -70,7 +70,7 @@ RSpec.describe Gitlab::ExternalAuthorization::Access, :clean_gitlab_redis_cache
end
it 'stores the result in redis' do
- Timecop.freeze do
+ freeze_time do
fake_cache = double
expect(fake_cache).to receive(:store).with(true, nil, Time.now)
expect(access).to receive(:cache).and_return(fake_cache)
@@ -118,7 +118,7 @@ RSpec.describe Gitlab::ExternalAuthorization::Access, :clean_gitlab_redis_cache
end
it 'does not load from the webservice' do
- Timecop.freeze do
+ freeze_time do
expect(fake_cache).to receive(:load).and_return([true, nil, Time.now])
expect(access).to receive(:load_from_cache).and_call_original
@@ -129,7 +129,7 @@ RSpec.describe Gitlab::ExternalAuthorization::Access, :clean_gitlab_redis_cache
end
it 'loads from the webservice when the cached result was too old' do
- Timecop.freeze do
+ freeze_time do
expect(fake_cache).to receive(:load).and_return([true, nil, 2.days.ago])
expect(access).to receive(:load_from_cache).and_call_original
diff --git a/spec/lib/gitlab/external_authorization/cache_spec.rb b/spec/lib/gitlab/external_authorization/cache_spec.rb
index 9037c04cf2b..a8e7932b82c 100644
--- a/spec/lib/gitlab/external_authorization/cache_spec.rb
+++ b/spec/lib/gitlab/external_authorization/cache_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe Gitlab::ExternalAuthorization::Cache, :clean_gitlab_redis_cache d
describe '#load' do
it 'reads stored info from redis' do
- Timecop.freeze do
+ freeze_time do
set_in_redis(:access, false)
set_in_redis(:reason, 'Access denied for now')
set_in_redis(:refreshed_at, Time.now)
@@ -38,7 +38,7 @@ RSpec.describe Gitlab::ExternalAuthorization::Cache, :clean_gitlab_redis_cache d
describe '#store' do
it 'sets the values in redis' do
- Timecop.freeze do
+ freeze_time do
cache.store(true, 'the reason', Time.now)
expect(read_from_redis(:access)).to eq('true')
diff --git a/spec/lib/gitlab/file_type_detection_spec.rb b/spec/lib/gitlab/file_type_detection_spec.rb
index ba5e7cfabf2..c435d3f6097 100644
--- a/spec/lib/gitlab/file_type_detection_spec.rb
+++ b/spec/lib/gitlab/file_type_detection_spec.rb
@@ -192,6 +192,20 @@ RSpec.describe Gitlab::FileTypeDetection do
end
end
+ describe '#image_safe_for_scaling?' do
+ it 'returns true for allowed image formats' do
+ uploader.store!(upload_fixture('rails_sample.jpg'))
+
+ expect(uploader).to be_image_safe_for_scaling
+ end
+
+ it 'returns false for other files' do
+ uploader.store!(upload_fixture('unsanitized.svg'))
+
+ expect(uploader).not_to be_image_safe_for_scaling
+ end
+ end
+
describe '#dangerous_image?' do
it 'returns true if filename has a dangerous extension' do
uploader.store!(upload_fixture('unsanitized.svg'))
@@ -377,6 +391,31 @@ RSpec.describe Gitlab::FileTypeDetection do
end
end
+ describe '#image_safe_for_scaling?' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:filename, :expectation) do
+ 'img.jpg' | true
+ 'img.jpeg' | true
+ 'img.png' | true
+ 'img.svg' | false
+ end
+
+ with_them do
+ it "returns expected result" do
+ allow(custom_class).to receive(:filename).and_return(filename)
+
+ expect(custom_class.image_safe_for_scaling?).to be(expectation)
+ end
+ end
+
+ it 'returns false if filename is blank' do
+ allow(custom_class).to receive(:filename).and_return(nil)
+
+ expect(custom_class).not_to be_image_safe_for_scaling
+ end
+ end
+
describe '#video?' do
it 'returns true for a video file' do
allow(custom_class).to receive(:filename).and_return('video_sample.mp4')
diff --git a/spec/lib/gitlab/git/base_error_spec.rb b/spec/lib/gitlab/git/base_error_spec.rb
new file mode 100644
index 00000000000..851cfa16512
--- /dev/null
+++ b/spec/lib/gitlab/git/base_error_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rspec-parameterized'
+
+RSpec.describe Gitlab::Git::BaseError do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { described_class.new(message).to_s }
+
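+ # to_s truncates the message at any trailing ` debug_error_string:` payload; a nil message falls back to the class name.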
+ where(:message, :result) do
+ "GRPC::DeadlineExceeded: 4:DeadlineExceeded. debug_error_string:{\"hello\":1}" | "GRPC::DeadlineExceeded: 4:DeadlineExceeded."
+ "GRPC::DeadlineExceeded: 4:DeadlineExceeded." | "GRPC::DeadlineExceeded: 4:DeadlineExceeded."
+ "GRPC::DeadlineExceeded: 4:DeadlineExceeded. debug_error_string:{\"created\":\"@1598978902.544524530\",\"description\":\"Error received from peer ipv4: debug_error_string:test\"}" | "GRPC::DeadlineExceeded: 4:DeadlineExceeded."
+ "9:Multiple lines\nTest line. debug_error_string:{\"created\":\"@1599074877.106467000\"}" | "9:Multiple lines\nTest line."
+ "other message" | "other message"
+ nil | "Gitlab::Git::BaseError"
+ end
+
+ with_them do
+ it { is_expected.to eq(result) }
+ end
+end
diff --git a/spec/lib/gitlab/git/commit_spec.rb b/spec/lib/gitlab/git/commit_spec.rb
index 491437856d4..8961cdcae7d 100644
--- a/spec/lib/gitlab/git/commit_spec.rb
+++ b/spec/lib/gitlab/git/commit_spec.rb
@@ -192,7 +192,7 @@ RSpec.describe Gitlab::Git::Commit, :seed_helper do
end
describe '.find with Gitaly enabled' do
- it_should_behave_like '.find'
+ it_behaves_like '.find'
end
describe '.find with Rugged enabled', :enable_rugged do
@@ -204,7 +204,7 @@ RSpec.describe Gitlab::Git::Commit, :seed_helper do
described_class.find(repository, SeedRepo::Commit::ID)
end
- it_should_behave_like '.find'
+ it_behaves_like '.find'
end
describe '.last_for_path' do
@@ -474,7 +474,7 @@ RSpec.describe Gitlab::Git::Commit, :seed_helper do
end
describe '.batch_by_oid with Gitaly enabled' do
- it_should_behave_like '.batch_by_oid'
+ it_behaves_like '.batch_by_oid'
context 'when oids is empty' do
it 'makes no Gitaly request' do
@@ -486,7 +486,7 @@ RSpec.describe Gitlab::Git::Commit, :seed_helper do
end
describe '.batch_by_oid with Rugged enabled', :enable_rugged do
- it_should_behave_like '.batch_by_oid'
+ it_behaves_like '.batch_by_oid'
it 'calls out to the Rugged implementation' do
allow_next_instance_of(Rugged) do |instance|
diff --git a/spec/lib/gitlab/github_import/importer/label_links_importer_spec.rb b/spec/lib/gitlab/github_import/importer/label_links_importer_spec.rb
index 4d3245fc988..6d143f78c66 100644
--- a/spec/lib/gitlab/github_import/importer/label_links_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/label_links_importer_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe Gitlab::GithubImport::Importer::LabelLinksImporter do
.to receive(:find_target_id)
.and_return(1)
- Timecop.freeze do
+ freeze_time do
expect(Gitlab::Database)
.to receive(:bulk_insert)
.with(
diff --git a/spec/lib/gitlab/github_import/importer/labels_importer_spec.rb b/spec/lib/gitlab/github_import/importer/labels_importer_spec.rb
index 0010b959a49..ca9d3e1e21c 100644
--- a/spec/lib/gitlab/github_import/importer/labels_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/labels_importer_spec.rb
@@ -85,13 +85,13 @@ RSpec.describe Gitlab::GithubImport::Importer::LabelsImporter, :clean_gitlab_red
end
it 'includes the created timestamp' do
- Timecop.freeze do
+ freeze_time do
expect(label_hash[:created_at]).to eq(Time.zone.now)
end
end
it 'includes the updated timestamp' do
- Timecop.freeze do
+ freeze_time do
expect(label_hash[:updated_at]).to eq(Time.zone.now)
end
end
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
index 05ac0248ec9..0835c6155b9 100644
--- a/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
@@ -164,7 +164,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsImporter do
.to receive(:increment)
.and_call_original
- Timecop.freeze do
+ freeze_time do
importer.update_repository
expect(project.last_repository_updated_at).to be_like_time(Time.zone.now)
diff --git a/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb b/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
index 65dba2711b9..180c6d9e420 100644
--- a/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/repository_importer_spec.rb
@@ -261,7 +261,7 @@ RSpec.describe Gitlab::GithubImport::Importer::RepositoryImporter do
describe '#update_clone_time' do
it 'sets the timestamp for when the cloning process finished' do
- Timecop.freeze do
+ freeze_time do
expect(project)
.to receive(:update_column)
.with(:last_repository_updated_at, Time.zone.now)
diff --git a/spec/lib/gitlab/github_import/label_finder_spec.rb b/spec/lib/gitlab/github_import/label_finder_spec.rb
index 452f3c896a4..9905fce2a20 100644
--- a/spec/lib/gitlab/github_import/label_finder_spec.rb
+++ b/spec/lib/gitlab/github_import/label_finder_spec.rb
@@ -3,10 +3,10 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::LabelFinder, :clean_gitlab_redis_cache do
- let(:project) { create(:project) }
- let(:finder) { described_class.new(project) }
- let!(:bug) { create(:label, project: project, name: 'Bug') }
- let!(:feature) { create(:label, project: project, name: 'Feature') }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:finder) { described_class.new(project) }
+ let_it_be(:bug) { create(:label, project: project, name: 'Bug') }
+ let_it_be(:feature) { create(:label, project: project, name: 'Feature') }
describe '#id_for' do
context 'with a cache in place' do
diff --git a/spec/lib/gitlab/github_import/milestone_finder_spec.rb b/spec/lib/gitlab/github_import/milestone_finder_spec.rb
index 419184d6115..5da45b1897f 100644
--- a/spec/lib/gitlab/github_import/milestone_finder_spec.rb
+++ b/spec/lib/gitlab/github_import/milestone_finder_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::MilestoneFinder, :clean_gitlab_redis_cache do
- let!(:project) { create(:project) }
- let!(:milestone) { create(:milestone, project: project) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:milestone) { create(:milestone, project: project) }
let(:finder) { described_class.new(project) }
describe '#id_for' do
diff --git a/spec/lib/gitlab/gitpod_spec.rb b/spec/lib/gitlab/gitpod_spec.rb
new file mode 100644
index 00000000000..f4dda42aeb4
--- /dev/null
+++ b/spec/lib/gitlab/gitpod_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Gitpod do
+ let_it_be(:user) { create(:user) }
+ let(:feature_scope) { true }
+
+ before do
+ stub_feature_flags(gitpod: feature_scope)
+ end
+
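+ # feature_conditional? is true only when the :gitpod flag is enabled for a specific resource rather than globally.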
+ describe '.feature_conditional?' do
+ subject { described_class.feature_conditional? }
+
+ context 'when feature is enabled globally' do
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when feature is enabled only to a resource' do
+ let(:feature_scope) { user }
+
+ it { is_expected.to be_truthy }
+ end
+ end
+
+ describe '.feature_available?' do
+ subject { described_class.feature_available? }
+
+ context 'when feature is enabled globally' do
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when feature is enabled only to a resource' do
+ let(:feature_scope) { user }
+
+ it { is_expected.to be_truthy }
+ end
+ end
+
+ describe '.feature_enabled?' do
+ let(:current_user) { nil }
+
+ subject { described_class.feature_enabled?(current_user) }
+
+ context 'when feature is enabled globally' do
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when feature is enabled only to a resource' do
+ let(:feature_scope) { user }
+
+ context 'for the same resource' do
+ let(:current_user) { user }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'for a different resource' do
+ let(:current_user) { create(:user) }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/gl_repository/repo_type_spec.rb b/spec/lib/gitlab/gl_repository/repo_type_spec.rb
index e920fc7cd3b..3fa636a1cf0 100644
--- a/spec/lib/gitlab/gl_repository/repo_type_spec.rb
+++ b/spec/lib/gitlab/gl_repository/repo_type_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe Gitlab::GlRepository::RepoType do
let(:expected_identifier) { "project-#{expected_id}" }
let(:expected_suffix) { '' }
let(:expected_container) { project }
- let(:expected_repository) { expected_container.repository }
+ let(:expected_repository) { ::Repository.new(project.full_path, project, shard: project.repository_storage, disk_path: project.disk_path, repo_type: Gitlab::GlRepository::PROJECT) }
end
it 'knows its type' do
@@ -46,7 +46,7 @@ RSpec.describe Gitlab::GlRepository::RepoType do
let(:expected_identifier) { "wiki-#{expected_id}" }
let(:expected_suffix) { '.wiki' }
let(:expected_container) { project }
- let(:expected_repository) { expected_container.wiki.repository }
+ let(:expected_repository) { ::Repository.new(project.wiki.full_path, project, shard: project.wiki.repository_storage, disk_path: project.wiki.disk_path, repo_type: Gitlab::GlRepository::WIKI) }
end
it 'knows its type' do
@@ -75,7 +75,7 @@ RSpec.describe Gitlab::GlRepository::RepoType do
let(:expected_id) { personal_snippet.id }
let(:expected_identifier) { "snippet-#{expected_id}" }
let(:expected_suffix) { '' }
- let(:expected_repository) { personal_snippet.repository }
+ let(:expected_repository) { ::Repository.new(personal_snippet.full_path, personal_snippet, shard: personal_snippet.repository_storage, disk_path: personal_snippet.disk_path, repo_type: Gitlab::GlRepository::SNIPPET) }
let(:expected_container) { personal_snippet }
end
@@ -104,7 +104,7 @@ RSpec.describe Gitlab::GlRepository::RepoType do
let(:expected_id) { project_snippet.id }
let(:expected_identifier) { "snippet-#{expected_id}" }
let(:expected_suffix) { '' }
- let(:expected_repository) { project_snippet.repository }
+ let(:expected_repository) { ::Repository.new(project_snippet.full_path, project_snippet, shard: project_snippet.repository_storage, disk_path: project_snippet.disk_path, repo_type: Gitlab::GlRepository::SNIPPET) }
let(:expected_container) { project_snippet }
end
@@ -133,10 +133,14 @@ RSpec.describe Gitlab::GlRepository::RepoType do
let(:expected_identifier) { "design-#{project.id}" }
let(:expected_id) { project.id }
let(:expected_suffix) { '.design' }
- let(:expected_repository) { project.design_repository }
+ let(:expected_repository) { ::DesignManagement::Repository.new(project) }
let(:expected_container) { project }
end
+ it 'uses the design access checker' do
+ expect(described_class.access_checker_class).to eq(::Gitlab::GitAccessDesign)
+ end
+
it 'knows its type' do
aggregate_failures do
expect(described_class).to be_design
diff --git a/spec/lib/gitlab/gl_repository_spec.rb b/spec/lib/gitlab/gl_repository_spec.rb
index f90103ee6f7..3733d545155 100644
--- a/spec/lib/gitlab/gl_repository_spec.rb
+++ b/spec/lib/gitlab/gl_repository_spec.rb
@@ -31,15 +31,4 @@ RSpec.describe ::Gitlab::GlRepository do
expect { described_class.parse("project-foo") }.to raise_error(ArgumentError)
end
end
-
- describe 'DESIGN' do
- it 'uses the design access checker' do
- expect(described_class::DESIGN.access_checker_class).to eq(::Gitlab::GitAccessDesign)
- end
-
- it 'builds a design repository' do
- expect(described_class::DESIGN.repository_resolver.call(create(:project)))
- .to be_a(::DesignManagement::Repository)
- end
- end
end
diff --git a/spec/lib/gitlab/graphql/docs/renderer_spec.rb b/spec/lib/gitlab/graphql/docs/renderer_spec.rb
index 81ef7fcda97..d1be962a4f8 100644
--- a/spec/lib/gitlab/graphql/docs/renderer_spec.rb
+++ b/spec/lib/gitlab/graphql/docs/renderer_spec.rb
@@ -36,10 +36,10 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
specify do
expectation = <<~DOC
- ## ArrayTest
+ ### ArrayTest
- | Name | Type | Description |
- | --- | ---- | ---------- |
+ | Field | Type | Description |
+ | ----- | ---- | ----------- |
| `foo` | String! => Array | A description |
DOC
@@ -59,10 +59,10 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
specify do
expectation = <<~DOC
- ## OrderingTest
+ ### OrderingTest
- | Name | Type | Description |
- | --- | ---- | ---------- |
+ | Field | Type | Description |
+ | ----- | ---- | ----------- |
| `bar` | String! | A description of bar field |
| `foo` | String! | A description of foo field |
DOC
@@ -82,15 +82,45 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
specify do
expectation = <<~DOC
- ## DeprecatedTest
+ ### DeprecatedTest
- | Name | Type | Description |
- | --- | ---- | ---------- |
+ | Field | Type | Description |
+ | ----- | ---- | ----------- |
| `foo` **{warning-solid}** | String! | **Deprecated:** This is deprecated. Deprecated in 1.10 |
DOC
is_expected.to include(expectation)
end
end
+
+ context 'A type with an enum field' do
+ let(:type) do
+ enum_type = Class.new(Types::BaseEnum) do
+ graphql_name 'MyEnum'
+
+ value 'BAZ', description: 'A description of BAZ'
+ value 'BAR', description: 'A description of BAR', deprecated: { reason: 'This is deprecated', milestone: '1.10' }
+ end
+
+ Class.new(Types::BaseObject) do
+ graphql_name 'EnumTest'
+
+ field :foo, enum_type, null: false, description: 'A description of foo field'
+ end
+ end
+
+ specify do
+ expectation = <<~DOC
+ ### MyEnum
+
+ | Value | Description |
+ | ----- | ----------- |
+ | `BAR` **{warning-solid}** | **Deprecated:** This is deprecated. Deprecated in 1.10 |
+ | `BAZ` | A description of BAZ |
+ DOC
+
+ is_expected.to include(expectation)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/graphql/loaders/issuable_loader_spec.rb b/spec/lib/gitlab/graphql/loaders/issuable_loader_spec.rb
index 180966de895..33a9d40931e 100644
--- a/spec/lib/gitlab/graphql/loaders/issuable_loader_spec.rb
+++ b/spec/lib/gitlab/graphql/loaders/issuable_loader_spec.rb
@@ -6,9 +6,26 @@ RSpec.describe Gitlab::Graphql::Loaders::IssuableLoader do
subject { described_class.new(parent, finder) }
let(:params) { HashWithIndifferentAccess.new }
+ let(:finder_params) { finder.params.to_h.with_indifferent_access }
+
+ # Dumb finder class that only implements what we need and has
+ # predictable query counts.
+ let(:finder_class) do
+ Class.new(IssuesFinder) do
+ def execute
+ params[:project_id].issues.where(iid: params[:iids])
+ end
+
+ private
+
+ def params_class
+ IssuesFinder::Params
+ end
+ end
+ end
describe '#find_all' do
- let(:finder) { double(:finder, params: params, execute: %i[x y z]) }
+ let(:finder) { issuable_finder(params: params, result: [:x, :y, :z]) }
where(:factory, :param_name) do
%i[project group].map { |thing| [thing, :"#{thing}_id"] }
@@ -19,7 +36,7 @@ RSpec.describe Gitlab::Graphql::Loaders::IssuableLoader do
it 'assigns the parent parameter and calls the finder' do
expect(subject.find_all).to contain_exactly(:x, :y, :z)
- expect(params).to include(param_name => parent)
+ expect(finder_params).to include(param_name => parent)
end
end
@@ -34,12 +51,12 @@ RSpec.describe Gitlab::Graphql::Loaders::IssuableLoader do
describe '#batching_find_all' do
context 'the finder params are anything other than [iids]' do
- let(:finder) { double(:finder, params: params, execute: [:foo]) }
+ let(:finder) { issuable_finder(params: params, result: [:foo]) }
let(:parent) { build_stubbed(:project) }
it 'batching_find_alls the finder, setting the correct parent parameter' do
expect(subject.batching_find_all).to eq([:foo])
- expect(params[:project_id]).to eq(parent)
+ expect(finder_params[:project_id]).to eq(parent)
end
it 'allows a post-process block' do
@@ -48,23 +65,6 @@ RSpec.describe Gitlab::Graphql::Loaders::IssuableLoader do
end
context 'the finder params are exactly [iids]' do
- # Dumb finder class, that only implements what we need, and has
- # predictable query counts.
- let(:finder_class) do
- Class.new do
- attr_reader :current_user, :params
-
- def initialize(user, args)
- @current_user = user
- @params = HashWithIndifferentAccess.new(args.to_h)
- end
-
- def execute
- params[:project_id].issues.where(iid: params[:iids])
- end
- end
- end
-
it 'batches requests' do
issue_a = create(:issue)
issue_b = create(:issue)
@@ -93,4 +93,13 @@ RSpec.describe Gitlab::Graphql::Loaders::IssuableLoader do
end
end
end
+
+ private
+
+ def issuable_finder(user: double(:user), params: {}, result: nil)
+ new_finder = finder_class.new(user, params)
+ allow(new_finder).to receive(:execute).and_return(result) if result
+
+ new_finder
+ end
end
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
index 09d7e084172..c8f368b15fc 100644
--- a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
@@ -262,6 +262,22 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
end
end
+ context 'when ordering by similarity' do
+ let!(:project1) { create(:project, name: 'test') }
+ let!(:project2) { create(:project, name: 'testing') }
+ let!(:project3) { create(:project, name: 'tests') }
+ let!(:project4) { create(:project, name: 'testing stuff') }
+ let!(:project5) { create(:project, name: 'test') }
+
+ let(:nodes) do
+ Project.sorted_by_similarity_desc('test', include_in_select: true)
+ end
+
+ let(:descending_nodes) { nodes.to_a }
+
+ it_behaves_like 'nodes are in descending order'
+ end
+
context 'when an invalid cursor is provided' do
let(:arguments) { { before: Base64Bp.urlsafe_encode64('invalidcursor', padding: false) } }
@@ -358,15 +374,6 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
end
end
- context 'when before and last does not request all remaining nodes' do
- let(:arguments) { { before: encoded_cursor(project_list.last), last: 2 } }
-
- it 'has a previous and a next' do
- expect(subject.has_previous_page).to be_truthy
- expect(subject.has_next_page).to be_truthy
- end
- end
-
context 'when before and last does request all remaining nodes' do
let(:arguments) { { before: encoded_cursor(project_list[1]), last: 3 } }
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/order_info_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/order_info_spec.rb
index 9f310f30253..444c10074a0 100644
--- a/spec/lib/gitlab/graphql/pagination/keyset/order_info_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/keyset/order_info_spec.rb
@@ -51,6 +51,18 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::OrderInfo do
expect(order_list.last.operator_for(:after)).to eq '>'
end
end
+
+ context 'when ordering by SIMILARITY' do
+ let(:relation) { Project.sorted_by_similarity_desc('test', include_in_select: true) }
+
+ it 'assigns the right attribute name, named function, and direction' do
+ expect(order_list.count).to eq 2
+ expect(order_list.first.attribute_name).to eq 'similarity'
+ expect(order_list.first.named_function).to be_kind_of(Arel::Nodes::Addition)
+ expect(order_list.first.named_function.to_sql).to include 'SIMILARITY('
+ expect(order_list.first.sort_direction).to eq :desc
+ end
+ end
end
describe '#validate_ordering' do
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/query_builder_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/query_builder_spec.rb
index 31c02fd43e8..c7e7db4d535 100644
--- a/spec/lib/gitlab/graphql/pagination/keyset/query_builder_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/keyset/query_builder_spec.rb
@@ -131,5 +131,42 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::QueryBuilder do
end
end
end
+
+ context 'when sorting using SIMILARITY' do
+ let(:relation) { Project.sorted_by_similarity_desc('test', include_in_select: true) }
+ let(:arel_table) { Project.arel_table }
+ let(:decoded_cursor) { { 'similarity' => 0.5, 'id' => 100 } }
+ let(:similarity_sql) do
+ [
+ '(SIMILARITY(COALESCE("projects"."path", \'\'), \'test\') * CAST(\'1\' AS numeric))',
+ '(SIMILARITY(COALESCE("projects"."name", \'\'), \'test\') * CAST(\'0.7\' AS numeric))',
+ '(SIMILARITY(COALESCE("projects"."description", \'\'), \'test\') * CAST(\'0.2\' AS numeric))'
+ ].join(' + ')
+ end
+
+ context 'when no values are nil' do
+ context 'when :after' do
+ it 'generates the correct condition' do
+ conditions = builder.conditions.gsub(/\s+/, ' ')
+
+ expect(conditions).to include "(#{similarity_sql} < 0.5)"
+ expect(conditions).to include '"projects"."id" < 100'
+ expect(conditions).to include "OR (#{similarity_sql} IS NULL)"
+ end
+ end
+
+ context 'when :before' do
+ let(:before_or_after) { :before }
+
+ it 'generates the correct condition' do
+ conditions = builder.conditions.gsub(/\s+/, ' ')
+
+ expect(conditions).to include "(#{similarity_sql} > 0.5)"
+ expect(conditions).to include '"projects"."id" > 100'
+ expect(conditions).to include "OR ( #{similarity_sql} = 0.5"
+ end
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/group_search_results_spec.rb b/spec/lib/gitlab/group_search_results_spec.rb
index b6a3c8b5e76..045c922783a 100644
--- a/spec/lib/gitlab/group_search_results_spec.rb
+++ b/spec/lib/gitlab/group_search_results_spec.rb
@@ -3,10 +3,43 @@
require 'spec_helper'
RSpec.describe Gitlab::GroupSearchResults do
- let(:user) { create(:user) }
+ # Group creation calls GroupsFinder, so the group needs to be created
+ # up front for the expect(GroupsFinder) check below to work
+ let_it_be(:group) { create(:group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public, group: group) }
+ let(:filters) { {} }
+ let(:limit_projects) { Project.all }
+ let(:query) { 'gob' }
+
+ subject(:results) { described_class.new(user, query, limit_projects, group: group, filters: filters) }
+
+ describe 'issues search' do
+ let_it_be(:opened_result) { create(:issue, :opened, project: project, title: 'foo opened') }
+ let_it_be(:closed_result) { create(:issue, :closed, project: project, title: 'foo closed') }
+ let(:query) { 'foo' }
+ let(:scope) { 'issues' }
+
+ include_examples 'search results filtered by state'
+ end
+
+ describe 'merge_requests search' do
+ let(:opened_result) { create(:merge_request, :opened, source_project: project, title: 'foo opened') }
+ let(:closed_result) { create(:merge_request, :closed, source_project: project, title: 'foo closed') }
+ let(:query) { 'foo' }
+ let(:scope) { 'merge_requests' }
+
+ before do
+ # We create these instances in a before block because otherwise the MR factory fails on after(:build)
+ opened_result
+ closed_result
+ end
+
+ include_examples 'search results filtered by state'
+ end
describe 'user search' do
- let(:group) { create(:group) }
+ subject(:objects) { results.objects('users') }
it 'returns the users belonging to the group matching the search query' do
user1 = create(:user, username: 'gob_bluth')
@@ -17,9 +50,7 @@ RSpec.describe Gitlab::GroupSearchResults do
create(:user, username: 'gob_2018')
- result = described_class.new(user, anything, group, 'gob').objects('users')
-
- expect(result).to eq [user1]
+ is_expected.to eq [user1]
end
it 'returns the user belonging to the subgroup matching the search query' do
@@ -29,9 +60,7 @@ RSpec.describe Gitlab::GroupSearchResults do
create(:user, username: 'gob_2018')
- result = described_class.new(user, anything, group, 'gob').objects('users')
-
- expect(result).to eq [user1]
+ is_expected.to eq [user1]
end
it 'returns the user belonging to the parent group matching the search query' do
@@ -41,9 +70,7 @@ RSpec.describe Gitlab::GroupSearchResults do
create(:user, username: 'gob_2018')
- result = described_class.new(user, anything, group, 'gob').objects('users')
-
- expect(result).to eq [user1]
+ is_expected.to eq [user1]
end
it 'does not return the user belonging to the private subgroup' do
@@ -53,9 +80,7 @@ RSpec.describe Gitlab::GroupSearchResults do
create(:user, username: 'gob_2018')
- result = described_class.new(user, anything, group, 'gob').objects('users')
-
- expect(result).to eq []
+ is_expected.to be_empty
end
it 'does not return the user belonging to an unrelated group' do
@@ -63,15 +88,26 @@ RSpec.describe Gitlab::GroupSearchResults do
unrelated_group = create(:group)
create(:group_member, :developer, user: user, group: unrelated_group)
- result = described_class.new(user, anything, group, 'gob').objects('users')
+ is_expected.to be_empty
+ end
- expect(result).to eq []
+ it 'does not return the user invited to the group' do
+ user = create(:user, username: 'gob_bluth')
+ create(:group_member, :invited, :developer, user: user, group: group)
+
+ is_expected.to be_empty
end
- it 'sets include_subgroups flag by default' do
- result = described_class.new(user, anything, group, 'gob')
+ it 'calls GroupsFinder during execution' do
+ expect(GroupsFinder).to receive(:new).with(user).and_call_original
- expect(result.issuable_params[:include_subgroups]).to eq(true)
+ subject
+ end
+ end
+
+ describe "#issuable_params" do
+ it 'sets include_subgroups flag by default' do
+ expect(results.issuable_params[:include_subgroups]).to eq(true)
end
end
end
diff --git a/spec/lib/gitlab/hashed_storage/migrator_spec.rb b/spec/lib/gitlab/hashed_storage/migrator_spec.rb
index 0549b3128c7..f4f15cab05a 100644
--- a/spec/lib/gitlab/hashed_storage/migrator_spec.rb
+++ b/spec/lib/gitlab/hashed_storage/migrator_spec.rb
@@ -232,4 +232,16 @@ RSpec.describe Gitlab::HashedStorage::Migrator, :redis do
expect(subject.rollback_pending?).to be_falsey
end
end
+
+ describe 'abort_rollback!' do
+ let_it_be(:project) { create(:project, :empty_repo) }
+
+ it 'removes any rollback related scheduled job' do
+ Sidekiq::Testing.disable! do
+ ::HashedStorage::RollbackerWorker.perform_async(1, 5)
+
+ expect { subject.abort_rollback! }.to change { subject.rollback_pending? }.from(true).to(false)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/http_spec.rb b/spec/lib/gitlab/http_spec.rb
index 5c990eb3248..308f7f46251 100644
--- a/spec/lib/gitlab/http_spec.rb
+++ b/spec/lib/gitlab/http_spec.rb
@@ -157,17 +157,6 @@ RSpec.describe Gitlab::HTTP do
described_class.put('http://example.org', write_timeout: 1)
end
end
-
- context 'when default timeouts feature is disabled' do
- it 'does not apply any defaults' do
- stub_feature_flags(http_default_timeouts: false)
- expect(described_class).to receive(:httparty_perform_request).with(
- Net::HTTP::Get, 'http://example.org', open_timeout: 1
- ).and_call_original
-
- described_class.get('http://example.org', open_timeout: 1)
- end
- end
end
describe '.try_get' do
diff --git a/spec/lib/gitlab/i18n/po_linter_spec.rb b/spec/lib/gitlab/i18n/po_linter_spec.rb
index cfa39d95ebd..9165ccfb1ef 100644
--- a/spec/lib/gitlab/i18n/po_linter_spec.rb
+++ b/spec/lib/gitlab/i18n/po_linter_spec.rb
@@ -140,7 +140,7 @@ RSpec.describe Gitlab::I18n::PoLinter do
let(:po_path) { 'spec/fixtures/unescaped_chars.po' }
it 'contains an error' do
- message_id = 'You are going to transfer %{project_name_with_namespace} to another owner. Are you ABSOLUTELY sure?'
+ message_id = 'You are going to transfer %{project_name_with_namespace} to another namespace. Are you ABSOLUTELY sure?'
expected_error = 'translation contains unescaped `%`, escape it using `%%`'
expect(errors[message_id]).to include(expected_error)
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 37b5d8a1021..3126d87a0d6 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -15,6 +15,7 @@ issues:
- resource_iteration_events
- sent_notifications
- sentry_issue
+- issuable_severity
- label_links
- labels
- last_edited_by
@@ -28,6 +29,7 @@ issues:
- merge_requests_closing_issues
- metrics
- timelogs
+- issuable_severity
- issue_assignees
- closed_by
- epic_issue
@@ -120,6 +122,7 @@ merge_requests:
- award_emoji
- author
- assignee
+- reviewers
- updated_by
- milestone
- iteration
@@ -127,6 +130,7 @@ merge_requests:
- resource_label_events
- resource_milestone_events
- resource_state_events
+- resource_iteration_events
- label_links
- labels
- last_edited_by
@@ -147,6 +151,7 @@ merge_requests:
- latest_merge_request_diff
- pipelines_for_merge_request
- merge_request_assignees
+- merge_request_reviewers
- suggestions
- diff_note_positions
- unresolved_notes
@@ -175,9 +180,12 @@ external_pull_requests:
merge_request_diff:
- merge_request
- merge_request_diff_commits
+- merge_request_diff_detail
- merge_request_diff_files
merge_request_diff_commits:
- merge_request_diff
+merge_request_diff_detail:
+- merge_request_diff
merge_request_diff_files:
- merge_request_diff
merge_request_context_commits:
@@ -291,6 +299,7 @@ protected_branches:
- merge_access_levels
- push_access_levels
- unprotect_access_levels
+- approval_project_rules
protected_tags:
- project
- create_access_levels
@@ -357,6 +366,7 @@ project:
- youtrack_service
- custom_issue_tracker_service
- bugzilla_service
+- ewm_service
- external_wiki_service
- mock_ci_service
- mock_deployment_service
@@ -409,6 +419,8 @@ project:
- project_feature
- auto_devops
- pages_domains
+- pages_metadatum
+- pages_deployments
- authorized_users
- project_authorizations
- remote_mirrors
@@ -455,7 +467,6 @@ project:
- approval_merge_request_rules
- approvers
- approver_users
-- pages_domains
- audit_events
- path_locks
- approver_groups
@@ -472,6 +483,8 @@ project:
- dast_site_profiles
- dast_scanner_profiles
- dast_sites
+- dast_site_tokens
+- dast_site_validations
- operations_feature_flags
- operations_feature_flags_client
- operations_feature_flags_user_lists
@@ -493,7 +506,6 @@ project:
- designs
- project_aliases
- external_pull_requests
-- pages_metadatum
- alerts_service
- grafana_integration
- remove_source_branch_after_merge
@@ -536,8 +548,11 @@ timelogs:
- issue
- merge_request
- user
+- note
push_event_payload:
- event
+issuable_severity:
+- issue
issue_assignees:
- issue
- assignee
@@ -613,6 +628,7 @@ boards:
- assignee
- labels
- user_preferences
+- boards_epic_user_preferences
lists:
- user
- milestone
@@ -646,6 +662,8 @@ zoom_meetings:
- issue
sentry_issue:
- issue
+issuable_severity:
+- issue
design_versions: *version
epic:
- subscriptions
@@ -673,8 +691,10 @@ epic:
- due_date_sourcing_epic
- events
- resource_label_events
+- resource_state_events
- user_mentions
- note_authors
+- boards_epic_user_preferences
epic_issue:
- epic
- issue
diff --git a/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb b/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
index 5b6be0b3198..93b6f93f0ec 100644
--- a/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
+++ b/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
@@ -133,12 +133,6 @@ RSpec.describe Gitlab::ImportExport::FastHashSerializer do
expect(builds_count).to eq(1)
end
- it 'has no when YML attributes but only the DB column' do
- expect_any_instance_of(Gitlab::Ci::YamlProcessor).not_to receive(:build_attributes)
-
- subject
- end
-
it 'has pipeline commits' do
expect(subject['ci_pipelines']).not_to be_empty
end
diff --git a/spec/lib/gitlab/import_export/project/tree_saver_spec.rb b/spec/lib/gitlab/import_export/project/tree_saver_spec.rb
index a2c5848f100..ece261e0882 100644
--- a/spec/lib/gitlab/import_export/project/tree_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/project/tree_saver_spec.rb
@@ -381,12 +381,6 @@ RSpec.describe Gitlab::ImportExport::Project::TreeSaver do
expect(project_tree_saver.save).to be true
end
-
- it 'has no when YML attributes but only the DB column' do
- expect_any_instance_of(Gitlab::Ci::YamlProcessor).not_to receive(:build_attributes)
-
- project_tree_saver.save
- end
end
end
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index a108bc94da5..5ca7c5b7a91 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -206,6 +206,7 @@ MergeRequest:
- head_pipeline_id
- discussion_locked
- allow_maintainer_to_push
+- merge_ref_sha
MergeRequestDiff:
- id
- state
diff --git a/spec/lib/gitlab/incident_management/pager_duty/incident_issue_description_spec.rb b/spec/lib/gitlab/incident_management/pager_duty/incident_issue_description_spec.rb
index 6dc96217f09..535cce6aa04 100644
--- a/spec/lib/gitlab/incident_management/pager_duty/incident_issue_description_spec.rb
+++ b/spec/lib/gitlab/incident_management/pager_duty/incident_issue_description_spec.rb
@@ -1,7 +1,6 @@
# frozen_string_literal: true
require 'fast_spec_helper'
-require 'timecop'
RSpec.describe Gitlab::IncidentManagement::PagerDuty::IncidentIssueDescription do
describe '#to_s' do
@@ -50,7 +49,7 @@ RSpec.describe Gitlab::IncidentManagement::PagerDuty::IncidentIssueDescription d
let(:created_at) { nil }
it 'description contains current time in UTC' do
- Timecop.freeze do
+ freeze_time do
now = Time.current.utc.strftime('%d %B %Y, %-l:%M%p (%Z)')
expect(to_s).to include(
diff --git a/spec/lib/gitlab/jira/dvcs_spec.rb b/spec/lib/gitlab/jira/dvcs_spec.rb
new file mode 100644
index 00000000000..09e777b38ea
--- /dev/null
+++ b/spec/lib/gitlab/jira/dvcs_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Jira::Dvcs do
+ describe '.encode_slash' do
+ it 'replaces slash character' do
+ expect(described_class.encode_slash('a/b/c')).to eq('a@b@c')
+ end
+
+ it 'ignores path without slash' do
+ expect(described_class.encode_slash('foo')).to eq('foo')
+ end
+ end
+
+ describe '.decode_slash' do
+ it 'replaces slash character' do
+ expect(described_class.decode_slash('a@b@c')).to eq('a/b/c')
+ end
+
+ it 'ignores path without slash' do
+ expect(described_class.decode_slash('foo')).to eq('foo')
+ end
+ end
+
+ describe '.encode_project_name' do
+ let(:group) { create(:group) }
+ let(:project) { create(:project, group: group) }
+
+ context 'root group' do
+ it 'returns project path' do
+ expect(described_class.encode_project_name(project)).to eq(project.path)
+ end
+ end
+
+ context 'nested group' do
+ let(:group) { create(:group, :nested) }
+
+ it 'returns encoded project full path' do
+ expect(described_class.encode_project_name(project)).to eq(described_class.encode_slash(project.full_path))
+ end
+ end
+ end
+
+ describe '.restore_full_path' do
+ context 'project name is an encoded full path' do
+ it 'returns decoded project path' do
+ expect(described_class.restore_full_path(namespace: 'group1', project: 'group1@group2@project1')).to eq('group1/group2/project1')
+ end
+ end
+
+ context 'project name is not an encoded full path' do
+ it 'assumes the project belongs to the root namespace and returns the full project path based on the passed-in namespace' do
+ expect(described_class.restore_full_path(namespace: 'group1', project: 'project1')).to eq('group1/project1')
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/jira/middleware_spec.rb b/spec/lib/gitlab/jira/middleware_spec.rb
new file mode 100644
index 00000000000..1fe22b145a6
--- /dev/null
+++ b/spec/lib/gitlab/jira/middleware_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Jira::Middleware do
+ let(:app) { double(:app) }
+ let(:middleware) { described_class.new(app) }
+ let(:jira_user_agent) { 'Jira DVCS Connector Vertigo/5.0.0-D20170810T012915' }
+
+ describe '.jira_dvcs_connector?' do
+ it 'returns true when DVCS connector' do
+ expect(described_class.jira_dvcs_connector?('HTTP_USER_AGENT' => jira_user_agent)).to eq(true)
+ end
+
+ it 'returns true if user agent starts with "Jira DVCS Connector"' do
+ expect(described_class.jira_dvcs_connector?('HTTP_USER_AGENT' => 'Jira DVCS Connector')).to eq(true)
+ end
+
+ it 'returns false when not DVCS connector' do
+ expect(described_class.jira_dvcs_connector?('HTTP_USER_AGENT' => 'pokemon')).to eq(false)
+ end
+ end
+
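+ # #call rewrites a 'token ...' Authorization header to 'Bearer ...' when the request comes from the Jira DVCS connector user agent.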
+ describe '#call' do
+ it 'adjusts HTTP_AUTHORIZATION env when request from Jira DVCS user agent' do
+ expect(app).to receive(:call).with('HTTP_USER_AGENT' => jira_user_agent,
+ 'HTTP_AUTHORIZATION' => 'Bearer hash-123')
+
+ middleware.call('HTTP_USER_AGENT' => jira_user_agent, 'HTTP_AUTHORIZATION' => 'token hash-123')
+ end
+
+ it 'does not change HTTP_AUTHORIZATION env when request is not from Jira DVCS user agent' do
+ env = { 'HTTP_USER_AGENT' => 'Mozilla/5.0', 'HTTP_AUTHORIZATION' => 'token hash-123' }
+
+ expect(app).to receive(:call).with(env)
+
+ middleware.call(env)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/kas_spec.rb b/spec/lib/gitlab/kas_spec.rb
new file mode 100644
index 00000000000..ce22f36e9fd
--- /dev/null
+++ b/spec/lib/gitlab/kas_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Kas do
+ let(:jwt_secret) { SecureRandom.random_bytes(described_class::SECRET_LENGTH) }
+
+ before do
+ allow(described_class).to receive(:secret).and_return(jwt_secret)
+ end
+
+ describe '.verify_api_request' do
+ let(:payload) { { 'iss' => described_class::JWT_ISSUER } }
+
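+ # verify_api_request decodes the JWT from the internal API request header using the shared secret and returns nil when verification fails.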
+ it 'returns nil if it fails to validate the JWT' do
+ encoded_token = JWT.encode(payload, 'wrongsecret', 'HS256')
+ headers = { described_class::INTERNAL_API_REQUEST_HEADER => encoded_token }
+
+ expect(described_class.verify_api_request(headers)).to be_nil
+ end
+
+ it 'returns the decoded JWT' do
+ encoded_token = JWT.encode(payload, described_class.secret, 'HS256')
+ headers = { described_class::INTERNAL_API_REQUEST_HEADER => encoded_token }
+
+ expect(described_class.verify_api_request(headers)).to eq([{ "iss" => described_class::JWT_ISSUER }, { "alg" => "HS256" }])
+ end
+ end
+
+ describe '.secret_path' do
+ it 'returns default gitlab config' do
+ expect(described_class.secret_path).to eq(Gitlab.config.gitlab_kas.secret_file)
+ end
+ end
+
+ describe '.ensure_secret!' do
+ context 'secret file exists' do
+ before do
+ allow(File).to receive(:exist?).with(Gitlab.config.gitlab_kas.secret_file).and_return(true)
+ end
+
+ it 'does not call write_secret' do
+ expect(described_class).not_to receive(:write_secret)
+
+ described_class.ensure_secret!
+ end
+ end
+
+ context 'secret file does not exist' do
+ before do
+ allow(File).to receive(:exist?).with(Gitlab.config.gitlab_kas.secret_file).and_return(false)
+ end
+
+ it 'calls write_secret' do
+ expect(described_class).to receive(:write_secret)
+
+ described_class.ensure_secret!
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/kubernetes/cilium_network_policy_spec.rb b/spec/lib/gitlab/kubernetes/cilium_network_policy_spec.rb
index 9600a70a95d..3f5661d4ca6 100644
--- a/spec/lib/gitlab/kubernetes/cilium_network_policy_spec.rb
+++ b/spec/lib/gitlab/kubernetes/cilium_network_policy_spec.rb
@@ -7,23 +7,27 @@ RSpec.describe Gitlab::Kubernetes::CiliumNetworkPolicy do
described_class.new(
name: name,
namespace: namespace,
- creation_timestamp: '2020-04-14T00:08:30Z',
- endpoint_selector: endpoint_selector,
+ description: description,
+ selector: selector,
ingress: ingress,
egress: egress,
- description: description
+ labels: labels,
+ resource_version: resource_version
)
end
let(:resource) do
::Kubeclient::Resource.new(
- kind: partial_class_name,
- apiVersion: "cilium.io/v2",
+ apiVersion: Gitlab::Kubernetes::CiliumNetworkPolicy::API_VERSION,
+ kind: Gitlab::Kubernetes::CiliumNetworkPolicy::KIND,
metadata: { name: name, namespace: namespace, resourceVersion: resource_version },
- spec: { endpointSelector: endpoint_selector, ingress: ingress, egress: nil }
+ spec: { endpointSelector: endpoint_selector, ingress: ingress, egress: egress },
+ description: description
)
end
+ let(:selector) { endpoint_selector }
+ let(:labels) { nil }
let(:name) { 'example-name' }
let(:namespace) { 'example-namespace' }
let(:endpoint_selector) { { matchLabels: { role: 'db' } } }
@@ -48,34 +52,14 @@ RSpec.describe Gitlab::Kubernetes::CiliumNetworkPolicy do
]
end
- include_examples 'network policy common specs' do
- let(:selector) { endpoint_selector}
- let(:policy) do
- described_class.new(
- name: name,
- namespace: namespace,
- selector: selector,
- ingress: ingress,
- labels: labels,
- resource_version: resource_version
- )
- end
-
- let(:spec) { { endpointSelector: selector, ingress: ingress, egress: nil } }
- let(:metadata) { { name: name, namespace: namespace, resourceVersion: resource_version } }
- end
-
- describe '#generate' do
- subject { policy.generate }
-
- it { is_expected.to eq(resource) }
- end
+ include_examples 'network policy common specs'
describe '.from_yaml' do
let(:manifest) do
<<~POLICY
apiVersion: cilium.io/v2
kind: CiliumNetworkPolicy
+ description: example-description
metadata:
name: example-name
namespace: example-namespace
@@ -88,6 +72,9 @@ RSpec.describe Gitlab::Kubernetes::CiliumNetworkPolicy do
- fromEndpoints:
- matchLabels:
project: myproject
+ egress:
+ - ports:
+ - port: 5978
POLICY
end
@@ -167,20 +154,22 @@ RSpec.describe Gitlab::Kubernetes::CiliumNetworkPolicy do
describe '.from_resource' do
let(:resource) do
::Kubeclient::Resource.new(
+ description: description,
metadata: {
name: name, namespace: namespace, creationTimestamp: '2020-04-14T00:08:30Z',
labels: { app: 'foo' }, resourceVersion: resource_version
},
- spec: { endpointSelector: endpoint_selector, ingress: ingress, egress: nil, labels: nil, description: nil }
+ spec: { endpointSelector: endpoint_selector, ingress: ingress, egress: nil, labels: nil }
)
end
let(:generated_resource) do
::Kubeclient::Resource.new(
- kind: partial_class_name,
- apiVersion: "cilium.io/v2",
+ apiVersion: Gitlab::Kubernetes::CiliumNetworkPolicy::API_VERSION,
+ kind: Gitlab::Kubernetes::CiliumNetworkPolicy::KIND,
+ description: description,
metadata: { name: name, namespace: namespace, resourceVersion: resource_version, labels: { app: 'foo' } },
- spec: { endpointSelector: endpoint_selector, ingress: ingress, egress: nil }
+ spec: { endpointSelector: endpoint_selector, ingress: ingress }
)
end
@@ -197,7 +186,7 @@ RSpec.describe Gitlab::Kubernetes::CiliumNetworkPolicy do
context 'with resource without metadata' do
let(:resource) do
::Kubeclient::Resource.new(
- spec: { endpointSelector: endpoint_selector, ingress: ingress, egress: nil, labels: nil, description: nil }
+ spec: { endpointSelector: endpoint_selector, ingress: ingress, egress: nil, labels: nil }
)
end
@@ -214,4 +203,50 @@ RSpec.describe Gitlab::Kubernetes::CiliumNetworkPolicy do
it { is_expected.to be_nil }
end
end
+
+ describe '#resource' do
+ subject { policy.resource }
+
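+ # #resource builds the raw hash for the Kubeclient resource, dropping resourceVersion and egress when they are nil.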
+ let(:resource) do
+ {
+ apiVersion: Gitlab::Kubernetes::CiliumNetworkPolicy::API_VERSION,
+ kind: Gitlab::Kubernetes::CiliumNetworkPolicy::KIND,
+ metadata: { name: name, namespace: namespace, resourceVersion: resource_version },
+ spec: { endpointSelector: endpoint_selector, ingress: ingress, egress: egress },
+ description: description
+ }
+ end
+
+ it { is_expected.to eq(resource) }
+
+ context 'with labels' do
+ let(:labels) { { app: 'foo' } }
+
+ before do
+ resource[:metadata][:labels] = { app: 'foo' }
+ end
+
+ it { is_expected.to eq(resource) }
+ end
+
+ context 'without resource_version' do
+ let(:resource_version) { nil }
+
+ before do
+ resource[:metadata].delete(:resourceVersion)
+ end
+
+ it { is_expected.to eq(resource) }
+ end
+
+ context 'with nil egress' do
+ let(:egress) { nil }
+
+ before do
+ resource[:spec].delete(:egress)
+ end
+
+ it { is_expected.to eq(resource) }
+ end
+ end
end
diff --git a/spec/lib/gitlab/kubernetes/kube_client_spec.rb b/spec/lib/gitlab/kubernetes/kube_client_spec.rb
index 8211b096d3b..90c11f29855 100644
--- a/spec/lib/gitlab/kubernetes/kube_client_spec.rb
+++ b/spec/lib/gitlab/kubernetes/kube_client_spec.rb
@@ -376,6 +376,7 @@ RSpec.describe Gitlab::Kubernetes::KubeClient do
[
:create_network_policy,
:get_network_policies,
+ :get_network_policy,
:update_network_policy,
:delete_network_policy
].each do |method|
@@ -400,6 +401,7 @@ RSpec.describe Gitlab::Kubernetes::KubeClient do
[
:create_cilium_network_policy,
:get_cilium_network_policies,
+ :get_cilium_network_policy,
:update_cilium_network_policy,
:delete_cilium_network_policy
].each do |method|
diff --git a/spec/lib/gitlab/kubernetes/network_policy_spec.rb b/spec/lib/gitlab/kubernetes/network_policy_spec.rb
index 5d1dd5dec59..d3640c61d94 100644
--- a/spec/lib/gitlab/kubernetes/network_policy_spec.rb
+++ b/spec/lib/gitlab/kubernetes/network_policy_spec.rb
@@ -7,21 +7,22 @@ RSpec.describe Gitlab::Kubernetes::NetworkPolicy do
described_class.new(
name: name,
namespace: namespace,
- creation_timestamp: '2020-04-14T00:08:30Z',
- selector: pod_selector,
- policy_types: %w(Ingress Egress),
+ selector: selector,
ingress: ingress,
- egress: egress
+ labels: labels
)
end
let(:resource) do
::Kubeclient::Resource.new(
+ kind: Gitlab::Kubernetes::NetworkPolicy::KIND,
metadata: { name: name, namespace: namespace },
spec: { podSelector: pod_selector, policyTypes: %w(Ingress), ingress: ingress, egress: nil }
)
end
+ let(:selector) { pod_selector }
+ let(:labels) { nil }
let(:name) { 'example-name' }
let(:namespace) { 'example-namespace' }
let(:pod_selector) { { matchLabels: { role: 'db' } } }
@@ -44,27 +45,7 @@ RSpec.describe Gitlab::Kubernetes::NetworkPolicy do
]
end
- include_examples 'network policy common specs' do
- let(:selector) { pod_selector }
- let(:policy) do
- described_class.new(
- name: name,
- namespace: namespace,
- selector: selector,
- ingress: ingress,
- labels: labels
- )
- end
-
- let(:spec) { { podSelector: selector, policyTypes: ["Ingress"], ingress: ingress, egress: nil } }
- let(:metadata) { { name: name, namespace: namespace } }
- end
-
- describe '#generate' do
- subject { policy.generate }
-
- it { is_expected.to eq(resource) }
- end
+ include_examples 'network policy common specs'
describe '.from_yaml' do
let(:manifest) do
@@ -180,6 +161,7 @@ RSpec.describe Gitlab::Kubernetes::NetworkPolicy do
let(:generated_resource) do
::Kubeclient::Resource.new(
+ kind: Gitlab::Kubernetes::NetworkPolicy::KIND,
metadata: { name: name, namespace: namespace, labels: { app: 'foo' } },
spec: { podSelector: pod_selector, policyTypes: %w(Ingress), ingress: ingress, egress: nil }
)
@@ -215,4 +197,31 @@ RSpec.describe Gitlab::Kubernetes::NetworkPolicy do
it { is_expected.to be_nil }
end
end
+
+ describe '#resource' do
+ subject { policy.resource }
+
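+ # #resource returns the plain hash representation, including labels in the metadata when they are present.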
+ let(:resource) do
+ {
+ kind: Gitlab::Kubernetes::NetworkPolicy::KIND,
+ metadata: { name: name, namespace: namespace },
+ spec: { podSelector: pod_selector, policyTypes: %w(Ingress), ingress: ingress, egress: nil }
+ }
+ end
+
+ it { is_expected.to eq(resource) }
+
+ context 'with labels' do
+ let(:labels) { { app: 'foo' } }
+ let(:resource) do
+ {
+ kind: Gitlab::Kubernetes::NetworkPolicy::KIND,
+ metadata: { name: name, namespace: namespace, labels: { app: 'foo' } },
+ spec: { podSelector: pod_selector, policyTypes: %w(Ingress), ingress: ingress, egress: nil }
+ }
+ end
+
+ it { is_expected.to eq(resource) }
+ end
+ end
end
diff --git a/spec/lib/gitlab/lfs/client_spec.rb b/spec/lib/gitlab/lfs/client_spec.rb
new file mode 100644
index 00000000000..03563a632d6
--- /dev/null
+++ b/spec/lib/gitlab/lfs/client_spec.rb
@@ -0,0 +1,148 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Lfs::Client do
+ let(:base_url) { "https://example.com" }
+ let(:username) { 'user' }
+ let(:password) { 'password' }
+ let(:credentials) { { user: username, password: password, auth_method: 'password' } }
+
+ let(:basic_auth_headers) do
+ { 'Authorization' => "Basic #{Base64.strict_encode64("#{username}:#{password}")}" }
+ end
+
+ let(:upload_action) do
+ {
+ "href" => "#{base_url}/some/file",
+ "header" => {
+ "Key" => "value"
+ }
+ }
+ end
+
+ subject(:lfs_client) { described_class.new(base_url, credentials: credentials) }
+
+ describe '#batch' do
+ let_it_be(:objects) { create_list(:lfs_object, 3) }
+
+ context 'server returns 200 OK' do
+ it 'makes a successful batch request' do
+ stub = stub_batch(
+ objects: objects,
+ headers: basic_auth_headers
+ ).to_return(
+ status: 200,
+ body: { 'objects' => 'anything', 'transfer' => 'basic' }.to_json,
+ headers: { 'Content-Type' => 'application/vnd.git-lfs+json' }
+ )
+
+ result = lfs_client.batch('upload', objects)
+
+ expect(stub).to have_been_requested
+ expect(result).to eq('objects' => 'anything', 'transfer' => 'basic')
+ end
+ end
+
+ context 'server returns 400 error' do
+ it 'raises an error' do
+ stub_batch(objects: objects, headers: basic_auth_headers).to_return(status: 400)
+
+ expect { lfs_client.batch('upload', objects) }.to raise_error(/Failed/)
+ end
+ end
+
+ context 'server returns 500 error' do
+ it 'raises an error' do
+ stub_batch(objects: objects, headers: basic_auth_headers).to_return(status: 500)
+
+ expect { lfs_client.batch('upload', objects) }.to raise_error(/Failed/)
+ end
+ end
+
+ context 'server returns an exotic transfer method' do
+ it 'raises an error' do
+ stub_batch(
+ objects: objects,
+ headers: basic_auth_headers
+ ).to_return(
+ status: 200,
+ body: { 'transfer' => 'carrier-pigeon' }.to_json,
+ headers: { 'Content-Type' => 'application/vnd.git-lfs+json' }
+ )
+
+ expect { lfs_client.batch('upload', objects) }.to raise_error(/Unsupported transfer/)
+ end
+ end
+
+ def stub_batch(objects:, headers:, operation: 'upload', transfer: 'basic')
+ objects = objects.map { |o| { oid: o.oid, size: o.size } }
+ body = { operation: operation, transfers: [transfer], objects: objects }.to_json
+
+ stub_request(:post, base_url + '/info/lfs/objects/batch').with(body: body, headers: headers)
+ end
+ end
+
+ describe "#upload" do
+ let_it_be(:object) { create(:lfs_object) }
+
+ context 'server returns 200 OK to an authenticated request' do
+ it "makes an HTTP PUT with expected parameters" do
+ stub_upload(object: object, headers: upload_action['header']).to_return(status: 200)
+
+ lfs_client.upload(object, upload_action, authenticated: true)
+ end
+ end
+
+ context 'server returns 200 OK to an unauthenticated request' do
+ it "makes an HTTP PUT with expected parameters" do
+ stub = stub_upload(
+ object: object,
+ headers: basic_auth_headers.merge(upload_action['header'])
+ ).to_return(status: 200)
+
+ lfs_client.upload(object, upload_action, authenticated: false)
+
+ expect(stub).to have_been_requested
+ end
+ end
+
+ context 'LFS object has no file' do
+ let(:object) { LfsObject.new }
+
+ it 'makes an HTTP PUT with expected parameters' do
+ stub = stub_upload(
+ object: object,
+ headers: upload_action['header']
+ ).to_return(status: 200)
+
+ lfs_client.upload(object, upload_action, authenticated: true)
+
+ expect(stub).to have_been_requested
+ end
+ end
+
+ context 'server returns 400 error' do
+ it 'raises an error' do
+ stub_upload(object: object, headers: upload_action['header']).to_return(status: 400)
+
+ expect { lfs_client.upload(object, upload_action, authenticated: true) }.to raise_error(/Failed/)
+ end
+ end
+
+ context 'server returns 500 error' do
+ it 'raises an error' do
+ stub_upload(object: object, headers: upload_action['header']).to_return(status: 500)
+
+ expect { lfs_client.upload(object, upload_action, authenticated: true) }.to raise_error(/Failed/)
+ end
+ end
+
+ def stub_upload(object:, headers:)
+ stub_request(:put, upload_action['href']).with(
+ body: object.file.read,
+ headers: headers.merge('Content-Length' => object.size.to_s)
+ )
+ end
+ end
+end
diff --git a/spec/lib/gitlab/log_timestamp_formatter_spec.rb b/spec/lib/gitlab/log_timestamp_formatter_spec.rb
index e06baa2324f..b51d0fec15e 100644
--- a/spec/lib/gitlab/log_timestamp_formatter_spec.rb
+++ b/spec/lib/gitlab/log_timestamp_formatter_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::LogTimestampFormatter do
let(:formatted_timestamp) { Time.now.utc.iso8601(3) }
it 'logs the timestamp in UTC and ISO8601.3 format' do
- Timecop.freeze(Time.now) do
+ freeze_time do
expect(subject.call('', Time.now, '', '')).to include formatted_timestamp
end
end
diff --git a/spec/lib/gitlab/metrics/dashboard/importer_spec.rb b/spec/lib/gitlab/metrics/dashboard/importer_spec.rb
new file mode 100644
index 00000000000..8b705395a2c
--- /dev/null
+++ b/spec/lib/gitlab/metrics/dashboard/importer_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Metrics::Dashboard::Importer do
+ include MetricsDashboardHelpers
+
+ let_it_be(:dashboard_path) { '.gitlab/dashboards/sample_dashboard.yml' }
+ let_it_be(:project) { create(:project) }
+
+ before do
+ allow(subject).to receive(:dashboard_hash).and_return(dashboard_hash)
+ end
+
+ subject { described_class.new(dashboard_path, project) }
+
+ describe '#execute' do
+ context 'valid dashboard hash' do
+ let(:dashboard_hash) { load_sample_dashboard }
+
+ it 'imports metrics to database' do
+ expect { subject.execute }
+ .to change { PrometheusMetric.count }.from(0).to(3)
+ end
+ end
+
+ context 'invalid dashboard hash' do
+ let(:dashboard_hash) { {} }
+
+ it 'returns false' do
+ expect(subject.execute).to be(false)
+ end
+ end
+ end
+
+ describe '#execute!' do
+ context 'valid dashboard hash' do
+ let(:dashboard_hash) { load_sample_dashboard }
+
+ it 'imports metrics to database' do
+ expect { subject.execute! }
+ .to change { PrometheusMetric.count }.from(0).to(3)
+ end
+ end
+
+ context 'invalid dashboard hash' do
+ let(:dashboard_hash) { {} }
+
+ it 'raises error' do
+ expect { subject.execute! }.to raise_error(Gitlab::Metrics::Dashboard::Validator::Errors::SchemaValidationError,
+ 'root is missing required keys: dashboard, panel_groups')
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/dashboard/importers/prometheus_metrics_spec.rb b/spec/lib/gitlab/metrics/dashboard/importers/prometheus_metrics_spec.rb
new file mode 100644
index 00000000000..09d5e048f6a
--- /dev/null
+++ b/spec/lib/gitlab/metrics/dashboard/importers/prometheus_metrics_spec.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Metrics::Dashboard::Importers::PrometheusMetrics do
+ include MetricsDashboardHelpers
+
+ describe '#execute' do
+ let(:project) { create(:project) }
+ let(:dashboard_path) { 'path/to/dashboard.yml' }
+
+ subject { described_class.new(dashboard_hash, project: project, dashboard_path: dashboard_path) }
+
+ context 'valid dashboard' do
+ let(:dashboard_hash) { load_sample_dashboard }
+
+ context 'with all new metrics' do
+ it 'creates PrometheusMetrics' do
+ expect { subject.execute }.to change { PrometheusMetric.count }.by(3)
+ end
+ end
+
+ context 'with existing metrics' do
+ let!(:existing_metric) do
+ create(:prometheus_metric, {
+ project: project,
+ identifier: 'metric_b',
+ title: 'overwrite',
+ y_label: 'overwrite',
+ query: 'overwrite',
+ unit: 'overwrite',
+ legend: 'overwrite'
+ })
+ end
+
+ it 'updates existing PrometheusMetrics' do
+ described_class.new(dashboard_hash, project: project, dashboard_path: dashboard_path).execute
+
+ expect(existing_metric.reload.attributes.with_indifferent_access).to include({
+ title: 'Super Chart B',
+ y_label: 'y_label',
+ query: 'query',
+ unit: 'unit',
+ legend: 'Legend Label'
+ })
+ end
+
+ it 'creates new PrometheusMetrics' do
+ expect { subject.execute }.to change { PrometheusMetric.count }.by(2)
+ end
+
+ context 'with stale metrics' do
+ let!(:stale_metric) do
+ create(:prometheus_metric,
+ project: project,
+ identifier: 'stale_metric',
+ dashboard_path: dashboard_path,
+ group: 3
+ )
+ end
+
+ it 'deletes stale metrics' do
+ subject.execute
+
+ expect { stale_metric.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+ end
+ end
+
+ context 'invalid dashboard' do
+ let(:dashboard_hash) { {} }
+
+ it 'returns false' do
+ expect(subject.execute).to eq(false)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/dashboard/stages/track_panel_type_spec.rb b/spec/lib/gitlab/metrics/dashboard/stages/track_panel_type_spec.rb
index d9987b67127..60010b9f257 100644
--- a/spec/lib/gitlab/metrics/dashboard/stages/track_panel_type_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/stages/track_panel_type_spec.rb
@@ -8,20 +8,19 @@ RSpec.describe Gitlab::Metrics::Dashboard::Stages::TrackPanelType do
let(:project) { build_stubbed(:project) }
let(:environment) { build_stubbed(:environment, project: project) }
- describe '#transform!' do
+ describe '#transform!', :snowplow do
subject { described_class.new(project, dashboard, environment: environment) }
let(:dashboard) { load_sample_dashboard.deep_symbolize_keys }
it 'creates tracking event' do
- stub_application_setting(snowplow_enabled: true, snowplow_collector_hostname: 'localhost')
- allow(Gitlab::Tracking).to receive(:event).and_call_original
-
subject.transform!
- expect(Gitlab::Tracking).to have_received(:event)
- .with('MetricsDashboard::Chart', 'chart_rendered', { label: 'area-chart' })
- .at_least(:once)
+ expect_snowplow_event(
+ category: 'MetricsDashboard::Chart',
+ action: 'chart_rendered',
+ label: 'area-chart'
+ )
end
end
end
diff --git a/spec/lib/gitlab/metrics/dashboard/transformers/yml/v1/prometheus_metrics_spec.rb b/spec/lib/gitlab/metrics/dashboard/transformers/yml/v1/prometheus_metrics_spec.rb
new file mode 100644
index 00000000000..3af8b51c889
--- /dev/null
+++ b/spec/lib/gitlab/metrics/dashboard/transformers/yml/v1/prometheus_metrics_spec.rb
@@ -0,0 +1,99 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Metrics::Dashboard::Transformers::Yml::V1::PrometheusMetrics do
+ include MetricsDashboardHelpers
+
+ describe '#execute' do
+ subject { described_class.new(dashboard_hash) }
+
+ context 'valid dashboard' do
+ let_it_be(:dashboard_hash) do
+ {
+ panel_groups: [{
+ panels: [
+ {
+ title: 'Panel 1 title',
+ y_label: 'Panel 1 y_label',
+ metrics: [
+ {
+ query_range: 'Panel 1 metric 1 query_range',
+ unit: 'Panel 1 metric 1 unit',
+ label: 'Panel 1 metric 1 label',
+ id: 'Panel 1 metric 1 id'
+ },
+ {
+ query: 'Panel 1 metric 2 query',
+ unit: 'Panel 1 metric 2 unit',
+ label: 'Panel 1 metric 2 label',
+ id: 'Panel 1 metric 2 id'
+ }
+ ]
+ },
+ {
+ title: 'Panel 2 title',
+ y_label: 'Panel 2 y_label',
+ metrics: [{
+ query_range: 'Panel 2 metric 1 query_range',
+ unit: 'Panel 2 metric 1 unit',
+ label: 'Panel 2 metric 1 label',
+ id: 'Panel 2 metric 1 id'
+ }]
+ }
+ ]
+ }]
+ }
+ end
+
+ let(:expected_metrics) do
+ [
+ {
+ title: 'Panel 1 title',
+ y_label: 'Panel 1 y_label',
+ query: "Panel 1 metric 1 query_range",
+ unit: 'Panel 1 metric 1 unit',
+ legend: 'Panel 1 metric 1 label',
+ identifier: 'Panel 1 metric 1 id',
+ group: 3,
+ common: false
+ },
+ {
+ title: 'Panel 1 title',
+ y_label: 'Panel 1 y_label',
+ query: 'Panel 1 metric 2 query',
+ unit: 'Panel 1 metric 2 unit',
+ legend: 'Panel 1 metric 2 label',
+ identifier: 'Panel 1 metric 2 id',
+ group: 3,
+ common: false
+ },
+ {
+ title: 'Panel 2 title',
+ y_label: 'Panel 2 y_label',
+ query: 'Panel 2 metric 1 query_range',
+ unit: 'Panel 2 metric 1 unit',
+ legend: 'Panel 2 metric 1 label',
+ identifier: 'Panel 2 metric 1 id',
+ group: 3,
+ common: false
+ }
+ ]
+ end
+
+ it 'returns collection of metrics with correct attributes' do
+ expect(subject.execute).to match_array(expected_metrics)
+ end
+ end
+
+ context 'invalid dashboard' do
+ let(:dashboard_hash) { {} }
+
+ it 'raises missing attribute error' do
+ expect { subject.execute }.to raise_error(
+ ::Gitlab::Metrics::Dashboard::Transformers::Errors::MissingAttribute, "Missing attribute: 'panel_groups'"
+ )
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/dashboard/url_spec.rb b/spec/lib/gitlab/metrics/dashboard/url_spec.rb
index 205e1000376..830d43169a9 100644
--- a/spec/lib/gitlab/metrics/dashboard/url_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/url_spec.rb
@@ -6,11 +6,12 @@ RSpec.describe Gitlab::Metrics::Dashboard::Url do
include Gitlab::Routing.url_helpers
describe '#metrics_regex' do
+ let(:environment_id) { 1 }
let(:url_params) do
[
'foo',
'bar',
- 1,
+ environment_id,
{
start: '2019-08-02T05:43:09.000Z',
dashboard: 'config/prometheus/common_metrics.yml',
@@ -33,12 +34,42 @@ RSpec.describe Gitlab::Metrics::Dashboard::Url do
subject { described_class.metrics_regex }
- context 'for metrics route' do
+ context 'for /-/environments/:environment_id/metrics route' do
let(:url) { metrics_namespace_project_environment_url(*url_params) }
it_behaves_like 'regex which matches url when expected'
end
+ context 'for /-/metrics?environment=:environment_id route' do
+ let(:url) { namespace_project_metrics_dashboard_url(*url_params) }
+ let(:url_params) do
+ [
+ 'namespace1',
+ 'project1',
+ {
+ environment: environment_id,
+ start: '2019-08-02T05:43:09.000Z',
+ dashboard: 'config/prometheus/common_metrics.yml',
+ group: 'awesome group',
+ anchor: 'title'
+ }
+ ]
+ end
+
+ let(:expected_params) do
+ {
+ 'url' => url,
+ 'namespace' => 'namespace1',
+ 'project' => 'project1',
+ 'environment' => "#{environment_id}",
+ 'query' => "?dashboard=config%2Fprometheus%2Fcommon_metrics.yml&environment=#{environment_id}&group=awesome+group&start=2019-08-02T05%3A43%3A09.000Z",
+ 'anchor' => '#title'
+ }
+ end
+
+ it_behaves_like 'regex which matches url when expected'
+ end
+
context 'for metrics_dashboard route' do
let(:url) { metrics_dashboard_namespace_project_environment_url(*url_params) }
@@ -47,16 +78,19 @@ RSpec.describe Gitlab::Metrics::Dashboard::Url do
end
describe '#clusters_regex' do
- let(:url) do
- Gitlab::Routing.url_helpers.namespace_project_cluster_url(
+ let(:url) { Gitlab::Routing.url_helpers.namespace_project_cluster_url(*url_params) }
+ let(:url_params) do
+ [
'foo',
'bar',
'1',
- group: 'Cluster Health',
- title: 'Memory Usage',
- y_label: 'Memory 20(GiB)',
- anchor: 'title'
- )
+ {
+ group: 'Cluster Health',
+ title: 'Memory Usage',
+ y_label: 'Memory 20(GiB)',
+ anchor: 'title'
+ }
+ ]
end
let(:expected_params) do
@@ -73,6 +107,27 @@ RSpec.describe Gitlab::Metrics::Dashboard::Url do
subject { described_class.clusters_regex }
it_behaves_like 'regex which matches url when expected'
+
+ context 'for metrics_dashboard route' do
+ let(:url) do
+ metrics_dashboard_namespace_project_cluster_url(
+ *url_params, cluster_type: :project, embedded: true, format: :json
+ )
+ end
+
+ let(:expected_params) do
+ {
+ 'url' => url,
+ 'namespace' => 'foo',
+ 'project' => 'bar',
+ 'cluster_id' => '1',
+ 'query' => '?cluster_type=project&embedded=true',
+ 'anchor' => nil
+ }
+ end
+
+ it_behaves_like 'regex which matches url when expected'
+ end
end
describe '#grafana_regex' do
@@ -103,15 +158,18 @@ RSpec.describe Gitlab::Metrics::Dashboard::Url do
end
describe '#alert_regex' do
- let(:url) do
- Gitlab::Routing.url_helpers.metrics_dashboard_namespace_project_prometheus_alert_url(
+ let(:url) { Gitlab::Routing.url_helpers.metrics_dashboard_namespace_project_prometheus_alert_url(*url_params) }
+ let(:url_params) do
+ [
'foo',
'bar',
'1',
- start: '2020-02-10T12:59:49.938Z',
- end: '2020-02-10T20:59:49.938Z',
- anchor: "anchor"
- )
+ {
+ start: '2020-02-10T12:59:49.938Z',
+ end: '2020-02-10T20:59:49.938Z',
+ anchor: "anchor"
+ }
+ ]
end
let(:expected_params) do
@@ -128,6 +186,21 @@ RSpec.describe Gitlab::Metrics::Dashboard::Url do
subject { described_class.alert_regex }
it_behaves_like 'regex which matches url when expected'
+
+ it_behaves_like 'regex which matches url when expected' do
+ let(:url) { Gitlab::Routing.url_helpers.metrics_dashboard_namespace_project_prometheus_alert_url(*url_params, format: :json) }
+
+ let(:expected_params) do
+ {
+ 'url' => url,
+ 'namespace' => 'foo',
+ 'project' => 'bar',
+ 'alert' => '1',
+ 'query' => nil,
+ 'anchor' => nil
+ }
+ end
+ end
end
describe '#build_dashboard_url' do
diff --git a/spec/lib/gitlab/metrics/dashboard/validator/errors_spec.rb b/spec/lib/gitlab/metrics/dashboard/validator/errors_spec.rb
index f0db1bd0d33..fdbba6c31b5 100644
--- a/spec/lib/gitlab/metrics/dashboard/validator/errors_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/validator/errors_spec.rb
@@ -34,6 +34,17 @@ RSpec.describe Gitlab::Metrics::Dashboard::Validator::Errors do
it { is_expected.to eq 'root is missing required keys: one' }
end
+
+ context 'when there is type mismatch' do
+ %w(null string boolean integer number array object).each do |expected_type|
+ context "on type: #{expected_type}" do
+ let(:type) { expected_type }
+ let(:details) { nil }
+
+ it { is_expected.to eq "'property_name' at root is not of type: #{expected_type}" }
+ end
+ end
+ end
end
context 'for nested object' do
@@ -52,8 +63,6 @@ RSpec.describe Gitlab::Metrics::Dashboard::Validator::Errors do
let(:type) { expected_type }
let(:details) { nil }
- subject { described_class.new(error_hash).message }
-
it { is_expected.to eq "'property_name' at /nested_objects/0 is not of type: #{expected_type}" }
end
end
diff --git a/spec/lib/gitlab/metrics/dashboard/validator_spec.rb b/spec/lib/gitlab/metrics/dashboard/validator_spec.rb
index c4cda271408..eb67ea2b7da 100644
--- a/spec/lib/gitlab/metrics/dashboard/validator_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/validator_spec.rb
@@ -143,4 +143,56 @@ RSpec.describe Gitlab::Metrics::Dashboard::Validator do
end
end
end
+
+ describe '#errors' do
+ context 'valid dashboard schema' do
+ it 'returns no errors' do
+ expect(described_class.errors(valid_dashboard)).to eq []
+ end
+
+ context 'with duplicate metric_ids' do
+ it 'returns errors' do
+ expect(described_class.errors(duplicate_id_dashboard)).to eq [Gitlab::Metrics::Dashboard::Validator::Errors::DuplicateMetricIds.new]
+ end
+ end
+
+ context 'with dashboard_path and project' do
+ subject { described_class.errors(valid_dashboard, dashboard_path: 'test/path.yml', project: project) }
+
+ context 'with no conflicting metric identifiers in db' do
+ it { is_expected.to eq [] }
+ end
+
+ context 'with metric identifier present in current dashboard' do
+ before do
+ create(:prometheus_metric,
+ identifier: 'metric_a1',
+ dashboard_path: 'test/path.yml',
+ project: project
+ )
+ end
+
+ it { is_expected.to eq [] }
+ end
+
+ context 'with metric identifier present in another dashboard' do
+ before do
+ create(:prometheus_metric,
+ identifier: 'metric_a1',
+ dashboard_path: 'some/other/dashboard/path.yml',
+ project: project
+ )
+ end
+
+ it { is_expected.to eq [Gitlab::Metrics::Dashboard::Validator::Errors::DuplicateMetricIds.new] }
+ end
+ end
+ end
+
+ context 'invalid dashboard schema' do
+ it 'returns collection of validation errors' do
+ expect(described_class.errors(invalid_dashboard)).to all be_kind_of(Gitlab::Metrics::Dashboard::Validator::Errors::SchemaValidationError)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/metrics/exporter/sidekiq_exporter_spec.rb b/spec/lib/gitlab/metrics/exporter/sidekiq_exporter_spec.rb
index 2c5ef09f799..01cf47a7c58 100644
--- a/spec/lib/gitlab/metrics/exporter/sidekiq_exporter_spec.rb
+++ b/spec/lib/gitlab/metrics/exporter/sidekiq_exporter_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe Gitlab::Metrics::Exporter::SidekiqExporter do
monitoring: {
sidekiq_exporter: {
enabled: true,
+ log_enabled: false,
port: 0,
address: '127.0.0.1'
}
@@ -25,6 +26,29 @@ RSpec.describe Gitlab::Metrics::Exporter::SidekiqExporter do
it 'does start thread' do
expect(exporter.start).not_to be_nil
end
+
+ it 'does not enable logging by default' do
+ expect(exporter.log_filename).to eq(File::NULL)
+ end
+ end
+
+ context 'with logging enabled' do
+ before do
+ stub_config(
+ monitoring: {
+ sidekiq_exporter: {
+ enabled: true,
+ log_enabled: true,
+ port: 0,
+ address: '127.0.0.1'
+ }
+ }
+ )
+ end
+
+ it 'returns a valid log filename' do
+ expect(exporter.log_filename).to end_with('sidekiq_exporter.log')
+ end
end
context 'when port is already taken' do
diff --git a/spec/lib/gitlab/metrics/instrumentation_spec.rb b/spec/lib/gitlab/metrics/instrumentation_spec.rb
index 2729fbce974..b15e06a0861 100644
--- a/spec/lib/gitlab/metrics/instrumentation_spec.rb
+++ b/spec/lib/gitlab/metrics/instrumentation_spec.rb
@@ -12,6 +12,11 @@ RSpec.describe Gitlab::Metrics::Instrumentation do
text
end
+ def self.wat(text = 'wat')
+ text
+ end
+ private_class_method :wat
+
class << self
def buzz(text = 'buzz')
text
@@ -242,6 +247,7 @@ RSpec.describe Gitlab::Metrics::Instrumentation do
expect(described_class.instrumented?(@dummy.singleton_class)).to eq(true)
expect(@dummy.method(:foo).source_location.first).to match(/instrumentation\.rb/)
+ expect(@dummy.public_methods).to include(:foo)
end
it 'instruments all protected class methods' do
@@ -249,13 +255,16 @@ RSpec.describe Gitlab::Metrics::Instrumentation do
expect(described_class.instrumented?(@dummy.singleton_class)).to eq(true)
expect(@dummy.method(:flaky).source_location.first).to match(/instrumentation\.rb/)
+ expect(@dummy.protected_methods).to include(:flaky)
end
- it 'instruments all private instance methods' do
+ it 'instruments all private class methods' do
described_class.instrument_methods(@dummy)
expect(described_class.instrumented?(@dummy.singleton_class)).to eq(true)
expect(@dummy.method(:buzz).source_location.first).to match(/instrumentation\.rb/)
+ expect(@dummy.private_methods).to include(:buzz)
+ expect(@dummy.private_methods).to include(:wat)
end
it 'only instruments methods directly defined in the module' do
@@ -290,6 +299,7 @@ RSpec.describe Gitlab::Metrics::Instrumentation do
expect(described_class.instrumented?(@dummy)).to eq(true)
expect(@dummy.new.method(:bar).source_location.first).to match(/instrumentation\.rb/)
+ expect(@dummy.public_instance_methods).to include(:bar)
end
it 'instruments all protected instance methods' do
@@ -297,6 +307,7 @@ RSpec.describe Gitlab::Metrics::Instrumentation do
expect(described_class.instrumented?(@dummy)).to eq(true)
expect(@dummy.new.method(:chaf).source_location.first).to match(/instrumentation\.rb/)
+ expect(@dummy.protected_instance_methods).to include(:chaf)
end
it 'instruments all private instance methods' do
@@ -304,6 +315,7 @@ RSpec.describe Gitlab::Metrics::Instrumentation do
expect(described_class.instrumented?(@dummy)).to eq(true)
expect(@dummy.new.method(:wadus).source_location.first).to match(/instrumentation\.rb/)
+ expect(@dummy.private_instance_methods).to include(:wadus)
end
it 'only instruments methods directly defined in the module' do
diff --git a/spec/lib/gitlab/metrics/method_call_spec.rb b/spec/lib/gitlab/metrics/method_call_spec.rb
index 825c91b6cb4..fb5436a90e3 100644
--- a/spec/lib/gitlab/metrics/method_call_spec.rb
+++ b/spec/lib/gitlab/metrics/method_call_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe Gitlab::Metrics::MethodCall do
end
around do |example|
- Timecop.freeze do
+ freeze_time do
example.run
end
end
diff --git a/spec/lib/gitlab/metrics/samplers/action_cable_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/action_cable_sampler_spec.rb
new file mode 100644
index 00000000000..7f05f35c941
--- /dev/null
+++ b/spec/lib/gitlab/metrics/samplers/action_cable_sampler_spec.rb
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Metrics::Samplers::ActionCableSampler do
+ let(:action_cable) { instance_double(ActionCable::Server::Base) }
+
+ subject { described_class.new(action_cable: action_cable) }
+
+ describe '#interval' do
+ it 'samples every five seconds by default' do
+ expect(subject.interval).to eq(5)
+ end
+
+ it 'samples at other intervals if requested' do
+ expect(described_class.new(11).interval).to eq(11)
+ end
+ end
+
+ describe '#sample' do
+ let(:pool) { instance_double(Concurrent::ThreadPoolExecutor) }
+
+ before do
+ allow(action_cable).to receive_message_chain(:worker_pool, :executor).and_return(pool)
+ allow(action_cable).to receive(:connections).and_return([])
+ allow(pool).to receive(:min_length).and_return(1)
+ allow(pool).to receive(:max_length).and_return(2)
+ allow(pool).to receive(:length).and_return(3)
+ allow(pool).to receive(:largest_length).and_return(4)
+ allow(pool).to receive(:completed_task_count).and_return(5)
+ allow(pool).to receive(:queue_length).and_return(6)
+ end
+
+ shared_examples 'collects metrics' do |expected_labels|
+ it 'includes active connections' do
+ expect(subject.metrics[:active_connections]).to receive(:set).with(expected_labels, 0)
+
+ subject.sample
+ end
+
+ it 'includes minimum worker pool size' do
+ expect(subject.metrics[:pool_min_size]).to receive(:set).with(expected_labels, 1)
+
+ subject.sample
+ end
+
+ it 'includes maximum worker pool size' do
+ expect(subject.metrics[:pool_max_size]).to receive(:set).with(expected_labels, 2)
+
+ subject.sample
+ end
+
+ it 'includes current worker pool size' do
+ expect(subject.metrics[:pool_current_size]).to receive(:set).with(expected_labels, 3)
+
+ subject.sample
+ end
+
+ it 'includes largest worker pool size' do
+ expect(subject.metrics[:pool_largest_size]).to receive(:set).with(expected_labels, 4)
+
+ subject.sample
+ end
+
+ it 'includes worker pool completed task count' do
+ expect(subject.metrics[:pool_completed_tasks]).to receive(:set).with(expected_labels, 5)
+
+ subject.sample
+ end
+
+ it 'includes worker pool pending task count' do
+ expect(subject.metrics[:pool_pending_tasks]).to receive(:set).with(expected_labels, 6)
+
+ subject.sample
+ end
+ end
+
+ context 'for in-app mode' do
+ before do
+ expect(Gitlab::ActionCable::Config).to receive(:in_app?).and_return(true)
+ end
+
+ it_behaves_like 'collects metrics', server_mode: 'in-app'
+ end
+
+ context 'for standalone mode' do
+ before do
+ expect(Gitlab::ActionCable::Config).to receive(:in_app?).and_return(false)
+ end
+
+ it_behaves_like 'collects metrics', server_mode: 'standalone'
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb
index 59a70ac74a5..eb6c83096b9 100644
--- a/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb
+++ b/spec/lib/gitlab/metrics/samplers/ruby_sampler_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Gitlab::Metrics::Samplers::RubySampler do
describe '#initialize' do
it 'sets process_start_time_seconds' do
- Timecop.freeze do
+ freeze_time do
expect(sampler.metrics[:process_start_time_seconds].get).to eq(Time.now.to_i)
end
end
diff --git a/spec/lib/gitlab/middleware/multipart/handler_for_jwt_params_spec.rb b/spec/lib/gitlab/middleware/multipart/handler_for_jwt_params_spec.rb
new file mode 100644
index 00000000000..59ec743f6ca
--- /dev/null
+++ b/spec/lib/gitlab/middleware/multipart/handler_for_jwt_params_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Middleware::Multipart::HandlerForJWTParams do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:env) { Rack::MockRequest.env_for('/', method: 'post', params: {}) }
+ let_it_be(:message) { { 'rewritten_fields' => {} } }
+
+ describe '#allowed_paths' do
+ let_it_be(:expected_allowed_paths) do
+ [
+ Dir.tmpdir,
+ ::FileUploader.root,
+ ::Gitlab.config.uploads.storage_path,
+ ::JobArtifactUploader.workhorse_upload_path,
+ ::LfsObjectUploader.workhorse_upload_path,
+ File.join(Rails.root, 'public/uploads/tmp')
+ ]
+ end
+
+ let_it_be(:expected_with_packages_path) { expected_allowed_paths + [::Packages::PackageFileUploader.workhorse_upload_path] }
+
+ subject { described_class.new(env, message).send(:allowed_paths) }
+
+ where(:package_features_enabled, :object_storage_enabled, :direct_upload_enabled, :expected_paths) do
+ false | false | true | :expected_allowed_paths
+ false | false | false | :expected_allowed_paths
+ false | true | true | :expected_allowed_paths
+ false | true | false | :expected_allowed_paths
+ true | false | true | :expected_with_packages_path
+ true | false | false | :expected_with_packages_path
+ true | true | true | :expected_allowed_paths
+ true | true | false | :expected_with_packages_path
+ end
+
+ with_them do
+ before do
+ stub_config(packages: {
+ enabled: package_features_enabled,
+ object_store: {
+ enabled: object_storage_enabled,
+ direct_upload: direct_upload_enabled
+ },
+ storage_path: '/any/dir'
+ })
+ end
+
+ it { is_expected.to eq(send(expected_paths)) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/middleware/multipart/handler_spec.rb b/spec/lib/gitlab/middleware/multipart/handler_spec.rb
new file mode 100644
index 00000000000..aac3f00defe
--- /dev/null
+++ b/spec/lib/gitlab/middleware/multipart/handler_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Middleware::Multipart::Handler do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:env) { Rack::MockRequest.env_for('/', method: 'post', params: {}) }
+ let_it_be(:message) { { 'rewritten_fields' => {} } }
+
+ describe '#allowed_paths' do
+ let_it_be(:expected_allowed_paths) do
+ [
+ Dir.tmpdir,
+ ::FileUploader.root,
+ ::Gitlab.config.uploads.storage_path,
+ ::JobArtifactUploader.workhorse_upload_path,
+ ::LfsObjectUploader.workhorse_upload_path,
+ File.join(Rails.root, 'public/uploads/tmp')
+ ]
+ end
+
+ let_it_be(:expected_with_packages_path) { expected_allowed_paths + [::Packages::PackageFileUploader.workhorse_upload_path] }
+
+ subject { described_class.new(env, message).send(:allowed_paths) }
+
+ where(:package_features_enabled, :object_storage_enabled, :direct_upload_enabled, :expected_paths) do
+ false | false | true | :expected_allowed_paths
+ false | false | false | :expected_allowed_paths
+ false | true | true | :expected_allowed_paths
+ false | true | false | :expected_allowed_paths
+ true | false | true | :expected_with_packages_path
+ true | false | false | :expected_with_packages_path
+ true | true | true | :expected_allowed_paths
+ true | true | false | :expected_with_packages_path
+ end
+
+ with_them do
+ before do
+ stub_config(packages: {
+ enabled: package_features_enabled,
+ object_store: {
+ enabled: object_storage_enabled,
+ direct_upload: direct_upload_enabled
+ },
+ storage_path: '/any/dir'
+ })
+ end
+
+ it { is_expected.to eq(send(expected_paths)) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/middleware/multipart_spec.rb b/spec/lib/gitlab/middleware/multipart_spec.rb
deleted file mode 100644
index 3b64fe335e8..00000000000
--- a/spec/lib/gitlab/middleware/multipart_spec.rb
+++ /dev/null
@@ -1,313 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require 'tempfile'
-
-RSpec.describe Gitlab::Middleware::Multipart do
- include_context 'multipart middleware context'
-
- RSpec.shared_examples_for 'multipart upload files' do
- it 'opens top-level files' do
- Tempfile.open('top-level') do |tempfile|
- rewritten = { 'file' => tempfile.path }
- in_params = { 'file.name' => original_filename, 'file.path' => file_path, 'file.remote_id' => remote_id, 'file.size' => file_size }
- env = post_env(rewritten, in_params, Gitlab::Workhorse.secret, 'gitlab-workhorse')
-
- expect_uploaded_file(tempfile, %w(file))
-
- middleware.call(env)
- end
- end
-
- it 'opens files one level deep' do
- Tempfile.open('one-level') do |tempfile|
- rewritten = { 'user[avatar]' => tempfile.path }
- in_params = { 'user' => { 'avatar' => { '.name' => original_filename, '.path' => file_path, '.remote_id' => remote_id, '.size' => file_size } } }
- env = post_env(rewritten, in_params, Gitlab::Workhorse.secret, 'gitlab-workhorse')
-
- expect_uploaded_file(tempfile, %w(user avatar))
-
- middleware.call(env)
- end
- end
-
- it 'opens files two levels deep' do
- Tempfile.open('two-levels') do |tempfile|
- in_params = { 'project' => { 'milestone' => { 'themesong' => { '.name' => original_filename, '.path' => file_path, '.remote_id' => remote_id, '.size' => file_size } } } }
- rewritten = { 'project[milestone][themesong]' => tempfile.path }
- env = post_env(rewritten, in_params, Gitlab::Workhorse.secret, 'gitlab-workhorse')
-
- expect_uploaded_file(tempfile, %w(project milestone themesong))
-
- middleware.call(env)
- end
- end
-
- def expect_uploaded_file(tempfile, path)
- expect(app).to receive(:call) do |env|
- file = get_params(env).dig(*path)
- expect(file).to be_a(::UploadedFile)
- expect(file.original_filename).to eq(original_filename)
-
- if remote_id
- expect(file.remote_id).to eq(remote_id)
- expect(file.path).to be_nil
- else
- expect(file.path).to eq(File.realpath(tempfile.path))
- expect(file.remote_id).to be_nil
- end
- end
- end
- end
-
- RSpec.shared_examples_for 'handling CI artifact upload' do
- it 'uploads both file and metadata' do
- Tempfile.open('file') do |file|
- Tempfile.open('metadata') do |metadata|
- rewritten = { 'file' => file.path, 'metadata' => metadata.path }
- in_params = { 'file.name' => 'file.txt', 'file.path' => file_path, 'file.remote_id' => file_remote_id, 'file.size' => file_size, 'metadata.name' => 'metadata.gz' }
- env = post_env(rewritten, in_params, Gitlab::Workhorse.secret, 'gitlab-workhorse')
-
- with_expected_uploaded_artifact_files(file, metadata) do |uploaded_file, uploaded_metadata|
- expect(uploaded_file).to be_a(::UploadedFile)
- expect(uploaded_file.original_filename).to eq('file.txt')
-
- if file_remote_id
- expect(uploaded_file.remote_id).to eq(file_remote_id)
- expect(uploaded_file.size).to eq(file_size)
- expect(uploaded_file.path).to be_nil
- else
- expect(uploaded_file.path).to eq(File.realpath(file.path))
- expect(uploaded_file.remote_id).to be_nil
- end
-
- expect(uploaded_metadata).to be_a(::UploadedFile)
- expect(uploaded_metadata.original_filename).to eq('metadata.gz')
- expect(uploaded_metadata.path).to eq(File.realpath(metadata.path))
- expect(uploaded_metadata.remote_id).to be_nil
- end
-
- middleware.call(env)
- end
- end
- end
-
- def with_expected_uploaded_artifact_files(file, metadata)
- expect(app).to receive(:call) do |env|
- file = get_params(env).dig('file')
- metadata = get_params(env).dig('metadata')
-
- yield file, metadata
- end
- end
- end
-
- it 'rejects headers signed with the wrong secret' do
- env = post_env({ 'file' => '/var/empty/nonesuch' }, {}, 'x' * 32, 'gitlab-workhorse')
-
- expect { middleware.call(env) }.to raise_error(JWT::VerificationError)
- end
-
- it 'rejects headers signed with the wrong issuer' do
- env = post_env({ 'file' => '/var/empty/nonesuch' }, {}, Gitlab::Workhorse.secret, 'acme-inc')
-
- expect { middleware.call(env) }.to raise_error(JWT::InvalidIssuerError)
- end
-
- context 'with invalid rewritten field' do
- invalid_field_names = [
- '[file]',
- ';file',
- 'file]',
- ';file]',
- 'file]]',
- 'file;;'
- ]
-
- invalid_field_names.each do |invalid_field_name|
- it "rejects invalid rewritten field name #{invalid_field_name}" do
- env = post_env({ invalid_field_name => nil }, {}, Gitlab::Workhorse.secret, 'gitlab-workhorse')
-
- expect { middleware.call(env) }.to raise_error(RuntimeError, "invalid field: \"#{invalid_field_name}\"")
- end
- end
- end
-
- context 'with remote file' do
- let(:remote_id) { 'someid' }
- let(:file_size) { 300 }
- let(:file_path) { '' }
-
- it_behaves_like 'multipart upload files'
- end
-
- context 'with remote file and a file path set' do
- let(:remote_id) { 'someid' }
- let(:file_size) { 300 }
- let(:file_path) { 'not_a_valid_file_path' } # file path will come from the rewritten_fields
-
- it_behaves_like 'multipart upload files'
- end
-
- context 'with local file' do
- let(:remote_id) { nil }
- let(:file_size) { nil }
- let(:file_path) { 'not_a_valid_file_path' } # file path will come from the rewritten_fields
-
- it_behaves_like 'multipart upload files'
- end
-
- context 'with remote CI artifact upload' do
- let(:file_remote_id) { 'someid' }
- let(:file_size) { 300 }
- let(:file_path) { 'not_a_valid_file_path' } # file path will come from the rewritten_fields
-
- it_behaves_like 'handling CI artifact upload'
- end
-
- context 'with local CI artifact upload' do
- let(:file_remote_id) { nil }
- let(:file_size) { nil }
- let(:file_path) { 'not_a_valid_file_path' } # file path will come from the rewritten_fields
-
- it_behaves_like 'handling CI artifact upload'
- end
-
- it 'allows files in uploads/tmp directory' do
- with_tmp_dir('public/uploads/tmp') do |dir, env|
- expect(app).to receive(:call) do |env|
- expect(get_params(env)['file']).to be_a(::UploadedFile)
- end
-
- middleware.call(env)
- end
- end
-
- it 'allows files in the job artifact upload path' do
- with_tmp_dir('artifacts') do |dir, env|
- expect(JobArtifactUploader).to receive(:workhorse_upload_path).and_return(File.join(dir, 'artifacts'))
- expect(app).to receive(:call) do |env|
- expect(get_params(env)['file']).to be_a(::UploadedFile)
- end
-
- middleware.call(env)
- end
- end
-
- it 'allows files in the lfs upload path' do
- with_tmp_dir('lfs-objects') do |dir, env|
- expect(LfsObjectUploader).to receive(:workhorse_upload_path).and_return(File.join(dir, 'lfs-objects'))
- expect(app).to receive(:call) do |env|
- expect(get_params(env)['file']).to be_a(::UploadedFile)
- end
-
- middleware.call(env)
- end
- end
-
- it 'allows symlinks for uploads dir' do
- Tempfile.open('two-levels') do |tempfile|
- symlinked_dir = '/some/dir/uploads'
- symlinked_path = File.join(symlinked_dir, File.basename(tempfile.path))
- env = post_env({ 'file' => symlinked_path }, { 'file.name' => original_filename, 'file.path' => symlinked_path }, Gitlab::Workhorse.secret, 'gitlab-workhorse')
-
- allow(FileUploader).to receive(:root).and_return(symlinked_dir)
- allow(UploadedFile).to receive(:allowed_paths).and_return([symlinked_dir, Gitlab.config.uploads.storage_path])
- allow(File).to receive(:realpath).and_call_original
- allow(File).to receive(:realpath).with(symlinked_dir).and_return(Dir.tmpdir)
- allow(File).to receive(:realpath).with(symlinked_path).and_return(tempfile.path)
- allow(File).to receive(:exist?).and_call_original
- allow(File).to receive(:exist?).with(symlinked_dir).and_return(true)
-
- # override Dir.tmpdir because this dir is in the list of allowed paths
- # and it would match FileUploader.root path (which in this test is linked
- # to /tmp too)
- allow(Dir).to receive(:tmpdir).and_return(File.join(Dir.tmpdir, 'tmpsubdir'))
-
- expect(app).to receive(:call) do |env|
- expect(get_params(env)['file']).to be_a(::UploadedFile)
- end
-
- middleware.call(env)
- end
- end
-
- describe '#call' do
- context 'with packages storage' do
- using RSpec::Parameterized::TableSyntax
-
- let(:storage_path) { 'shared/packages' }
-
- RSpec.shared_examples 'allowing the multipart upload' do
- it 'allows files to be uploaded' do
- with_tmp_dir('tmp/uploads', storage_path) do |dir, env|
- allow(Packages::PackageFileUploader).to receive(:root).and_return(File.join(dir, storage_path))
-
- expect(app).to receive(:call) do |env|
- expect(get_params(env)['file']).to be_a(::UploadedFile)
- end
-
- middleware.call(env)
- end
- end
- end
-
- RSpec.shared_examples 'not allowing the multipart upload when package upload path is used' do
- it 'does not allow files to be uploaded' do
- with_tmp_dir('tmp/uploads', storage_path) do |dir, env|
- # with_tmp_dir sets the same workhorse_upload_path for all Uploaders,
- # so we have to prevent JobArtifactUploader and LfsObjectUploader to
- # allow the tested path
- allow(JobArtifactUploader).to receive(:workhorse_upload_path).and_return(Dir.tmpdir)
- allow(LfsObjectUploader).to receive(:workhorse_upload_path).and_return(Dir.tmpdir)
-
- status, headers, body = middleware.call(env)
-
- expect(status).to eq(400)
- expect(headers).to eq({ 'Content-Type' => 'text/plain' })
- expect(body).to start_with('insecure path used')
- end
- end
- end
-
- RSpec.shared_examples 'adding package storage to multipart allowed paths' do
- before do
- expect(::Packages::PackageFileUploader).to receive(:workhorse_upload_path).and_call_original
- end
-
- it_behaves_like 'allowing the multipart upload'
- end
-
- RSpec.shared_examples 'not adding package storage to multipart allowed paths' do
- before do
- expect(::Packages::PackageFileUploader).not_to receive(:workhorse_upload_path)
- end
-
- it_behaves_like 'not allowing the multipart upload when package upload path is used'
- end
-
- where(:object_storage_enabled, :direct_upload_enabled, :example_name) do
- false | true | 'adding package storage to multipart allowed paths'
- false | false | 'adding package storage to multipart allowed paths'
- true | true | 'not adding package storage to multipart allowed paths'
- true | false | 'adding package storage to multipart allowed paths'
- end
-
- with_them do
- before do
- stub_config(packages: {
- enabled: true,
- object_store: {
- enabled: object_storage_enabled,
- direct_upload: direct_upload_enabled
- },
- storage_path: storage_path
- })
- end
-
- it_behaves_like params[:example_name]
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/middleware/multipart_with_handler_for_jwt_params_spec.rb b/spec/lib/gitlab/middleware/multipart_with_handler_for_jwt_params_spec.rb
new file mode 100644
index 00000000000..875e3820011
--- /dev/null
+++ b/spec/lib/gitlab/middleware/multipart_with_handler_for_jwt_params_spec.rb
@@ -0,0 +1,171 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Middleware::Multipart do
+ include MultipartHelpers
+
+ describe '#call' do
+ let(:app) { double(:app) }
+ let(:middleware) { described_class.new(app) }
+ let(:secret) { Gitlab::Workhorse.secret }
+ let(:issuer) { 'gitlab-workhorse' }
+
+ subject do
+ env = post_env(
+ rewritten_fields: rewritten_fields,
+ params: params,
+ secret: secret,
+ issuer: issuer
+ )
+ middleware.call(env)
+ end
+
+ before do
+ stub_feature_flags(upload_middleware_jwt_params_handler: true)
+ end
+
+ context 'remote file mode' do
+ let(:mode) { :remote }
+
+ it_behaves_like 'handling all upload parameters conditions'
+
+ context 'and a path set' do
+ include_context 'with one temporary file for multipart'
+
+ let(:rewritten_fields) { rewritten_fields_hash('file' => uploaded_filepath) }
+ let(:params) { upload_parameters_for(key: 'file', filename: filename, remote_id: remote_id).merge('file.path' => '/should/not/be/read') }
+
+ it 'builds an UploadedFile' do
+ expect_uploaded_files(original_filename: filename, remote_id: remote_id, size: uploaded_file.size, params_path: %w(file))
+
+ subject
+ end
+ end
+ end
+
+ context 'local file mode' do
+ let(:mode) { :local }
+
+ it_behaves_like 'handling all upload parameters conditions'
+
+ context 'when file is' do
+ include_context 'with one temporary file for multipart'
+
+ let(:allowed_paths) { [Dir.tmpdir] }
+
+ before do
+ expect_next_instance_of(::Gitlab::Middleware::Multipart::HandlerForJWTParams) do |handler|
+ expect(handler).to receive(:allowed_paths).and_return(allowed_paths)
+ end
+ end
+
+ context 'in allowed paths' do
+ let(:rewritten_fields) { rewritten_fields_hash('file' => uploaded_filepath) }
+ let(:params) { upload_parameters_for(filepath: uploaded_filepath, key: 'file', filename: filename) }
+
+ it 'builds an UploadedFile' do
+ expect_uploaded_files(filepath: uploaded_filepath, original_filename: filename, size: uploaded_file.size, params_path: %w(file))
+
+ subject
+ end
+ end
+
+ context 'not in allowed paths' do
+ let(:allowed_paths) { [] }
+
+ let(:rewritten_fields) { rewritten_fields_hash('file' => uploaded_filepath) }
+ let(:params) { upload_parameters_for(filepath: uploaded_filepath, key: 'file') }
+
+ it 'returns an error' do
+ result = subject
+
+ expect(result[0]).to eq(400)
+ expect(result[2]).to include('insecure path used')
+ end
+ end
+ end
+ end
+
+ context 'with dummy params in remote mode' do
+ let(:rewritten_fields) { { 'file' => 'should/not/be/read' } }
+ let(:params) { upload_parameters_for(key: 'file') }
+ let(:mode) { :remote }
+
+ context 'with an invalid secret' do
+ let(:secret) { 'INVALID_SECRET' }
+
+ it { expect { subject }.to raise_error(JWT::VerificationError) }
+ end
+
+ context 'with an invalid issuer' do
+ let(:issuer) { 'INVALID_ISSUER' }
+
+ it { expect { subject }.to raise_error(JWT::InvalidIssuerError) }
+ end
+
+ context 'with invalid rewritten field key' do
+ invalid_keys = [
+ '[file]',
+ ';file',
+ 'file]',
+ ';file]',
+ 'file]]',
+ 'file;;'
+ ]
+
+ invalid_keys.each do |invalid_key|
+ context invalid_key do
+ let(:rewritten_fields) { { invalid_key => 'should/not/be/read' } }
+
+ it { expect { subject }.to raise_error(RuntimeError, "invalid field: \"#{invalid_key}\"") }
+ end
+ end
+ end
+
+ context 'with invalid key in parameters' do
+ include_context 'with one temporary file for multipart'
+
+ let(:rewritten_fields) { rewritten_fields_hash('file' => uploaded_filepath) }
+ let(:params) { upload_parameters_for(filepath: uploaded_filepath, key: 'wrong_key', filename: filename, remote_id: remote_id) }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(RuntimeError, 'Empty JWT param: file.gitlab-workhorse-upload')
+ end
+ end
+
+ context 'with a modified JWT payload' do
+ include_context 'with one temporary file for multipart'
+
+ let(:rewritten_fields) { rewritten_fields_hash('file' => uploaded_filepath) }
+ let(:crafted_payload) { Base64.urlsafe_encode64({ 'path' => 'test' }.to_json) }
+ let(:params) do
+ upload_parameters_for(filepath: uploaded_filepath, key: 'file', filename: filename, remote_id: remote_id).tap do |params|
+ header, _, sig = params['file.gitlab-workhorse-upload'].split('.')
+ params['file.gitlab-workhorse-upload'] = [header, crafted_payload, sig].join('.')
+ end
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(JWT::VerificationError, 'Signature verification raised')
+ end
+ end
+
+ context 'with a modified JWT sig' do
+ include_context 'with one temporary file for multipart'
+
+ let(:rewritten_fields) { rewritten_fields_hash('file' => uploaded_filepath) }
+ let(:params) do
+ upload_parameters_for(filepath: uploaded_filepath, key: 'file', filename: filename, remote_id: remote_id).tap do |params|
+ header, payload, sig = params['file.gitlab-workhorse-upload'].split('.')
+ params['file.gitlab-workhorse-upload'] = [header, payload, "#{sig}modified"].join('.')
+ end
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(JWT::VerificationError, 'Signature verification raised')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/middleware/multipart_with_handler_spec.rb b/spec/lib/gitlab/middleware/multipart_with_handler_spec.rb
new file mode 100644
index 00000000000..742a5639ace
--- /dev/null
+++ b/spec/lib/gitlab/middleware/multipart_with_handler_spec.rb
@@ -0,0 +1,144 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Middleware::Multipart do
+ include MultipartHelpers
+
+ describe '#call' do
+ let(:app) { double(:app) }
+ let(:middleware) { described_class.new(app) }
+ let(:secret) { Gitlab::Workhorse.secret }
+ let(:issuer) { 'gitlab-workhorse' }
+
+ subject do
+ env = post_env(
+ rewritten_fields: rewritten_fields,
+ params: params,
+ secret: secret,
+ issuer: issuer
+ )
+ middleware.call(env)
+ end
+
+ before do
+ stub_feature_flags(upload_middleware_jwt_params_handler: false)
+ end
+
+ context 'remote file mode' do
+ let(:mode) { :remote }
+
+ it_behaves_like 'handling all upload parameters conditions'
+
+ context 'and a path set' do
+ include_context 'with one temporary file for multipart'
+
+ let(:rewritten_fields) { rewritten_fields_hash('file' => uploaded_filepath) }
+ let(:params) { upload_parameters_for(key: 'file', filename: filename, remote_id: remote_id).merge('file.path' => '/should/not/be/read') }
+
+ it 'builds an UploadedFile' do
+ expect_uploaded_files(original_filename: filename, remote_id: remote_id, size: uploaded_file.size, params_path: %w(file))
+
+ subject
+ end
+ end
+ end
+
+ context 'local file mode' do
+ let(:mode) { :local }
+
+ it_behaves_like 'handling all upload parameters conditions'
+
+ context 'when file is' do
+ include_context 'with one temporary file for multipart'
+
+ let(:allowed_paths) { [Dir.tmpdir] }
+
+ before do
+ expect_next_instance_of(::Gitlab::Middleware::Multipart::Handler) do |handler|
+ expect(handler).to receive(:allowed_paths).and_return(allowed_paths)
+ end
+ end
+
+ context 'in allowed paths' do
+ let(:rewritten_fields) { rewritten_fields_hash('file' => uploaded_filepath) }
+ let(:params) { upload_parameters_for(filepath: uploaded_filepath, key: 'file', filename: filename) }
+
+ it 'builds an UploadedFile' do
+ expect_uploaded_files(filepath: uploaded_filepath, original_filename: filename, size: uploaded_file.size, params_path: %w(file))
+
+ subject
+ end
+ end
+
+ context 'not in allowed paths' do
+ let(:allowed_paths) { [] }
+
+ let(:rewritten_fields) { rewritten_fields_hash('file' => uploaded_filepath) }
+ let(:params) { upload_parameters_for(filepath: uploaded_filepath, key: 'file') }
+
+ it 'returns an error' do
+ result = subject
+
+ expect(result[0]).to eq(400)
+ expect(result[2]).to include('insecure path used')
+ end
+ end
+ end
+ end
+
+ context 'with dummy params in remote mode' do
+ let(:rewritten_fields) { { 'file' => 'should/not/be/read' } }
+ let(:params) { upload_parameters_for(key: 'file') }
+ let(:mode) { :remote }
+
+ context 'with an invalid secret' do
+ let(:secret) { 'INVALID_SECRET' }
+
+ it { expect { subject }.to raise_error(JWT::VerificationError) }
+ end
+
+ context 'with an invalid issuer' do
+ let(:issuer) { 'INVALID_ISSUER' }
+
+ it { expect { subject }.to raise_error(JWT::InvalidIssuerError) }
+ end
+
+ context 'with invalid rewritten field key' do
+ invalid_keys = [
+ '[file]',
+ ';file',
+ 'file]',
+ ';file]',
+ 'file]]',
+ 'file;;'
+ ]
+
+ invalid_keys.each do |invalid_key|
+ context invalid_key do
+ let(:rewritten_fields) { { invalid_key => 'should/not/be/read' } }
+
+ it { expect { subject }.to raise_error(RuntimeError, "invalid field: \"#{invalid_key}\"") }
+ end
+ end
+ end
+
+ context 'with invalid key in parameters' do
+ include_context 'with one temporary file for multipart'
+
+ let(:rewritten_fields) { rewritten_fields_hash('file' => uploaded_filepath) }
+ let(:params) { upload_parameters_for(filepath: uploaded_filepath, key: 'wrong_key', filename: filename, remote_id: remote_id) }
+
+ it 'builds no UploadedFile' do
+ expect(app).to receive(:call) do |env|
+ received_params = get_params(env)
+ expect(received_params['file']).to be_nil
+ expect(received_params['wrong_key']).to be_nil
+ end
+
+ subject
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/middleware/same_site_cookies_spec.rb b/spec/lib/gitlab/middleware/same_site_cookies_spec.rb
index 7c5262ca318..2d1a9b2eee2 100644
--- a/spec/lib/gitlab/middleware/same_site_cookies_spec.rb
+++ b/spec/lib/gitlab/middleware/same_site_cookies_spec.rb
@@ -3,18 +3,24 @@
require 'spec_helper'
RSpec.describe Gitlab::Middleware::SameSiteCookies do
+ using RSpec::Parameterized::TableSyntax
include Rack::Test::Methods
+ let(:user_agent) { nil }
let(:mock_app) do
Class.new do
- attr_reader :cookies
+ attr_reader :cookies, :user_agent
def initialize(cookies)
@cookies = cookies
end
def call(env)
- [200, { 'Set-Cookie' => cookies }, ['OK']]
+ [
+ 200,
+ { 'Set-Cookie' => cookies },
+ ['OK']
+ ]
end
end
end
@@ -29,7 +35,7 @@ RSpec.describe Gitlab::Middleware::SameSiteCookies do
let(:request) { Rack::MockRequest.new(subject) }
def do_request
- request.post('/some/path')
+ request.post('/some/path', { 'HTTP_USER_AGENT' => user_agent }.compact)
end
context 'without SSL enabled' do
@@ -63,6 +69,43 @@ RSpec.describe Gitlab::Middleware::SameSiteCookies do
end
end
+ context 'with different browsers' do
+ where(:description, :user_agent, :expected) do
+ "iOS 12" | "Mozilla/5.0 (iPhone; CPU iPhone OS 12_0 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/12.0 Mobile/15E148 Safari/604.1" | false
+ "macOS 10.14 + Safari" | "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/12.0 Safari/605.1.15" | false
+ "macOS 10.14 + Opera" | "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.78 Safari/537.36 OPR/47.0.2631.55" | false
+ "macOS 10.14 + Chrome v80" | "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.87 Safari/537.36" | true
+ "Chrome v41" | "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.1 Safari/537.36" | true
+ "Chrome v50" | "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2348.1 Safari/537.36" | true
+ "Chrome v51" | "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2718.15 Safari/537.36" | false
+ "Chrome v62" | "Mozilla/5.0 (Macintosh; Intel NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.62 Safari/537.36" | false
+ "Chrome v66" | "Mozilla/5.0 (Linux; Android 4.4.2; Avvio_793 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.126 Mobile Safari/537.36" | false
+ "Chrome v67" | "Mozilla/5.0 (Linux; Android 7.1.1; SM-J510F Build/NMF26X) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3371.0 Mobile Safari/537.36" | true
+ "Chrome v85" | "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.83 Safari/537.36" | true
+ "Chromium v66" | "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Ubuntu Chromium/66.0.3359.181 HeadlessChrome/66.0.3359.181 Safari/537.36" | false
+ "Chromium v85" | "Mozilla/5.0 (X11; Linux aarch64) AppleWebKit/537.36 (KHTML, like Gecko) Ubuntu Chromium/85.0.4183.59 Chrome/85.0.4183.59 Safari/537.36" | true
+ "UC Browser 12.0.4" | "Mozilla/5.0 (Linux; U; Android 4.4.4; zh-CN; A31 Build/KTU84P) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/57.0.2987.108 UCBrowser/12.0.4.986 Mobile Safari/537.36" | false
+ "UC Browser 12.13.0" | "Mozilla/5.0 (Linux; U; Android 7.1.1; en-US; SM-C9000 Build/NMF26X) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/57.0.2987.108 UCBrowser/12.13.0.1207 Mobile Safari/537.36" | false
+ "UC Browser 12.13.2" | "Mozilla/5.0 (Linux; U; Android 9; en-US; Redmi Note 7 Build/PQ3B.190801.002) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/57.0.2987.108 UCBrowser/12.13.2.1208 Mobile Safari/537.36" | true
+ "UC Browser 12.13.5" | "Mozilla/5.0 (Linux; U; Android 5.1.1; en-US; PHICOMM C630 (CLUE L) Build/LMY47V) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/57.0.2987.108 UCBrowser/12.13.5.1209 Mobile Safari/537.36" | true
+ "Playstation" | "Mozilla/5.0 (PlayStation 4 2.51) AppleWebKit/537.73 (KHTML, like Gecko)" | true
+ end
+
+ with_them do
+ let(:cookies) { "thiscookie=12345" }
+
+ it 'returns expected SameSite status' do
+ response = do_request
+
+ if expected
+ expect(response['Set-Cookie']).to include('SameSite=None')
+ else
+ expect(response['Set-Cookie']).not_to include('SameSite=None')
+ end
+ end
+ end
+ end
+
context 'with single cookie' do
let(:cookies) { "thiscookie=12345" }
diff --git a/spec/lib/gitlab/pages/settings_spec.rb b/spec/lib/gitlab/pages/settings_spec.rb
index 7d4db073d73..f5424a98153 100644
--- a/spec/lib/gitlab/pages/settings_spec.rb
+++ b/spec/lib/gitlab/pages/settings_spec.rb
@@ -10,38 +10,14 @@ RSpec.describe Gitlab::Pages::Settings do
it { is_expected.to eq('the path') }
- it 'does not track calls' do
- expect(::Gitlab::ErrorTracking).not_to receive(:track_exception)
-
- subject
- end
-
- context 'when running under a web server' do
+ context 'when running under a web server outside of test mode' do
before do
+ allow(::Gitlab::Runtime).to receive(:test_suite?).and_return(false)
allow(::Gitlab::Runtime).to receive(:web_server?).and_return(true)
end
- it { is_expected.to eq('the path') }
-
- it 'does not track calls' do
- expect(::Gitlab::ErrorTracking).not_to receive(:track_exception)
-
- subject
- end
-
- context 'with the env var' do
- before do
- stub_env('GITLAB_PAGES_DENY_DISK_ACCESS', '1')
- end
-
- it { is_expected.to eq('the path') }
-
- it 'tracks a DiskAccessDenied exception' do
- expect(::Gitlab::ErrorTracking).to receive(:track_exception)
- .with(instance_of(described_class::DiskAccessDenied)).and_call_original
-
- subject
- end
+ it 'raises a DiskAccessDenied exception' do
+ expect { subject }.to raise_error(described_class::DiskAccessDenied)
end
end
end
diff --git a/spec/lib/gitlab/pages_transfer_spec.rb b/spec/lib/gitlab/pages_transfer_spec.rb
new file mode 100644
index 00000000000..4f0ee76b244
--- /dev/null
+++ b/spec/lib/gitlab/pages_transfer_spec.rb
@@ -0,0 +1,137 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::PagesTransfer do
+ describe '#async' do
+ let(:async) { subject.async }
+
+ context 'when receiving an allowed method' do
+ it 'schedules a PagesTransferWorker', :aggregate_failures do
+ described_class::Async::METHODS.each do |meth|
+ expect(PagesTransferWorker)
+ .to receive(:perform_async).with(meth, %w[foo bar])
+
+ async.public_send(meth, 'foo', 'bar')
+ end
+ end
+ end
+
+ context 'when receiving a private method' do
+ it 'raises NoMethodError' do
+ expect { async.move('foo', 'bar') }.to raise_error(NoMethodError)
+ end
+ end
+
+ context 'when receiving a non-existent method' do
+ it 'raises NoMethodError' do
+ expect { async.foo('bar') }.to raise_error(NoMethodError)
+ end
+ end
+ end
+
+ RSpec.shared_examples 'moving a pages directory' do |parameter|
+ let!(:pages_path_before) { project.pages_path }
+ let(:config_path_before) { File.join(pages_path_before, 'config.json') }
+ let(:pages_path_after) { project.reload.pages_path }
+ let(:config_path_after) { File.join(pages_path_after, 'config.json') }
+
+ before do
+ FileUtils.mkdir_p(pages_path_before)
+ FileUtils.touch(config_path_before)
+ end
+
+ after do
+ FileUtils.remove_entry(pages_path_before, true)
+ FileUtils.remove_entry(pages_path_after, true)
+ end
+
+ it 'moves the directory' do
+ subject.public_send(meth, *args)
+
+ expect(File.exist?(config_path_before)).to be(false)
+ expect(File.exist?(config_path_after)).to be(true)
+ end
+
+ it 'returns false if it fails to move the directory' do
+ # Move the directory once, so it can't be moved again
+ subject.public_send(meth, *args)
+
+ expect(subject.public_send(meth, *args)).to be(false)
+ end
+ end
+
+ describe '#move_namespace' do
+ # Can't use let_it_be because we change the path
+ let(:group_1) { create(:group) }
+ let(:group_2) { create(:group) }
+ let(:subgroup) { create(:group, parent: group_1) }
+ let(:project) { create(:project, group: subgroup) }
+ let(:new_path) { "#{group_2.path}/#{subgroup.path}" }
+ let(:meth) { 'move_namespace' }
+
+ # Store the path before we change it
+ let!(:args) { [project.path, subgroup.full_path, new_path] }
+
+ before do
+ # We need to skip hooks, otherwise the directory will be moved
+ # via an ActiveRecord callback
+ subgroup.update_columns(parent_id: group_2.id)
+ subgroup.route.update!(path: new_path)
+ end
+
+ include_examples 'moving a pages directory'
+ end
+
+ describe '#move_project' do
+ # Can't use let_it_be because we change the path
+ let(:group_1) { create(:group) }
+ let(:group_2) { create(:group) }
+ let(:project) { create(:project, group: group_1) }
+ let(:new_path) { group_2.path }
+ let(:meth) { 'move_project' }
+ let(:args) { [project.path, group_1.full_path, group_2.full_path] }
+
+ include_examples 'moving a pages directory' do
+ before do
+ project.update!(group: group_2)
+ end
+ end
+ end
+
+ describe '#rename_project' do
+ # Can't use let_it_be because we change the path
+ let(:project) { create(:project) }
+ let(:new_path) { project.path.succ }
+ let(:meth) { 'rename_project' }
+
+ # Store the path before we change it
+ let!(:args) { [project.path, new_path, project.namespace.full_path] }
+
+ include_examples 'moving a pages directory' do
+ before do
+ project.update!(path: new_path)
+ end
+ end
+ end
+
+ describe '#rename_namespace' do
+ # Can't use let_it_be because we change the path
+ let(:group) { create(:group) }
+ let(:project) { create(:project, group: group) }
+ let(:new_path) { project.namespace.full_path.succ }
+ let(:meth) { 'rename_namespace' }
+
+ # Store the path before we change it
+ let!(:args) { [project.namespace.full_path, new_path] }
+
+ before do
+ # We need to skip hooks, otherwise the directory will be moved
+ # via an ActiveRecord callback
+ group.update_columns(path: new_path)
+ group.route.update!(path: new_path)
+ end
+
+ include_examples 'moving a pages directory'
+ end
+end
diff --git a/spec/lib/gitlab/phabricator_import/cache/map_spec.rb b/spec/lib/gitlab/phabricator_import/cache/map_spec.rb
index 0f760852a68..08ac85c2625 100644
--- a/spec/lib/gitlab/phabricator_import/cache/map_spec.rb
+++ b/spec/lib/gitlab/phabricator_import/cache/map_spec.rb
@@ -50,7 +50,7 @@ RSpec.describe Gitlab::PhabricatorImport::Cache::Map, :clean_gitlab_redis_cache
describe '#set_gitlab_model' do
around do |example|
- Timecop.freeze { example.run }
+ freeze_time { example.run }
end
it 'sets the class and id in redis with a ttl' do
diff --git a/spec/lib/gitlab/project_authorizations_spec.rb b/spec/lib/gitlab/project_authorizations_spec.rb
index 5ff07dcec4f..d2b41ee31d9 100644
--- a/spec/lib/gitlab/project_authorizations_spec.rb
+++ b/spec/lib/gitlab/project_authorizations_spec.rb
@@ -115,6 +115,66 @@ RSpec.describe Gitlab::ProjectAuthorizations do
end
end
+ context 'user with minimal access to group' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:user) { create(:user) }
+
+ subject(:mapping) { map_access_levels(authorizations) }
+
+ context 'group membership' do
+ let!(:group_project) { create(:project, namespace: group) }
+
+ before do
+ create(:group_member, :minimal_access, user: user, source: group)
+ end
+
+ it 'does not create authorization' do
+ expect(mapping[group_project.id]).to be_nil
+ end
+ end
+
+ context 'inherited group membership' do
+ let!(:sub_group) { create(:group, parent: group) }
+ let!(:sub_group_project) { create(:project, namespace: sub_group) }
+
+ before do
+ create(:group_member, :minimal_access, user: user, source: group)
+ end
+
+ it 'does not create authorization' do
+ expect(mapping[sub_group_project.id]).to be_nil
+ end
+ end
+
+ context 'shared group' do
+ let!(:shared_group) { create(:group) }
+ let!(:shared_group_project) { create(:project, namespace: shared_group) }
+
+ before do
+ create(:group_group_link, shared_group: shared_group, shared_with_group: group)
+ create(:group_member, :minimal_access, user: user, source: group)
+ end
+
+ it 'does not create authorization' do
+ expect(mapping[shared_group_project.id]).to be_nil
+ end
+ end
+
+ context 'shared project' do
+ let!(:another_group) { create(:group) }
+ let!(:shared_project) { create(:project, namespace: another_group) }
+
+ before do
+ create(:project_group_link, group: group, project: shared_project)
+ create(:group_member, :minimal_access, user: user, source: group)
+ end
+
+ it 'does not create authorization' do
+ expect(mapping[shared_project.id]).to be_nil
+ end
+ end
+ end
+
context 'with nested groups' do
let(:group) { create(:group) }
let!(:nested_group) { create(:group, parent: group) }
diff --git a/spec/lib/gitlab/project_search_results_spec.rb b/spec/lib/gitlab/project_search_results_spec.rb
index 6e3c60b58dc..fe0735b8043 100644
--- a/spec/lib/gitlab/project_search_results_spec.rb
+++ b/spec/lib/gitlab/project_search_results_spec.rb
@@ -5,31 +5,34 @@ require 'spec_helper'
RSpec.describe Gitlab::ProjectSearchResults do
include SearchHelpers
- let(:user) { create(:user) }
- let(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
let(:query) { 'hello world' }
+ let(:repository_ref) { nil }
+ let(:filters) { {} }
- describe 'initialize with empty ref' do
- let(:results) { described_class.new(user, project, query, '') }
+ subject(:results) { described_class.new(user, query, project: project, repository_ref: repository_ref, filters: filters) }
- it { expect(results.project).to eq(project) }
- it { expect(results.query).to eq('hello world') }
- end
+ context 'with a repository_ref' do
+ context 'when empty' do
+ let(:repository_ref) { '' }
+
+ it { expect(results.project).to eq(project) }
+ it { expect(results.query).to eq('hello world') }
+ end
- describe 'initialize with ref' do
- let(:ref) { 'refs/heads/test' }
- let(:results) { described_class.new(user, project, query, ref) }
+ context 'when set' do
+ let(:repository_ref) { 'refs/heads/test' }
- it { expect(results.project).to eq(project) }
- it { expect(results.repository_ref).to eq(ref) }
- it { expect(results.query).to eq('hello world') }
+ it { expect(results.project).to eq(project) }
+ it { expect(results.repository_ref).to eq(repository_ref) }
+ it { expect(results.query).to eq('hello world') }
+ end
end
describe '#formatted_count' do
using RSpec::Parameterized::TableSyntax
- let(:results) { described_class.new(user, project, query) }
-
where(:scope, :count_method, :expected) do
'blobs' | :limited_blobs_count | max_limited_count
'notes' | :limited_notes_count | max_limited_count
@@ -63,7 +66,8 @@ RSpec.describe Gitlab::ProjectSearchResults do
shared_examples 'general blob search' do |entity_type, blob_type|
let(:query) { 'files' }
- subject(:results) { described_class.new(user, project, query).objects(blob_type) }
+
+ subject(:objects) { results.objects(blob_type) }
context "when #{entity_type} is disabled" do
let(:project) { disabled_project }
@@ -94,17 +98,17 @@ RSpec.describe Gitlab::ProjectSearchResults do
end
it 'finds by name' do
- expect(results.map(&:path)).to include(expected_file_by_path)
+ expect(objects.map(&:path)).to include(expected_file_by_path)
end
it "loads all blobs for path matches in single batch" do
expect(Gitlab::Git::Blob).to receive(:batch).once.and_call_original
- results.map(&:data)
+ expect { objects.map(&:data) }.not_to raise_error
end
it 'finds by content' do
- blob = results.select { |result| result.path == expected_file_by_content }.flatten.last
+ blob = objects.select { |result| result.path == expected_file_by_content }.flatten.last
expect(blob.path).to eq(expected_file_by_content)
end
@@ -115,7 +119,7 @@ RSpec.describe Gitlab::ProjectSearchResults do
let(:file_finder) { double }
let(:project_branch) { 'project_branch' }
- subject(:results) { described_class.new(user, project, query, repository_ref).objects(blob_type) }
+ subject(:objects) { results.objects(blob_type) }
before do
allow(entity).to receive(:default_branch).and_return(project_branch)
@@ -128,7 +132,7 @@ RSpec.describe Gitlab::ProjectSearchResults do
it 'uses it' do
expect(Gitlab::FileFinder).to receive(:new).with(project, repository_ref).and_return(file_finder)
- results
+ expect { objects }.not_to raise_error
end
end
@@ -138,7 +142,7 @@ RSpec.describe Gitlab::ProjectSearchResults do
it "uses #{entity_type} repository default reference" do
expect(Gitlab::FileFinder).to receive(:new).with(project, project_branch).and_return(file_finder)
- results
+ expect { objects }.not_to raise_error
end
end
@@ -148,7 +152,7 @@ RSpec.describe Gitlab::ProjectSearchResults do
it "uses #{entity_type} repository default reference" do
expect(Gitlab::FileFinder).to receive(:new).with(project, project_branch).and_return(file_finder)
- results
+ expect { objects }.not_to raise_error
end
end
end
@@ -157,7 +161,6 @@ RSpec.describe Gitlab::ProjectSearchResults do
let(:per_page) { 20 }
let(:count_limit) { described_class::COUNT_LIMIT }
let(:file_finder) { instance_double('Gitlab::FileFinder') }
- let(:results) { described_class.new(user, project, query) }
let(:repository_ref) { 'master' }
before do
@@ -228,139 +231,97 @@ RSpec.describe Gitlab::ProjectSearchResults do
context 'return type' do
let(:blobs) { [Gitlab::Search::FoundBlob.new(project: project)] }
- let(:results) { described_class.new(user, project, "Files", per_page: 20) }
+ let(:query) { "Files" }
+
+ subject(:objects) { results.objects('wiki_blobs', per_page: 20) }
before do
allow(results).to receive(:wiki_blobs).and_return(blobs)
end
it 'returns list of FoundWikiPage type object' do
- objects = results.objects('wiki_blobs')
-
expect(objects).to be_present
expect(objects).to all(be_a(Gitlab::Search::FoundWikiPage))
end
end
end
- it 'does not list issues on private projects' do
- issue = create(:issue, project: project)
-
- results = described_class.new(user, project, issue.title)
-
- expect(results.objects('issues')).not_to include issue
- end
-
- describe 'confidential issues' do
- let(:query) { 'issue' }
- let(:author) { create(:user) }
- let(:assignee) { create(:user) }
- let(:non_member) { create(:user) }
- let(:member) { create(:user) }
- let(:admin) { create(:admin) }
- let(:project) { create(:project, :internal) }
- let!(:issue) { create(:issue, project: project, title: 'Issue 1') }
- let!(:security_issue_1) { create(:issue, :confidential, project: project, title: 'Security issue 1', author: author) }
- let!(:security_issue_2) { create(:issue, :confidential, title: 'Security issue 2', project: project, assignees: [assignee]) }
-
- it 'does not list project confidential issues for non project members' do
- results = described_class.new(non_member, project, query)
- issues = results.objects('issues')
-
- expect(issues).to include issue
- expect(issues).not_to include security_issue_1
- expect(issues).not_to include security_issue_2
- expect(results.limited_issues_count).to eq 1
- end
-
- it 'does not list project confidential issues for project members with guest role' do
- project.add_guest(member)
+ describe 'issues search' do
+ let(:issue) { create(:issue, project: project) }
+ let(:query) { issue.title }
+ let(:scope) { 'issues' }
- results = described_class.new(member, project, query)
- issues = results.objects('issues')
+ subject(:objects) { results.objects(scope) }
- expect(issues).to include issue
- expect(issues).not_to include security_issue_1
- expect(issues).not_to include security_issue_2
- expect(results.limited_issues_count).to eq 1
+ it 'does not list issues on private projects' do
+ expect(objects).not_to include issue
end
- it 'lists project confidential issues for author' do
- results = described_class.new(author, project, query)
- issues = results.objects('issues')
-
- expect(issues).to include issue
- expect(issues).to include security_issue_1
- expect(issues).not_to include security_issue_2
- expect(results.limited_issues_count).to eq 2
+ describe "confidential issues" do
+ include_examples "access restricted confidential issues"
end
- it 'lists project confidential issues for assignee' do
- results = described_class.new(assignee, project, query)
- issues = results.objects('issues')
+ context 'filtering' do
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:closed_result) { create(:issue, :closed, project: project, title: 'foo closed') }
+ let_it_be(:opened_result) { create(:issue, :opened, project: project, title: 'foo opened') }
+ let(:query) { 'foo' }
- expect(issues).to include issue
- expect(issues).not_to include security_issue_1
- expect(issues).to include security_issue_2
- expect(results.limited_issues_count).to eq 2
+ include_examples 'search results filtered by state'
end
+ end
- it 'lists project confidential issues for project members' do
- project.add_developer(member)
-
- results = described_class.new(member, project, query)
- issues = results.objects('issues')
-
- expect(issues).to include issue
- expect(issues).to include security_issue_1
- expect(issues).to include security_issue_2
- expect(results.limited_issues_count).to eq 3
- end
+ describe 'merge requests search' do
+ let(:scope) { 'merge_requests' }
+ let(:project) { create(:project, :public) }
- it 'lists all project issues for admin' do
- results = described_class.new(admin, project, query)
- issues = results.objects('issues')
+ context 'filtering' do
+ let!(:project) { create(:project, :public) }
+ let!(:opened_result) { create(:merge_request, :opened, source_project: project, title: 'foo opened') }
+ let!(:closed_result) { create(:merge_request, :closed, source_project: project, title: 'foo closed') }
+ let(:query) { 'foo' }
- expect(issues).to include issue
- expect(issues).to include security_issue_1
- expect(issues).to include security_issue_2
- expect(results.limited_issues_count).to eq 3
+ include_examples 'search results filtered by state'
end
end
describe 'notes search' do
- it 'lists notes' do
- project = create(:project, :public)
- note = create(:note, project: project)
+ let(:query) { note.note }
- results = described_class.new(user, project, note.note)
+ subject(:notes) { results.objects('notes') }
- expect(results.objects('notes')).to include note
- end
+ context 'with a public project' do
+ let(:project) { create(:project, :public) }
+ let(:note) { create(:note, project: project) }
- it "doesn't list issue notes when access is restricted" do
- project = create(:project, :public, :issues_private)
- note = create(:note_on_issue, project: project)
+ it 'lists notes' do
+ expect(notes).to include note
+ end
+ end
- results = described_class.new(user, project, note.note)
+ context 'with private issues' do
+ let(:project) { create(:project, :public, :issues_private) }
+ let(:note) { create(:note_on_issue, project: project) }
- expect(results.objects('notes')).not_to include note
+ it "doesn't list issue notes when access is restricted" do
+ expect(notes).not_to include note
+ end
end
- it "doesn't list merge_request notes when access is restricted" do
- project = create(:project, :public, :merge_requests_private)
- note = create(:note_on_merge_request, project: project)
+ context 'with private merge requests' do
+ let(:project) { create(:project, :public, :merge_requests_private) }
+ let(:note) { create(:note_on_merge_request, project: project) }
- results = described_class.new(user, project, note.note)
-
- expect(results.objects('notes')).not_to include note
+ it "doesn't list merge_request notes when access is restricted" do
+ expect(notes).not_to include note
+ end
end
end
describe '#limited_notes_count' do
let(:project) { create(:project, :public) }
let(:note) { create(:note_on_issue, project: project) }
- let(:results) { described_class.new(user, project, note.note) }
+ let(:query) { note.note }
context 'when count_limit is lower than total amount' do
before do
@@ -375,11 +336,6 @@ RSpec.describe Gitlab::ProjectSearchResults do
context 'when count_limit is higher than total amount' do
it 'calls note finder multiple times to get the limited amount of notes' do
- project = create(:project, :public)
- note = create(:note_on_issue, project: project)
-
- results = described_class.new(user, project, note.note)
-
expect(results).to receive(:notes_finder).exactly(4).times.and_call_original
expect(results.limited_notes_count).to eq(1)
end
@@ -395,7 +351,7 @@ RSpec.describe Gitlab::ProjectSearchResults do
.with(anything, anything, anything, described_class::COUNT_LIMIT)
.and_call_original
- described_class.new(user, project, '.').commits_count
+ results.commits_count
end
end
@@ -406,19 +362,23 @@ RSpec.describe Gitlab::ProjectSearchResults do
# * commit
#
shared_examples 'access restricted commits' do
+ let(:query) { search_phrase }
+
context 'when project is internal' do
let(:project) { create(:project, :internal, :repository) }
- it 'does not search if user is not authenticated' do
- commits = described_class.new(nil, project, search_phrase).objects('commits')
+ subject(:commits) { results.objects('commits') }
- expect(commits).to be_empty
+ it 'searches if user is authenticated' do
+ expect(commits).to contain_exactly commit
end
- it 'searches if user is authenticated' do
- commits = described_class.new(user, project, search_phrase).objects('commits')
+ context 'when the user is not authenticated' do
+ let(:user) { nil }
- expect(commits).to contain_exactly commit
+ it 'does not search' do
+ expect(commits).to be_empty
+ end
end
end
@@ -437,29 +397,35 @@ RSpec.describe Gitlab::ProjectSearchResults do
user
end
- it 'does not show commit to stranger' do
- commits = described_class.new(nil, private_project, search_phrase).objects('commits')
+ let(:project) { private_project }
- expect(commits).to be_empty
+ subject(:commits) { results.objects('commits') }
+
+ context 'when the user is not authenticated' do
+ let(:user) { nil }
+
+ it 'does not show commit to stranger' do
+ expect(commits).to be_empty
+ end
end
context 'team access' do
- it 'shows commit to creator' do
- commits = described_class.new(creator, private_project, search_phrase).objects('commits')
+ context 'when the user is the creator' do
+ let(:user) { creator }
- expect(commits).to contain_exactly commit
+ it { expect(commits).to contain_exactly commit }
end
- it 'shows commit to master' do
- commits = described_class.new(team_master, private_project, search_phrase).objects('commits')
+ context 'when the user is a master' do
+ let(:user) { team_master }
- expect(commits).to contain_exactly commit
+ it { expect(commits).to contain_exactly commit }
end
- it 'shows commit to reporter' do
- commits = described_class.new(team_reporter, private_project, search_phrase).objects('commits')
+ context 'when the user is a reporter' do
+ let(:user) { team_reporter }
- expect(commits).to contain_exactly commit
+ it { expect(commits).to contain_exactly commit }
end
end
end
@@ -471,9 +437,7 @@ RSpec.describe Gitlab::ProjectSearchResults do
it 'returns the correct results for each page' do
expect(results_page(1)).to contain_exactly(commit('b83d6e391c22777fca1ed3012fce84f633d7fed0'))
-
expect(results_page(2)).to contain_exactly(commit('498214de67004b1da3d820901307bed2a68a8ef6'))
-
expect(results_page(3)).to contain_exactly(commit('1b12f15a11fc6e62177bef08f47bc7b5ce50b141'))
end
@@ -506,7 +470,7 @@ RSpec.describe Gitlab::ProjectSearchResults do
end
def results_page(page)
- described_class.new(user, project, '.').objects('commits', per_page: 1, page: page)
+ described_class.new(user, '.', project: project).objects('commits', per_page: 1, page: page)
end
def commit(hash)
@@ -518,26 +482,27 @@ RSpec.describe Gitlab::ProjectSearchResults do
let(:project) { create(:project, :public, :repository) }
let(:commit) { project.repository.commit('59e29889be61e6e0e5e223bfa9ac2721d31605b8') }
let(:message) { 'Sorry, I did a mistake' }
+ let(:query) { message }
- it 'finds commit by message' do
- commits = described_class.new(user, project, message).objects('commits')
+ subject(:commits) { results.objects('commits') }
+ it 'finds commit by message' do
expect(commits).to contain_exactly commit
end
- it 'handles when no commit match' do
- commits = described_class.new(user, project, 'not really an existing description').objects('commits')
+ context 'when there are no hits' do
+ let(:query) { 'not really an existing description' }
- expect(commits).to be_empty
+ it 'handles when no commits match' do
+ expect(commits).to be_empty
+ end
end
context 'when repository_ref is provided' do
- let(:message) { 'Feature added' }
+ let(:query) { 'Feature added' }
let(:repository_ref) { 'feature' }
it 'searches in the specified ref' do
- commits = described_class.new(user, project, message, repository_ref).objects('commits')
-
# This commit is unique to the feature branch
expect(commits).to contain_exactly(project.repository.commit('0b4bc9a49b562e85de7cc9e834518ea6828729b9'))
end
@@ -557,14 +522,14 @@ RSpec.describe Gitlab::ProjectSearchResults do
commit_hashes.each do |type, commit_hash|
it "shows commit by #{type} hash id" do
- commits = described_class.new(user, project, commit_hash).objects('commits')
+ commits = described_class.new(user, commit_hash, project: project).objects('commits')
expect(commits).to contain_exactly commit
end
end
it 'handles not existing commit hash correctly' do
- commits = described_class.new(user, project, 'deadbeef').objects('commits')
+ commits = described_class.new(user, 'deadbeef', project: project).objects('commits')
expect(commits).to be_empty
end
@@ -577,9 +542,13 @@ RSpec.describe Gitlab::ProjectSearchResults do
end
describe 'user search' do
- it 'returns the user belonging to the project matching the search query' do
- project = create(:project)
+ let(:query) { 'gob' }
+ let(:group) { create(:group) }
+ let(:project) { create(:project, namespace: group) }
+ subject(:objects) { results.objects('users') }
+
+ it 'returns the user belonging to the project matching the search query' do
user1 = create(:user, username: 'gob_bluth')
create(:project_member, :developer, user: user1, project: project)
@@ -588,23 +557,16 @@ RSpec.describe Gitlab::ProjectSearchResults do
create(:user, username: 'gob_2018')
- result = described_class.new(user, project, 'gob').objects('users')
-
- expect(result).to eq [user1]
+ expect(objects).to contain_exactly(user1)
end
it 'returns the user belonging to the group matching the search query' do
- group = create(:group)
- project = create(:project, namespace: group)
-
user1 = create(:user, username: 'gob_bluth')
create(:group_member, :developer, user: user1, group: group)
create(:user, username: 'gob_2018')
- result = described_class.new(user, project, 'gob').objects('users')
-
- expect(result).to eq [user1]
+ expect(objects).to contain_exactly(user1)
end
end
end
diff --git a/spec/lib/gitlab/prometheus/internal_spec.rb b/spec/lib/gitlab/prometheus/internal_spec.rb
index 1254610fe32..7771d85222a 100644
--- a/spec/lib/gitlab/prometheus/internal_spec.rb
+++ b/spec/lib/gitlab/prometheus/internal_spec.rb
@@ -48,7 +48,7 @@ RSpec.describe Gitlab::Prometheus::Internal do
let(:listen_address) { nil }
it 'does not fail' do
- expect(described_class.uri).to eq(nil)
+ expect(described_class.uri).to be_nil
end
end
@@ -56,12 +56,32 @@ RSpec.describe Gitlab::Prometheus::Internal do
let(:listen_address) { '' }
it 'does not configure prometheus' do
- expect(described_class.uri).to eq(nil)
+ expect(described_class.uri).to be_nil
end
end
end
- describe 'prometheus_enabled?' do
+ describe '.server_address' do
+ context 'self.uri returns valid uri' do
+ ['http://localhost:9090', 'https://localhost:9090 '].each do |valid_uri|
+ it 'returns correct server address' do
+ expect(described_class).to receive(:uri).and_return(valid_uri)
+
+ expect(described_class.server_address).to eq('localhost:9090')
+ end
+ end
+ end
+
+ context 'self.uri returns nil' do
+ it 'returns nil' do
+ expect(described_class).to receive(:uri).and_return(nil)
+
+ expect(described_class.server_address).to be_nil
+ end
+ end
+ end
+
+ describe '.prometheus_enabled?' do
it 'returns correct value' do
expect(described_class.prometheus_enabled?).to eq(true)
end
@@ -101,7 +121,7 @@ RSpec.describe Gitlab::Prometheus::Internal do
end
it 'does not fail' do
- expect(described_class.listen_address).to eq(nil)
+ expect(described_class.listen_address).to be_nil
end
end
end
diff --git a/spec/lib/gitlab/prometheus/queries/additional_metrics_environment_query_spec.rb b/spec/lib/gitlab/prometheus/queries/additional_metrics_environment_query_spec.rb
index f5911963108..d0dee2ad366 100644
--- a/spec/lib/gitlab/prometheus/queries/additional_metrics_environment_query_spec.rb
+++ b/spec/lib/gitlab/prometheus/queries/additional_metrics_environment_query_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Prometheus::Queries::AdditionalMetricsEnvironmentQuery do
around do |example|
- Timecop.freeze { example.run }
+ freeze_time { example.run }
end
include_examples 'additional metrics query' do
diff --git a/spec/lib/gitlab/prometheus/queries/validate_query_spec.rb b/spec/lib/gitlab/prometheus/queries/validate_query_spec.rb
index 045c063ab34..e3706a4b106 100644
--- a/spec/lib/gitlab/prometheus/queries/validate_query_spec.rb
+++ b/spec/lib/gitlab/prometheus/queries/validate_query_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe Gitlab::Prometheus::Queries::ValidateQuery do
let(:error_message) { "invalid parameter 'query': 1:9: parse error: unexpected identifier \"query\"" }
it 'returns invalid' do
- Timecop.freeze do
+ freeze_time do
stub_prometheus_query_error(
prometheus_query_with_time_url(query, Time.now),
error_message
@@ -53,7 +53,7 @@ RSpec.describe Gitlab::Prometheus::Queries::ValidateQuery do
end
it 'catches exception and returns invalid' do
- Timecop.freeze do
+ freeze_time do
expect(subject.query(query)).to eq(valid: false, error: message)
end
end
diff --git a/spec/lib/gitlab/prometheus_client_spec.rb b/spec/lib/gitlab/prometheus_client_spec.rb
index 0774c2f3144..82ef4675553 100644
--- a/spec/lib/gitlab/prometheus_client_spec.rb
+++ b/spec/lib/gitlab/prometheus_client_spec.rb
@@ -36,6 +36,28 @@ RSpec.describe Gitlab::PrometheusClient do
end
end
+ describe '#ready?' do
+ it 'returns true when status code is 200' do
+ stub_request(:get, subject.ready_url).to_return(status: 200, body: 'Prometheus is Ready.\n')
+
+ expect(subject.ready?).to eq(true)
+ end
+
+ it 'returns false when status code is not 200' do
+ [503, 500].each do |code|
+ stub_request(:get, subject.ready_url).to_return(status: code, body: 'Service Unavailable')
+
+ expect(subject.ready?).to eq(false)
+ end
+ end
+
+ it 'raises error when ready api throws exception' do
+ stub_request(:get, subject.ready_url).to_raise(Net::OpenTimeout)
+
+ expect { subject.ready? }.to raise_error(Gitlab::PrometheusClient::UnexpectedResponseError)
+ end
+ end
+
# This shared examples expect:
# - query_url: A query URL
# - execute_query: A query call
@@ -136,7 +158,7 @@ RSpec.describe Gitlab::PrometheusClient do
let(:query_url) { prometheus_query_with_time_url(prometheus_query, Time.now.utc) }
around do |example|
- Timecop.freeze { example.run }
+ freeze_time { example.run }
end
context 'when request returns vector results' do
@@ -195,7 +217,7 @@ RSpec.describe Gitlab::PrometheusClient do
let(:query_url) { prometheus_query_with_time_url(query, Time.now.utc) }
around do |example|
- Timecop.freeze { example.run }
+ freeze_time { example.run }
end
context 'when request returns vector results' do
@@ -228,7 +250,7 @@ RSpec.describe Gitlab::PrometheusClient do
let(:query_url) { prometheus_series_url('series_name', 'other_service') }
around do |example|
- Timecop.freeze { example.run }
+ freeze_time { example.run }
end
it 'calls endpoint and returns list of series' do
@@ -259,7 +281,7 @@ RSpec.describe Gitlab::PrometheusClient do
let(:query_url) { prometheus_query_range_url(prometheus_query) }
around do |example|
- Timecop.freeze { example.run }
+ freeze_time { example.run }
end
context 'when non utc time is passed' do
@@ -358,7 +380,7 @@ RSpec.describe Gitlab::PrometheusClient do
let(:query_url) { prometheus_query_url(prometheus_query) }
around do |example|
- Timecop.freeze { example.run }
+ freeze_time { example.run }
end
context 'when response status code is 200' do
diff --git a/spec/lib/gitlab/quick_actions/substitution_definition_spec.rb b/spec/lib/gitlab/quick_actions/substitution_definition_spec.rb
index b28ac49b4ea..8a4e9ab8bb7 100644
--- a/spec/lib/gitlab/quick_actions/substitution_definition_spec.rb
+++ b/spec/lib/gitlab/quick_actions/substitution_definition_spec.rb
@@ -46,24 +46,4 @@ EOF
end
end
end
-
- describe '#match' do
- it 'checks the content for the command' do
- expect(subject.match(content)).to be_truthy
- end
-
- it 'returns the match data' do
- data = subject.match(content)
- expect(data).to be_a(MatchData)
- expect(data[1]).to eq('I like this stuff')
- end
-
- it 'is nil if content does not have the command' do
- expect(subject.match('blah')).to be_falsey
- end
-
- it 'is nil if content contains the command as prefix' do
- expect(subject.match('/sub_namex')).to be_falsey
- end
- end
end
diff --git a/spec/lib/gitlab/reference_counter_spec.rb b/spec/lib/gitlab/reference_counter_spec.rb
index 0d0ac75ee22..83e4006c69b 100644
--- a/spec/lib/gitlab/reference_counter_spec.rb
+++ b/spec/lib/gitlab/reference_counter_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe Gitlab::ReferenceCounter, :clean_gitlab_redis_shared_state do
end
it 'warns if attempting to decrease a counter with a value of zero or less, and resets the counter' do
- expect(Rails.logger).to receive(:warn).with("Reference counter for project-1" \
+ expect(Gitlab::AppLogger).to receive(:warn).with("Reference counter for project-1" \
" decreased when its value was less than 1. Resetting the counter.")
expect { reference_counter.decrease }.not_to change { reference_counter.value }
end
diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb
index afa930b795a..88c3315150b 100644
--- a/spec/lib/gitlab/regex_spec.rb
+++ b/spec/lib/gitlab/regex_spec.rb
@@ -3,14 +3,19 @@
require 'fast_spec_helper'
RSpec.describe Gitlab::Regex do
- shared_examples_for 'project/group name regex' do
+ shared_examples_for 'project/group name chars regex' do
it { is_expected.to match('gitlab-ce') }
it { is_expected.to match('GitLab CE') }
it { is_expected.to match('100 lines') }
it { is_expected.to match('gitlab.git') }
it { is_expected.to match('Český název') }
it { is_expected.to match('Dash – is this') }
+ end
+
+ shared_examples_for 'project/group name regex' do
+ it_behaves_like 'project/group name chars regex'
it { is_expected.not_to match('?gitlab') }
+ it { is_expected.not_to match("Users's something") }
end
describe '.project_name_regex' do
@@ -33,6 +38,16 @@ RSpec.describe Gitlab::Regex do
end
end
+ describe '.group_name_regex_chars' do
+ subject { described_class.group_name_regex_chars }
+
+ it_behaves_like 'project/group name chars regex'
+
+ it 'allows partial matches' do
+ is_expected.to match(',Valid name wrapped in invalid chars&')
+ end
+ end
+
describe '.project_name_regex_message' do
subject { described_class.project_name_regex_message }
@@ -302,6 +317,73 @@ RSpec.describe Gitlab::Regex do
it { is_expected.not_to match('%2e%2e%2f1.2.3') }
end
+ describe '.pypi_version_regex' do
+ subject { described_class.pypi_version_regex }
+
+ it { is_expected.to match('0.1') }
+ it { is_expected.to match('2.0') }
+ it { is_expected.to match('1.2.0')}
+ it { is_expected.to match('0100!0.0') }
+ it { is_expected.to match('00!1.2') }
+ it { is_expected.to match('1.0a') }
+ it { is_expected.to match('1.0-a') }
+ it { is_expected.to match('1.0.a1') }
+ it { is_expected.to match('1.0a1') }
+ it { is_expected.to match('1.0-a1') }
+ it { is_expected.to match('1.0alpha1') }
+ it { is_expected.to match('1.0b1') }
+ it { is_expected.to match('1.0beta1') }
+ it { is_expected.to match('1.0rc1') }
+ it { is_expected.to match('1.0pre1') }
+ it { is_expected.to match('1.0preview1') }
+ it { is_expected.to match('1.0.dev1') }
+ it { is_expected.to match('1.0.DEV1') }
+ it { is_expected.to match('1.0.post1') }
+ it { is_expected.to match('1.0.rev1') }
+ it { is_expected.to match('1.0.r1') }
+ it { is_expected.to match('1.0c2') }
+ it { is_expected.to match('2012.15') }
+ it { is_expected.to match('1.0+5') }
+ it { is_expected.to match('1.0+abc.5') }
+ it { is_expected.to match('1!1.1') }
+ it { is_expected.to match('1.0c3') }
+ it { is_expected.to match('1.0rc2') }
+ it { is_expected.to match('1.0c1') }
+ it { is_expected.to match('1.0b2-346') }
+ it { is_expected.to match('1.0b2.post345') }
+ it { is_expected.to match('1.0b2.post345.dev456') }
+ it { is_expected.to match('1.2.rev33+123456') }
+ it { is_expected.to match('1.1.dev1') }
+ it { is_expected.to match('1.0b1.dev456') }
+ it { is_expected.to match('1.0a12.dev456') }
+ it { is_expected.to match('1.0b2') }
+ it { is_expected.to match('1.0.dev456') }
+ it { is_expected.to match('1.0c1.dev456') }
+ it { is_expected.to match('1.0.post456') }
+ it { is_expected.to match('1.0.post456.dev34') }
+ it { is_expected.to match('1.2+123abc') }
+ it { is_expected.to match('1.2+abc') }
+ it { is_expected.to match('1.2+abc123') }
+ it { is_expected.to match('1.2+abc123def') }
+ it { is_expected.to match('1.2+1234.abc') }
+ it { is_expected.to match('1.2+123456') }
+ it { is_expected.to match('1.2.r32+123456') }
+ it { is_expected.to match('1!1.2.rev33+123456') }
+ it { is_expected.to match('1.0a12') }
+ it { is_expected.to match('1.2.3-45+abcdefgh') }
+ it { is_expected.to match('v1.2.3') }
+ it { is_expected.not_to match('1.2.3-45-abcdefgh') }
+ it { is_expected.not_to match('..1.2.3') }
+ it { is_expected.not_to match(' 1.2.3') }
+ it { is_expected.not_to match("1.2.3 \r\t") }
+ it { is_expected.not_to match("\r\t 1.2.3") }
+ it { is_expected.not_to match('1./2.3') }
+ it { is_expected.not_to match('1.2.3-4/../../') }
+ it { is_expected.not_to match('1.2.3-4%2e%2e%') }
+ it { is_expected.not_to match('../../../../../1.2.3') }
+ it { is_expected.not_to match('%2e%2e%2f1.2.3') }
+ end
+
describe '.semver_regex' do
subject { described_class.semver_regex }
@@ -335,4 +417,21 @@ RSpec.describe Gitlab::Regex do
it { is_expected.not_to match('1.2') }
it { is_expected.not_to match('1./2.3') }
end
+
+ describe '.generic_package_version_regex' do
+ subject { described_class.generic_package_version_regex }
+
+ it { is_expected.to match('1.2.3') }
+ it { is_expected.to match('1.3.350') }
+ it { is_expected.not_to match('1.3.350-20201230123456') }
+ it { is_expected.not_to match('..1.2.3') }
+ it { is_expected.not_to match(' 1.2.3') }
+ it { is_expected.not_to match("1.2.3 \r\t") }
+ it { is_expected.not_to match("\r\t 1.2.3") }
+ it { is_expected.not_to match('1.2.3-4/../../') }
+ it { is_expected.not_to match('1.2.3-4%2e%2e%') }
+ it { is_expected.not_to match('../../../../../1.2.3') }
+ it { is_expected.not_to match('%2e%2e%2f1.2.3') }
+ it { is_expected.not_to match('') }
+ end
end
diff --git a/spec/lib/gitlab/relative_positioning/item_context_spec.rb b/spec/lib/gitlab/relative_positioning/item_context_spec.rb
new file mode 100644
index 00000000000..daea8d8470d
--- /dev/null
+++ b/spec/lib/gitlab/relative_positioning/item_context_spec.rb
@@ -0,0 +1,215 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::RelativePositioning::ItemContext do
+ let_it_be(:default_user) { create_default(:user) }
+ let_it_be(:project, reload: true) { create(:project) }
+
+ def create_issue(pos)
+ create(:issue, project: project, relative_position: pos)
+ end
+
+ range = (101..107) # A deliberately small range, so we can test everything
+ indices = (0..).take(range.size)
+
+ let(:start) { ((range.first + range.last) / 2.0).floor }
+ let(:subjects) { issues.map { |i| described_class.new(i.reset, range) } }
+
+ # This allows us to refer to range in methods and examples
+ let_it_be(:full_range) { range }
+
+ context 'there are gaps at the start and end' do
+ let_it_be(:issues) { (range.first.succ..range.last.pred).map { |pos| create_issue(pos) } }
+
+ it 'is always possible to find a gap' do
+ expect(subjects)
+ .to all(have_attributes(find_next_gap_before: be_present, find_next_gap_after: be_present))
+ end
+
+ where(:index) { indices.reverse.drop(2) }
+
+ with_them do
+ subject { subjects[index] }
+
+ let(:positions) { subject.scoped_items.map(&:relative_position) }
+
+ it 'is possible to shift_right, which will consume the gap at the end' do
+ subject.shift_right
+
+ expect(subject.find_next_gap_after).not_to be_present
+
+ expect(positions).to all(be_between(range.first, range.last))
+ expect(positions).to eq(positions.uniq)
+ end
+
+ it 'is possible to create_space_right, which will move the gap to immediately after' do
+ subject.create_space_right
+
+ expect(subject.find_next_gap_after).to have_attributes(start_pos: subject.relative_position)
+ expect(positions).to all(be_between(range.first, range.last))
+ expect(positions).to eq(positions.uniq)
+ end
+
+ it 'is possible to shift_left, which will consume the gap at the start' do
+ subject.shift_left
+
+ expect(subject.find_next_gap_before).not_to be_present
+ expect(positions).to all(be_between(range.first, range.last))
+ expect(positions).to eq(positions.uniq)
+ end
+
+ it 'is possible to create_space_left, which will move the gap to immediately before' do
+ subject.create_space_left
+
+ expect(subject.find_next_gap_before).to have_attributes(start_pos: subject.relative_position)
+ expect(positions).to all(be_between(range.first, range.last))
+ expect(positions).to eq(positions.uniq)
+ end
+ end
+ end
+
+ context 'there is a gap of multiple spaces' do
+ let_it_be(:issues) { [range.first, range.last].map { |pos| create_issue(pos) } }
+
+ it 'is impossible to move the last element to the right' do
+ expect { subjects.last.shift_right }.to raise_error(Gitlab::RelativePositioning::NoSpaceLeft)
+ end
+
+ it 'is impossible to move the first element to the left' do
+ expect { subjects.first.shift_left }.to raise_error(Gitlab::RelativePositioning::NoSpaceLeft)
+ end
+
+ it 'is possible to move the last element to the left' do
+ subject = subjects.last
+
+ expect { subject.shift_left }.to change { subject.relative_position }.by(be < 0)
+ end
+
+ it 'is possible to move the first element to the right' do
+ subject = subjects.first
+
+ expect { subject.shift_right }.to change { subject.relative_position }.by(be > 0)
+ end
+
+ it 'is possible to find the gap from the right' do
+ gap = Gitlab::RelativePositioning::Gap.new(range.last, range.first)
+
+ expect(subjects.last).to have_attributes(
+ find_next_gap_before: eq(gap),
+ find_next_gap_after: be_nil
+ )
+ end
+
+ it 'is possible to find the gap from the left' do
+ gap = Gitlab::RelativePositioning::Gap.new(range.first, range.last)
+
+ expect(subjects.first).to have_attributes(
+ find_next_gap_before: be_nil,
+ find_next_gap_after: eq(gap)
+ )
+ end
+ end
+
+ context 'there are several free spaces' do
+ let_it_be(:issues) { range.select(&:even?).map { |pos| create_issue(pos) } }
+ let_it_be(:gaps) do
+ range.select(&:odd?).map do |pos|
+ rhs = pos.succ.clamp(range.first, range.last)
+ lhs = pos.pred.clamp(range.first, range.last)
+
+ {
+ before: Gitlab::RelativePositioning::Gap.new(rhs, lhs),
+ after: Gitlab::RelativePositioning::Gap.new(lhs, rhs)
+ }
+ end
+ end
+
+ def issue_at(position)
+ issues.find { |i| i.relative_position == position }
+ end
+
+ where(:current_pos) { range.select(&:even?) }
+
+ with_them do
+ let(:subject) { subjects.find { |s| s.relative_position == current_pos } }
+ let(:siblings) { subjects.reject { |s| s.relative_position == current_pos } }
+
+ def covered_by_range(pos)
+ full_range.cover?(pos) ? pos : nil
+ end
+
+ it 'finds the closest gap' do
+ closest_gap_before = gaps
+ .map { |gap| gap[:before] }
+ .select { |gap| gap.start_pos <= subject.relative_position }
+ .max_by { |gap| gap.start_pos }
+ closest_gap_after = gaps
+ .map { |gap| gap[:after] }
+ .select { |gap| gap.start_pos >= subject.relative_position }
+ .min_by { |gap| gap.start_pos }
+
+ expect(subject).to have_attributes(
+ find_next_gap_before: closest_gap_before,
+ find_next_gap_after: closest_gap_after
+ )
+ end
+
+ it 'finds the neighbours' do
+ expect(subject).to have_attributes(
+ lhs_neighbour: subject.neighbour(issue_at(subject.relative_position - 2)),
+ rhs_neighbour: subject.neighbour(issue_at(subject.relative_position + 2))
+ )
+ end
+
+ it 'finds the next relative_positions' do
+ expect(subject).to have_attributes(
+ prev_relative_position: covered_by_range(subject.relative_position - 2),
+ next_relative_position: covered_by_range(subject.relative_position + 2)
+ )
+ end
+
+ it 'finds the min/max positions' do
+ expect(subject).to have_attributes(
+ min_relative_position: issues.first.relative_position,
+ max_relative_position: issues.last.relative_position
+ )
+ end
+
+ it 'finds the min/max siblings' do
+ expect(subject).to have_attributes(
+ min_sibling: siblings.first,
+ max_sibling: siblings.last
+ )
+ end
+ end
+ end
+
+ context 'there is at least one free space' do
+ where(:free_space) { range.to_a }
+
+ with_them do
+ let(:issues) { range.reject { |x| x == free_space }.map { |p| create_issue(p) } }
+ let(:gap_rhs) { free_space.succ.clamp(range.first, range.last) }
+ let(:gap_lhs) { free_space.pred.clamp(range.first, range.last) }
+
+ it 'can always find a gap before if there is space to the left' do
+ expected_gap = Gitlab::RelativePositioning::Gap.new(gap_rhs, gap_lhs)
+
+ to_the_right_of_gap = subjects.select { |s| free_space < s.relative_position }
+
+ expect(to_the_right_of_gap)
+ .to all(have_attributes(find_next_gap_before: eq(expected_gap), find_next_gap_after: be_nil))
+ end
+
+ it 'can always find a gap after if there is space to the right' do
+ expected_gap = Gitlab::RelativePositioning::Gap.new(gap_lhs, gap_rhs)
+
+ to_the_left_of_gap = subjects.select { |s| s.relative_position < free_space }
+
+ expect(to_the_left_of_gap)
+ .to all(have_attributes(find_next_gap_before: be_nil, find_next_gap_after: eq(expected_gap)))
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/relative_positioning/mover_spec.rb b/spec/lib/gitlab/relative_positioning/mover_spec.rb
new file mode 100644
index 00000000000..c49230c2415
--- /dev/null
+++ b/spec/lib/gitlab/relative_positioning/mover_spec.rb
@@ -0,0 +1,487 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe RelativePositioning::Mover do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:one_sibling, reload: true) { create(:project, creator: user, namespace: user.namespace) }
+ let_it_be(:one_free_space, reload: true) { create(:project, creator: user, namespace: user.namespace) }
+ let_it_be(:fully_occupied, reload: true) { create(:project, creator: user, namespace: user.namespace) }
+ let_it_be(:no_issues, reload: true) { create(:project, creator: user, namespace: user.namespace) }
+ let_it_be(:three_sibs, reload: true) { create(:project, creator: user, namespace: user.namespace) }
+
+ def create_issue(pos, parent = project)
+ create(:issue, author: user, project: parent, relative_position: pos)
+ end
+
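+ # A deliberately small position range, so every index/position combination can be exercised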
+ range = (101..105)
+ indices = (0..).take(range.size)
+
+ let(:start) { ((range.first + range.last) / 2.0).floor }
+
+ subject { described_class.new(start, range) }
+
+ let_it_be(:full_set) do
+ range.each_with_index.map do |pos, i|
+ create(:issue, iid: i.succ, project: fully_occupied, relative_position: pos)
+ end
+ end
+
+ let_it_be(:sole_sibling) { create(:issue, iid: 1, project: one_sibling, relative_position: nil) }
+ let_it_be(:one_sibling_set) { [sole_sibling] }
+ let_it_be(:one_free_space_set) do
+ indices.drop(1).map { |iid| create(:issue, project: one_free_space, iid: iid.succ) }
+ end
+ let_it_be(:three_sibs_set) do
+ [1, 2, 3].map { |iid| create(:issue, iid: iid, project: three_sibs) }
+ end
+
+ def set_positions(positions)
+ vals = issues.zip(positions).map do |issue, pos|
+ issue.relative_position = pos
+ "(#{issue.id}, #{pos})"
+ end.join(', ')
+
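+ # Apply all the new positions in a single UPDATE by joining against a VALUES list (CTE)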
+ Issue.connection.exec_query(<<~SQL, 'set-positions')
+ WITH cte(cte_id, new_pos) AS (
+ SELECT * FROM (VALUES #{vals}) as t (id, pos)
+ )
+ UPDATE issues SET relative_position = new_pos FROM cte WHERE id = cte_id
+ ;
+ SQL
+ end
+
+ def ids_in_position_order
+ project.issues.reorder(:relative_position).pluck(:id)
+ end
+
+ def relative_positions
+ project.issues.pluck(:relative_position)
+ end
+
+ describe '#move_to_end' do
+ def max_position
+ project.issues.maximum(:relative_position)
+ end
+
+ def move_to_end(issue)
+ subject.move_to_end(issue)
+ issue.save!
+ end
+
+ shared_examples 'able to place a new item at the end' do
+ it 'can place any new item' do
+ existing_issues = ids_in_position_order
+ new_item = create_issue(nil)
+
+ expect do
+ move_to_end(new_item)
+ end.to change { project.issues.pluck(:id, :relative_position) }
+
+ expect(new_item.relative_position).to eq(max_position)
+ expect(relative_positions).to all(be_between(range.first, range.last))
+ expect(ids_in_position_order).to eq(existing_issues + [new_item.id])
+ end
+ end
+
+ shared_examples 'able to move existing items to the end' do
+ it 'can move any existing item' do
+ issues = project.issues.reorder(:relative_position).to_a
+ issue = issues[index]
+ other_issues = issues.reject { |i| i == issue }
+ expect(relative_positions).to all(be_between(range.first, range.last))
+
+ if issues.last == issue
+ move_to_end(issue) # May not change the positions
+ else
+ expect do
+ move_to_end(issue)
+ end.to change { project.issues.pluck(:id, :relative_position) }
+ end
+
+ project.reset
+
+ expect(relative_positions).to all(be_between(range.first, range.last))
+ expect(issue.relative_position).to eq(max_position)
+ expect(ids_in_position_order).to eq(other_issues.map(&:id) + [issue.id])
+ end
+ end
+
+ context 'all positions are taken' do
+ let(:issues) { full_set }
+ let(:project) { fully_occupied }
+
+ it 'raises an error when placing a new item' do
+ new_item = create_issue(nil)
+
+ expect { subject.move_to_end(new_item) }.to raise_error(RelativePositioning::NoSpaceLeft)
+ end
+
+ where(:index) { indices }
+
+ with_them do
+ it_behaves_like 'able to move existing items to the end'
+ end
+ end
+
+ context 'there are no siblings' do
+ let(:issues) { [] }
+ let(:project) { no_issues }
+
+ it_behaves_like 'able to place a new item at the end'
+ end
+
+ context 'there is only one sibling' do
+ where(:pos) { range.to_a }
+
+ with_them do
+ let(:issues) { one_sibling_set }
+ let(:project) { one_sibling }
+ let(:index) { 0 }
+
+ before do
+ sole_sibling.reset.update!(relative_position: pos)
+ end
+
+ it_behaves_like 'able to place a new item at the end'
+
+ it_behaves_like 'able to move existing items to the end'
+ end
+ end
+
+ context 'at least one position is free' do
+ where(:free_space, :index) do
+ is = indices.take(range.size - 1)
+
+ range.to_a.product(is)
+ end
+
+ with_them do
+ let(:issues) { one_free_space_set }
+ let(:project) { one_free_space }
+
+ before do
+ positions = range.reject { |x| x == free_space }
+ set_positions(positions)
+ end
+
+ it_behaves_like 'able to place a new item at the end'
+
+ it_behaves_like 'able to move existing items to the end'
+ end
+ end
+ end
+
+ describe '#move_to_start' do
+ def min_position
+ project.issues.minimum(:relative_position)
+ end
+
+ def move_to_start(issue)
+ subject.move_to_start(issue)
+ issue.save!
+ end
+
+ shared_examples 'able to place a new item at the start' do
+ it 'can place any new item' do
+ existing_issues = ids_in_position_order
+ new_item = create_issue(nil)
+
+ expect do
+ move_to_start(new_item)
+ end.to change { project.issues.pluck(:id, :relative_position) }
+
+ expect(relative_positions).to all(be_between(range.first, range.last))
+ expect(new_item.relative_position).to eq(min_position)
+ expect(ids_in_position_order).to eq([new_item.id] + existing_issues)
+ end
+ end
+
+ shared_examples 'able to move existing items to the start' do
+ it 'can move any existing item' do
+ issues = project.issues.reorder(:relative_position).to_a
+ issue = issues[index]
+ other_issues = issues.reject { |i| i == issue }
+ expect(relative_positions).to all(be_between(range.first, range.last))
+
+ if issues.first == issue
+ move_to_start(issue) # May not change the positions
+ else
+ expect do
+ move_to_start(issue)
+ end.to change { project.issues.pluck(:id, :relative_position) }
+ end
+
+ project.reset
+
+ expect(relative_positions).to all(be_between(range.first, range.last))
+ expect(issue.relative_position).to eq(min_position)
+ expect(ids_in_position_order).to eq([issue.id] + other_issues.map(&:id))
+ end
+ end
+
+ context 'all positions are taken' do
+ let(:issues) { full_set }
+ let(:project) { fully_occupied }
+
+ it 'raises an error when placing a new item' do
+ new_item = create(:issue, project: project, relative_position: nil)
+
+ expect { subject.move_to_start(new_item) }.to raise_error(RelativePositioning::NoSpaceLeft)
+ end
+
+ where(:index) { indices }
+
+ with_them do
+ it_behaves_like 'able to move existing items to the start'
+ end
+ end
+
+ context 'there are no siblings' do
+ let(:project) { no_issues }
+ let(:issues) { [] }
+
+ it_behaves_like 'able to place a new item at the start'
+ end
+
+ context 'there is only one sibling' do
+ where(:pos) { range.to_a }
+
+ with_them do
+ let(:issues) { one_sibling_set }
+ let(:project) { one_sibling }
+ let(:index) { 0 }
+
+ before do
+ sole_sibling.reset.update!(relative_position: pos)
+ end
+
+ it_behaves_like 'able to place a new item at the start'
+
+ it_behaves_like 'able to move existing items to the start'
+ end
+ end
+
+ context 'at least one position is free' do
+ where(:free_space, :index) do
+ range.to_a.product((0..).take(range.size - 1).to_a)
+ end
+
+ with_them do
+ let(:issues) { one_free_space_set }
+ let(:project) { one_free_space }
+
+ before do
+ set_positions(range.reject { |x| x == free_space })
+ end
+
+ it_behaves_like 'able to place a new item at the start'
+
+ it_behaves_like 'able to move existing items to the start'
+ end
+ end
+ end
+
+ describe '#move' do
+ shared_examples 'able to move a new item' do
+ let(:other_issues) { project.issues.reorder(relative_position: :asc).to_a }
+ let!(:previous_order) { other_issues.map(&:id) }
+
+ it 'can place any new item between two others' do
+ new_item = create_issue(nil)
+
+ subject.move(new_item, lhs, rhs)
+ new_item.save!
+ lhs.reset
+ rhs.reset
+
+ expect(new_item.relative_position).to be_between(range.first, range.last)
+ expect(new_item.relative_position).to be_between(lhs.relative_position, rhs.relative_position)
+
+ ids = project.issues.reorder(:relative_position).pluck(:id).reject { |id| id == new_item.id }
+ expect(ids).to eq(previous_order)
+ end
+
+ it 'can place any new item after another' do
+ new_item = create_issue(nil)
+
+ subject.move(new_item, lhs, nil)
+ new_item.save!
+ lhs.reset
+
+ expect(new_item.relative_position).to be_between(range.first, range.last)
+ expect(new_item.relative_position).to be > lhs.relative_position
+
+ ids = project.issues.reorder(:relative_position).pluck(:id).reject { |id| id == new_item.id }
+ expect(ids).to eq(previous_order)
+ end
+
+ it 'can place any new item before another' do
+ new_item = create_issue(nil)
+
+ subject.move(new_item, nil, rhs)
+ new_item.save!
+ rhs.reset
+
+ expect(new_item.relative_position).to be_between(range.first, range.last)
+ expect(new_item.relative_position).to be < rhs.relative_position
+
+ ids = project.issues.reorder(:relative_position).pluck(:id).reject { |id| id == new_item.id }
+ expect(ids).to eq(previous_order)
+ end
+ end
+
+ shared_examples 'able to move an existing item' do
+ let(:all_issues) { project.issues.reorder(:relative_position).to_a }
+ let(:item) { all_issues[index] }
+ let(:positions) { project.reset.issues.pluck(:relative_position) }
+ let(:other_issues) { all_issues.reject { |i| i == item } }
+ let!(:previous_order) { other_issues.map(&:id) }
+ let(:new_order) do
+ project.issues.where.not(id: item.id).reorder(:relative_position).pluck(:id)
+ end
+
+ it 'can place any item between two others' do
+ subject.move(item, lhs, rhs)
+ item.save!
+ lhs.reset
+ rhs.reset
+
+ expect(positions).to all(be_between(range.first, range.last))
+ expect(positions).to match_array(positions.uniq)
+ expect(item.relative_position).to be_between(lhs.relative_position, rhs.relative_position)
+
+ expect(new_order).to eq(previous_order)
+ end
+
+ def sequence(expected_sequence)
+ range = (expected_sequence.first.relative_position..expected_sequence.last.relative_position)
+
+ project.issues.reorder(:relative_position).where(relative_position: range)
+ end
+
+ it 'can place any item after another' do
+ subject.move(item, lhs, nil)
+ item.save!
+ lhs.reset
+
+ expect(positions).to all(be_between(range.first, range.last))
+ expect(positions).to match_array(positions.uniq)
+ expect(item.relative_position).to be >= lhs.relative_position
+
+ expected_sequence = [lhs, item].uniq
+
+ expect(sequence(expected_sequence)).to eq(expected_sequence)
+
+ expect(new_order).to eq(previous_order)
+ end
+
+ it 'can place any item before another' do
+ subject.move(item, nil, rhs)
+ item.save!
+ rhs.reset
+
+ expect(positions).to all(be_between(range.first, range.last))
+ expect(positions).to match_array(positions.uniq)
+ expect(item.relative_position).to be <= rhs.relative_position
+
+ expected_sequence = [item, rhs].uniq
+
+ expect(sequence(expected_sequence)).to eq(expected_sequence)
+
+ expect(new_order).to eq(previous_order)
+ end
+ end
+
+ context 'all positions are taken' do
+ let(:issues) { full_set }
+ let(:project) { fully_occupied }
+
+ where(:idx_a, :idx_b) do
+ indices.product(indices).select { |a, b| a < b }
+ end
+
+ with_them do
+ let(:lhs) { issues[idx_a].reset }
+ let(:rhs) { issues[idx_b].reset }
+
+ it 'raises an error when placing a new item anywhere' do
+ new_item = create_issue(nil)
+
+ expect { subject.move(new_item, lhs, rhs) }
+ .to raise_error(Gitlab::RelativePositioning::NoSpaceLeft)
+
+ expect { subject.move(new_item, nil, rhs) }
+ .to raise_error(Gitlab::RelativePositioning::NoSpaceLeft)
+
+ expect { subject.move(new_item, lhs, nil) }
+ .to raise_error(Gitlab::RelativePositioning::NoSpaceLeft)
+ end
+
+ where(:index) { indices }
+
+ with_them do
+ it_behaves_like 'able to move an existing item'
+ end
+ end
+ end
+
+ context 'there are no siblings' do
+ let(:project) { no_issues }
+
+ it 'raises an ArgumentError when both first and last are nil' do
+ new_item = create_issue(nil)
+
+ expect { subject.move(new_item, nil, nil) }.to raise_error(ArgumentError)
+ end
+ end
+
+ context 'there are a couple of siblings' do
+ where(:pos_movable, :pos_a, :pos_b) do
+ xs = range.to_a
+
+ xs.product(xs).product(xs).map(&:flatten)
+ .select { |vals| vals == vals.uniq && vals[1] < vals[2] }
+ end
+
+ with_them do
+ let(:issues) { three_sibs_set }
+ let(:project) { three_sibs }
+ let(:index) { 0 }
+ let(:lhs) { issues[1] }
+ let(:rhs) { issues[2] }
+
+ before do
+ set_positions([pos_movable, pos_a, pos_b])
+ end
+
+ it_behaves_like 'able to move a new item'
+ it_behaves_like 'able to move an existing item'
+ end
+ end
+
+ context 'at least one position is free' do
+ where(:free_space, :index, :pos_a, :pos_b) do
+ is = indices.reverse.drop(1)
+
+ range.to_a.product(is).product(is).product(is)
+ .map(&:flatten)
+ .select { |_, _, a, b| a < b }
+ end
+
+ with_them do
+ let(:issues) { one_free_space_set }
+ let(:project) { one_free_space }
+ let(:lhs) { issues[pos_a] }
+ let(:rhs) { issues[pos_b] }
+
+ before do
+ set_positions(range.reject { |x| x == free_space })
+ end
+
+ it_behaves_like 'able to move a new item'
+ it_behaves_like 'able to move an existing item'
+ end
+ end
+ end
+end
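
For orientation, the mover interface exercised by the shared examples above reduces to a single move(item, lhs, rhs) call plus a save. A minimal usage sketch follows; `mover` stands in for whatever `subject` resolves to in this spec, so the construction itself is assumed rather than taken from this hunk:

# Hypothetical usage of the relative-positioning mover exercised above.
mover.move(item, lhs, rhs)  # place item between two siblings
mover.move(item, lhs, nil)  # place item after lhs
mover.move(item, nil, rhs)  # place item before rhs
item.save!
# Per the expectations above, Gitlab::RelativePositioning::NoSpaceLeft is raised
# when every position in the range is taken, and ArgumentError when both
# neighbours are nil.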
diff --git a/spec/lib/gitlab/relative_positioning/range_spec.rb b/spec/lib/gitlab/relative_positioning/range_spec.rb
new file mode 100644
index 00000000000..c3386336493
--- /dev/null
+++ b/spec/lib/gitlab/relative_positioning/range_spec.rb
@@ -0,0 +1,162 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::RelativePositioning::Range do
+ item_a = OpenStruct.new(relative_position: 100, object: :x, positioned?: true)
+ item_b = OpenStruct.new(relative_position: 200, object: :y, positioned?: true)
+
+ before do
+ allow(item_a).to receive(:lhs_neighbour) { nil }
+ allow(item_a).to receive(:rhs_neighbour) { item_b }
+
+ allow(item_b).to receive(:lhs_neighbour) { item_a }
+ allow(item_b).to receive(:rhs_neighbour) { nil }
+ end
+
+ describe 'RelativePositioning.range' do
+ it 'raises if lhs and rhs are nil' do
+ expect { Gitlab::RelativePositioning.range(nil, nil) }.to raise_error(ArgumentError)
+ end
+
+ it 'raises an error if there is no extent' do
+ expect { Gitlab::RelativePositioning.range(item_a, item_a) }.to raise_error(ArgumentError)
+ end
+
+ it 'constructs a closed range when both termini are provided' do
+ range = Gitlab::RelativePositioning.range(item_a, item_b)
+ expect(range).to be_a_kind_of(Gitlab::RelativePositioning::Range)
+ expect(range).to be_a_kind_of(Gitlab::RelativePositioning::ClosedRange)
+ end
+
+ it 'constructs a starting-from range when only the LHS is provided' do
+ range = Gitlab::RelativePositioning.range(item_a, nil)
+ expect(range).to be_a_kind_of(Gitlab::RelativePositioning::Range)
+ expect(range).to be_a_kind_of(Gitlab::RelativePositioning::StartingFrom)
+ end
+
+ it 'constructs an ending-at range when only the RHS is provided' do
+ range = Gitlab::RelativePositioning.range(nil, item_b)
+ expect(range).to be_a_kind_of(Gitlab::RelativePositioning::Range)
+ expect(range).to be_a_kind_of(Gitlab::RelativePositioning::EndingAt)
+ end
+ end
+
+ it 'infers neighbours correctly' do
+ starting_at_a = Gitlab::RelativePositioning.range(item_a, nil)
+ ending_at_b = Gitlab::RelativePositioning.range(nil, item_b)
+
+ expect(starting_at_a).to eq(ending_at_b)
+ end
+
+ describe '#open_on_left?' do
+ where(:lhs, :rhs, :expected_result) do
+ [
+ [item_a, item_b, false],
+ [item_a, nil, false],
+ [nil, item_b, false],
+ [item_b, nil, false],
+ [nil, item_a, true]
+ ]
+ end
+
+ with_them do
+ it 'is true if there is no LHS terminus' do
+ range = Gitlab::RelativePositioning.range(lhs, rhs)
+
+ expect(range.open_on_left?).to be(expected_result)
+ end
+ end
+ end
+
+ describe '#open_on_right?' do
+ where(:lhs, :rhs, :expected_result) do
+ [
+ [item_a, item_b, false],
+ [item_a, nil, false],
+ [nil, item_b, false],
+ [item_b, nil, true],
+ [nil, item_a, false]
+ ]
+ end
+
+ with_them do
+ it 'is true if there is no RHS terminus' do
+ range = Gitlab::RelativePositioning.range(lhs, rhs)
+
+ expect(range.open_on_right?).to be(expected_result)
+ end
+ end
+ end
+
+ describe '#cover?' do
+ item_c = OpenStruct.new(relative_position: 150, object: :z, positioned?: true)
+ item_d = OpenStruct.new(relative_position: 50, object: :w, positioned?: true)
+ item_e = OpenStruct.new(relative_position: 250, object: :r, positioned?: true)
+ item_f = OpenStruct.new(positioned?: false)
+ item_ax = OpenStruct.new(relative_position: 100, object: :not_x, positioned?: true)
+ item_bx = OpenStruct.new(relative_position: 200, object: :not_y, positioned?: true)
+
+ where(:lhs, :rhs, :item, :expected_result) do
+ [
+ [item_a, item_b, item_a, true],
+ [item_a, item_b, item_b, true],
+ [item_a, item_b, item_c, true],
+ [item_a, item_b, item_d, false],
+ [item_a, item_b, item_e, false],
+ [item_a, item_b, item_ax, false],
+ [item_a, item_b, item_bx, false],
+ [item_a, item_b, item_f, false],
+ [item_a, item_b, nil, false],
+
+ [nil, item_b, item_a, true],
+ [nil, item_b, item_b, true],
+ [nil, item_b, item_c, true],
+ [nil, item_b, item_d, false],
+ [nil, item_b, item_e, false],
+ [nil, item_b, item_ax, false],
+ [nil, item_b, item_bx, false],
+ [nil, item_b, item_f, false],
+ [nil, item_b, nil, false],
+
+ [item_a, nil, item_a, true],
+ [item_a, nil, item_b, true],
+ [item_a, nil, item_c, true],
+ [item_a, nil, item_d, false],
+ [item_a, nil, item_e, false],
+ [item_a, nil, item_ax, false],
+ [item_a, nil, item_bx, false],
+ [item_a, nil, item_f, false],
+ [item_a, nil, nil, false],
+
+ [nil, item_a, item_a, true],
+ [nil, item_a, item_b, false],
+ [nil, item_a, item_c, false],
+ [nil, item_a, item_d, true],
+ [nil, item_a, item_e, false],
+ [nil, item_a, item_ax, false],
+ [nil, item_a, item_bx, false],
+ [nil, item_a, item_f, false],
+ [nil, item_a, nil, false],
+
+ [item_b, nil, item_a, false],
+ [item_b, nil, item_b, true],
+ [item_b, nil, item_c, false],
+ [item_b, nil, item_d, false],
+ [item_b, nil, item_e, true],
+ [item_b, nil, item_ax, false],
+ [item_b, nil, item_bx, false],
+ [item_b, nil, item_f, false],
+ [item_b, nil, nil, false]
+ ]
+ end
+
+ with_them do
+ it 'is true when the object is within the bounds of the range' do
+ range = Gitlab::RelativePositioning.range(lhs, rhs)
+
+ expect(range.cover?(item)).to be(expected_result)
+ end
+ end
+ end
+end
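
The factory behaviour pinned down above is compact enough to summarise. The sketch below is inferred from the expectations in this spec and is not the actual Gitlab::RelativePositioning implementation; the module name and the Struct stand-ins for ClosedRange, StartingFrom and EndingAt are illustrative only.

# Sketch of the dispatch implied by the spec: pick the Range subclass from the
# termini that are present, and reject input with no termini or no extent.
module RelativePositioningSketch
  ClosedRange  = Struct.new(:lhs, :rhs)
  StartingFrom = Struct.new(:lhs)
  EndingAt     = Struct.new(:rhs)

  def self.range(lhs, rhs)
    raise ArgumentError, 'at least one terminus is required' if lhs.nil? && rhs.nil?
    raise ArgumentError, 'a range must have an extent' if lhs == rhs

    if lhs && rhs
      ClosedRange.new(lhs, rhs)
    elsif lhs
      StartingFrom.new(lhs)
    else
      EndingAt.new(rhs)
    end
  end
end

RelativePositioningSketch.range(:item_a, nil) # => #<struct StartingFrom lhs=:item_a>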
diff --git a/spec/lib/gitlab/repository_cache_adapter_spec.rb b/spec/lib/gitlab/repository_cache_adapter_spec.rb
index c9ad79234d3..4c57665b41f 100644
--- a/spec/lib/gitlab/repository_cache_adapter_spec.rb
+++ b/spec/lib/gitlab/repository_cache_adapter_spec.rb
@@ -302,7 +302,7 @@ RSpec.describe Gitlab::RepositoryCacheAdapter do
it 'does not expire caches for non-existent methods' do
expect(cache).not_to receive(:expire).with(:nonexistent)
- expect(Rails.logger).to(
+ expect(Gitlab::AppLogger).to(
receive(:error).with("Requested to expire non-existent method 'nonexistent' for Repository"))
repository.expire_method_caches(%i(nonexistent))
diff --git a/spec/lib/gitlab/robots_txt/parser_spec.rb b/spec/lib/gitlab/robots_txt/parser_spec.rb
new file mode 100644
index 00000000000..bb88003ce20
--- /dev/null
+++ b/spec/lib/gitlab/robots_txt/parser_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rspec-parameterized'
+
+RSpec.describe Gitlab::RobotsTxt::Parser do
+ describe '#disallowed?' do
+ subject { described_class.new(content).disallowed?(path) }
+
+ context 'a simple robots.txt file' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:content) do
+ <<~TXT
+ User-Agent: *
+ Disallow: /autocomplete/users
+ Disallow: /search
+ Disallow: /api
+ TXT
+ end
+
+ where(:path, :result) do
+ '/autocomplete/users' | true
+ '/autocomplete/users/a.html' | true
+ '/search' | true
+ '/search.html' | true
+ '/api' | true
+ '/api/graphql' | true
+ '/api/index.html' | true
+ '/projects' | false
+ end
+
+ with_them do
+ it { is_expected.to eq(result), "#{path} expected to be #{result}" }
+ end
+ end
+
+ context 'robots.txt file with wildcard' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:content) do
+ <<~TXT
+ User-Agent: *
+ Disallow: /search
+
+ User-Agent: *
+ Disallow: /*/*.git
+ Disallow: /*/archive/
+ Disallow: /*/repository/archive*
+ TXT
+ end
+
+ where(:path, :result) do
+ '/search' | true
+ '/namespace/project.git' | true
+ '/project/archive/' | true
+ '/project/archive/file.gz' | true
+ '/project/repository/archive' | true
+ '/project/repository/archive.gz' | true
+ '/project/repository/archive/file.gz' | true
+ '/projects' | false
+ '/git' | false
+ '/projects/git' | false
+ end
+
+ with_them do
+ it { is_expected.to eq(result), "#{path} expected to be #{result}" }
+ end
+ end
+ end
+end
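
To make the table cases above easier to read, here is a minimal sketch of the prefix-and-wildcard matching they exercise. It is a hypothetical illustration (the class name SimpleRobotsTxt and its internals are assumptions), not the real Gitlab::RobotsTxt::Parser, and it assumes Ruby 2.7+ for Enumerable#filter_map.

# Each Disallow value becomes an anchored regex with '*' widened to '.*';
# a path is disallowed when any rule matches its beginning.
class SimpleRobotsTxt
  def initialize(content)
    @patterns = content.each_line.filter_map do |line|
      value = line[/\ADisallow:\s*(\S+)/, 1]
      next unless value

      Regexp.new('\A' + Regexp.escape(value).gsub('\*', '.*'))
    end
  end

  def disallowed?(path)
    @patterns.any? { |pattern| pattern.match?(path) }
  end
end

SimpleRobotsTxt.new("Disallow: /*/*.git").disallowed?('/namespace/project.git') # => true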
diff --git a/spec/lib/gitlab/search/recent_issues_spec.rb b/spec/lib/gitlab/search/recent_issues_spec.rb
new file mode 100644
index 00000000000..19a41d2aa38
--- /dev/null
+++ b/spec/lib/gitlab/search/recent_issues_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Gitlab::Search::RecentIssues do
+ def create_item(content:, project:)
+ create(:issue, title: content, project: project)
+ end
+
+ it_behaves_like 'search recent items'
+end
diff --git a/spec/lib/gitlab/search/recent_merge_requests_spec.rb b/spec/lib/gitlab/search/recent_merge_requests_spec.rb
new file mode 100644
index 00000000000..c6678ce0342
--- /dev/null
+++ b/spec/lib/gitlab/search/recent_merge_requests_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Gitlab::Search::RecentMergeRequests do
+ def create_item(content:, project:)
+ create(:merge_request, :unique_branches, title: content, target_project: project, source_project: project)
+ end
+
+ it_behaves_like 'search recent items'
+end
diff --git a/spec/lib/gitlab/search_results_spec.rb b/spec/lib/gitlab/search_results_spec.rb
index 61fa61566cd..b4cf6a568b4 100644
--- a/spec/lib/gitlab/search_results_spec.rb
+++ b/spec/lib/gitlab/search_results_spec.rb
@@ -6,16 +6,14 @@ RSpec.describe Gitlab::SearchResults do
include ProjectForksHelper
include SearchHelpers
- let(:user) { create(:user) }
- let!(:project) { create(:project, name: 'foo') }
- let!(:issue) { create(:issue, project: project, title: 'foo') }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, name: 'foo') }
+ let_it_be(:issue) { create(:issue, project: project, title: 'foo') }
+ let_it_be(:milestone) { create(:milestone, project: project, title: 'foo') }
+ let(:merge_request) { create(:merge_request, source_project: project, title: 'foo') }
+ let(:filters) { {} }
- let!(:merge_request) do
- create(:merge_request, source_project: project, title: 'foo')
- end
-
- let!(:milestone) { create(:milestone, project: project, title: 'foo') }
- let(:results) { described_class.new(user, Project.all, 'foo') }
+ subject(:results) { described_class.new(user, 'foo', Project.order(:id), filters: filters) }
context 'as a user with access' do
before do
@@ -108,10 +106,10 @@ RSpec.describe Gitlab::SearchResults do
describe '#limited_issues_count' do
it 'runs single SQL query to get the limited amount of issues' do
- create(:milestone, project: project, title: 'foo2')
+ create(:issue, project: project, title: 'foo2')
expect(results).to receive(:issues).with(public_only: true).and_call_original
- expect(results).not_to receive(:issues).with(no_args).and_call_original
+ expect(results).not_to receive(:issues).with(no_args)
expect(results.limited_issues_count).to eq(1)
end
@@ -133,7 +131,7 @@ RSpec.describe Gitlab::SearchResults do
forked_project = fork_project(project, user)
merge_request_2 = create(:merge_request, target_project: project, source_project: forked_project, title: 'foo')
- results = described_class.new(user, Project.where(id: forked_project.id), 'foo')
+ results = described_class.new(user, 'foo', Project.where(id: forked_project.id))
expect(results.objects('merge_requests')).to include merge_request_2
end
@@ -152,6 +150,15 @@ RSpec.describe Gitlab::SearchResults do
results.objects('merge_requests')
end
+
+ context 'filtering' do
+ let!(:opened_result) { create(:merge_request, :opened, source_project: project, title: 'foo opened') }
+ let!(:closed_result) { create(:merge_request, :closed, source_project: project, title: 'foo closed') }
+ let(:scope) { 'merge_requests' }
+ let(:query) { 'foo' }
+
+ include_examples 'search results filtered by state'
+ end
end
describe '#issues' do
@@ -168,6 +175,15 @@ RSpec.describe Gitlab::SearchResults do
results.objects('issues')
end
+
+ context 'filtering' do
+ let(:scope) { 'issues' }
+
+ let_it_be(:closed_result) { create(:issue, :closed, project: project, title: 'foo closed') }
+ let_it_be(:opened_result) { create(:issue, :opened, project: project, title: 'foo open') }
+
+ include_examples 'search results filtered by state'
+ end
end
describe '#users' do
@@ -214,7 +230,7 @@ RSpec.describe Gitlab::SearchResults do
let!(:security_issue_5) { create(:issue, :confidential, project: project_4, title: 'Security issue 5') }
it 'does not list confidential issues for non project members' do
- results = described_class.new(non_member, limit_projects, query)
+ results = described_class.new(non_member, query, limit_projects)
issues = results.objects('issues')
expect(issues).to include issue
@@ -230,7 +246,7 @@ RSpec.describe Gitlab::SearchResults do
project_1.add_guest(member)
project_2.add_guest(member)
- results = described_class.new(member, limit_projects, query)
+ results = described_class.new(member, query, limit_projects)
issues = results.objects('issues')
expect(issues).to include issue
@@ -243,7 +259,7 @@ RSpec.describe Gitlab::SearchResults do
end
it 'lists confidential issues for author' do
- results = described_class.new(author, limit_projects, query)
+ results = described_class.new(author, query, limit_projects)
issues = results.objects('issues')
expect(issues).to include issue
@@ -256,7 +272,7 @@ RSpec.describe Gitlab::SearchResults do
end
it 'lists confidential issues for assignee' do
- results = described_class.new(assignee, limit_projects, query)
+ results = described_class.new(assignee, query, limit_projects)
issues = results.objects('issues')
expect(issues).to include issue
@@ -272,7 +288,7 @@ RSpec.describe Gitlab::SearchResults do
project_1.add_developer(member)
project_2.add_developer(member)
- results = described_class.new(member, limit_projects, query)
+ results = described_class.new(member, query, limit_projects)
issues = results.objects('issues')
expect(issues).to include issue
@@ -285,7 +301,7 @@ RSpec.describe Gitlab::SearchResults do
end
it 'lists all issues for admin' do
- results = described_class.new(admin, limit_projects, query)
+ results = described_class.new(admin, query, limit_projects)
issues = results.objects('issues')
expect(issues).to include issue
@@ -323,7 +339,7 @@ RSpec.describe Gitlab::SearchResults do
# Global search scope takes user authorized projects, internal projects and public projects.
limit_projects = ProjectsFinder.new(current_user: user).execute
- milestones = described_class.new(user, limit_projects, 'milestone').objects('milestones')
+ milestones = described_class.new(user, 'milestone', limit_projects).objects('milestones')
expect(milestones).to match_array([milestone_1, milestone_2, milestone_3])
end
diff --git a/spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb b/spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb
index 0ff2dbb234a..4a952a2040a 100644
--- a/spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb
+++ b/spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb
@@ -49,7 +49,7 @@ RSpec.describe Gitlab::SidekiqDaemon::MemoryKiller do
expect { subject }.not_to raise_exception
end
- it 'logs exception message once and raise execption and log stop message' do
+ it 'logs exception message once, raises exception and logs stop message' do
expect(Sidekiq.logger).to receive(:warn).once
.with(
class: described_class.to_s,
@@ -68,7 +68,7 @@ RSpec.describe Gitlab::SidekiqDaemon::MemoryKiller do
pid: pid,
message: 'Stopping Gitlab::SidekiqDaemon::MemoryKiller Daemon')
- expect { subject }.to raise_exception
+ expect { subject }.to raise_exception(Exception, 'My Exception')
end
it 'logs stop message once' do
@@ -402,12 +402,14 @@ RSpec.describe Gitlab::SidekiqDaemon::MemoryKiller do
subject { memory_killer.send(:rss_increase_by_jobs) }
it 'adds up individual rss_increase_by_job' do
+ allow(Gitlab::SidekiqDaemon::Monitor).to receive_message_chain(:instance, :jobs_mutex, :synchronize).and_yield
expect(Gitlab::SidekiqDaemon::Monitor).to receive_message_chain(:instance, :jobs).and_return(running_jobs)
expect(memory_killer).to receive(:rss_increase_by_job).and_return(11, 22)
expect(subject).to eq(33)
end
it 'return 0 if no job' do
+ allow(Gitlab::SidekiqDaemon::Monitor).to receive_message_chain(:instance, :jobs_mutex, :synchronize).and_yield
expect(Gitlab::SidekiqDaemon::Monitor).to receive_message_chain(:instance, :jobs).and_return({})
expect(subject).to eq(0)
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/none_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/none_spec.rb
new file mode 100644
index 00000000000..3250c7cfa31
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/none_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::Strategies::None do
+ let(:fake_duplicate_job) do
+ instance_double(Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob)
+ end
+
+ subject(:strategy) { described_class.new(fake_duplicate_job) }
+
+ describe '#schedule' do
+ it 'yields without checking for duplicates', :aggregate_failures do
+ expect(fake_duplicate_job).not_to receive(:scheduled?)
+ expect(fake_duplicate_job).not_to receive(:duplicate?)
+ expect(fake_duplicate_job).not_to receive(:check!)
+
+ expect { |b| strategy.schedule({}, &b) }.to yield_control
+ end
+ end
+
+ describe '#perform' do
+ it 'does not delete any locks before executing', :aggregate_failures do
+ expect(fake_duplicate_job).not_to receive(:delete!)
+
+ expect { |b| strategy.perform({}, &b) }.to yield_control
+ end
+ end
+end
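
The behaviour these examples pin down is a pure pass-through. A plausible sketch consistent with the spec (not necessarily the real class body; the name NoneStrategy is illustrative):

# The `none` strategy yields immediately for both scheduling and performing,
# and never reads or deletes duplicate-job locks.
class NoneStrategy
  def initialize(duplicate_job)
    @duplicate_job = duplicate_job
  end

  def schedule(_job)
    yield
  end

  def perform(_job)
    yield
  end
end

NoneStrategy.new(nil).schedule({}) { puts 'scheduled without dedup checks' }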
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing_spec.rb
index 77d760d1ae3..10b18052e9a 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing_spec.rb
@@ -1,7 +1,6 @@
# frozen_string_literal: true
require 'fast_spec_helper'
-require 'timecop'
RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::Strategies::UntilExecuting do
let(:fake_duplicate_job) do
@@ -77,7 +76,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::Strategies::UntilExecut
context 'scheduled in the future' do
it 'adds the jid of the existing job to the job hash' do
- Timecop.freeze do
+ freeze_time do
allow(fake_duplicate_job).to receive(:scheduled?).twice.and_return(true)
allow(fake_duplicate_job).to receive(:scheduled_at).and_return(Time.now + time_diff)
allow(fake_duplicate_job).to receive(:options).and_return({ including_scheduled: true })
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies_spec.rb
index 5d37e3cb1ae..84856238aab 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies_spec.rb
@@ -8,6 +8,10 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::Strategies do
expect(described_class.for(:until_executing)).to eq(described_class::UntilExecuting)
end
+ it 'returns the right class for `none`' do
+ expect(described_class.for(:none)).to eq(described_class::None)
+ end
+
it 'raises an UnknownStrategyError when passing an unknown key' do
expect { described_class.for(:unknown) }.to raise_error(described_class::UnknownStrategyError)
end
diff --git a/spec/lib/gitlab/sql/except_spec.rb b/spec/lib/gitlab/sql/except_spec.rb
new file mode 100644
index 00000000000..a3d6990ee2e
--- /dev/null
+++ b/spec/lib/gitlab/sql/except_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::SQL::Except do
+ it_behaves_like 'SQL set operator', 'EXCEPT'
+end
diff --git a/spec/lib/gitlab/sql/intersect_spec.rb b/spec/lib/gitlab/sql/intersect_spec.rb
new file mode 100644
index 00000000000..cf076796712
--- /dev/null
+++ b/spec/lib/gitlab/sql/intersect_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::SQL::Intersect do
+ it_behaves_like 'SQL set operator', 'INTERSECT'
+end
diff --git a/spec/lib/gitlab/sql/union_spec.rb b/spec/lib/gitlab/sql/union_spec.rb
index c8be83c093d..a41551e25bf 100644
--- a/spec/lib/gitlab/sql/union_spec.rb
+++ b/spec/lib/gitlab/sql/union_spec.rb
@@ -3,40 +3,5 @@
require 'spec_helper'
RSpec.describe Gitlab::SQL::Union do
- let(:relation_1) { User.where(email: 'alice@example.com').select(:id) }
- let(:relation_2) { User.where(email: 'bob@example.com').select(:id) }
-
- def to_sql(relation)
- relation.reorder(nil).to_sql
- end
-
- describe '#to_sql' do
- it 'returns a String joining relations together using a UNION' do
- union = described_class.new([relation_1, relation_2])
-
- expect(union.to_sql).to eq("(#{to_sql(relation_1)})\nUNION\n(#{to_sql(relation_2)})")
- end
-
- it 'skips Model.none segements' do
- empty_relation = User.none
- union = described_class.new([empty_relation, relation_1, relation_2])
-
- expect {User.where("users.id IN (#{union.to_sql})").to_a}.not_to raise_error
- expect(union.to_sql).to eq("(#{to_sql(relation_1)})\nUNION\n(#{to_sql(relation_2)})")
- end
-
- it 'uses UNION ALL when removing duplicates is disabled' do
- union = described_class
- .new([relation_1, relation_2], remove_duplicates: false)
-
- expect(union.to_sql).to include('UNION ALL')
- end
-
- it 'returns `NULL` if all relations are empty' do
- empty_relation = User.none
- union = described_class.new([empty_relation, empty_relation])
-
- expect(union.to_sql).to eq('NULL')
- end
- end
+ it_behaves_like 'SQL set operator', 'UNION'
end
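
The examples removed above document the behaviour now covered by the shared 'SQL set operator' examples: each relation is wrapped in parentheses and joined by the operator keyword, with remove_duplicates: false switching to the ALL form and empty inputs collapsing to NULL. A usage sketch, runnable in a GitLab console; the relations and the abbreviated SQL output are illustrative:

union = Gitlab::SQL::Union.new([User.where(id: 1), User.where(id: 2)])
union.to_sql
# => "(SELECT ... WHERE id = 1)\nUNION\n(SELECT ... WHERE id = 2)"

Gitlab::SQL::Union.new([User.where(id: 1), User.where(id: 2)], remove_duplicates: false).to_sql
# => same shape, joined with UNION ALL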
diff --git a/spec/lib/gitlab/static_site_editor/config/file_config_spec.rb b/spec/lib/gitlab/static_site_editor/config/file_config_spec.rb
new file mode 100644
index 00000000000..594425c2dab
--- /dev/null
+++ b/spec/lib/gitlab/static_site_editor/config/file_config_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::StaticSiteEditor::Config::FileConfig do
+ subject(:config) { described_class.new }
+
+ describe '#data' do
+ subject { config.data }
+
+ it 'returns hardcoded data for now' do
+ is_expected.to match(static_site_generator: 'middleman')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/static_site_editor/config_spec.rb b/spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb
index 56cdb573785..3433a54be9c 100644
--- a/spec/lib/gitlab/static_site_editor/config_spec.rb
+++ b/spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb
@@ -2,8 +2,8 @@
require 'spec_helper'
-RSpec.describe Gitlab::StaticSiteEditor::Config do
- subject(:config) { described_class.new(repository, ref, file_path, return_url) }
+RSpec.describe Gitlab::StaticSiteEditor::Config::GeneratedConfig do
+ subject(:config) { described_class.new(repository, ref, path, return_url) }
let_it_be(:namespace) { create(:namespace, name: 'namespace') }
let_it_be(:root_group) { create(:group, name: 'group') }
@@ -13,24 +13,26 @@ RSpec.describe Gitlab::StaticSiteEditor::Config do
let_it_be(:repository) { project.repository }
let(:ref) { 'master' }
- let(:file_path) { 'README.md' }
+ let(:path) { 'README.md' }
let(:return_url) { 'http://example.com' }
- describe '#payload' do
- subject { config.payload }
+ describe '#data' do
+ subject { config.data }
it 'returns data for the frontend component' do
- is_expected.to eq(
- branch: 'master',
- commit_id: repository.commit.id,
- namespace: 'namespace',
- path: 'README.md',
- project: 'project',
- project_id: project.id,
- return_url: 'http://example.com',
- is_supported_content: 'true',
- base_url: '/namespace/project/-/sse/master%2FREADME.md'
- )
+ is_expected
+ .to match({
+ branch: 'master',
+ commit_id: repository.commit.id,
+ namespace: 'namespace',
+ path: 'README.md',
+ project: 'project',
+ project_id: project.id,
+ return_url: 'http://example.com',
+ is_supported_content: 'true',
+ base_url: '/namespace/project/-/sse/master%2FREADME.md',
+ merge_requests_illustration_path: %r{illustrations/merge_requests}
+ })
end
context 'when namespace is a subgroup' do
@@ -49,7 +51,7 @@ RSpec.describe Gitlab::StaticSiteEditor::Config do
before do
repository.create_file(
project.creator,
- file_path,
+ path,
'',
message: 'message',
branch_name: 'master'
@@ -57,7 +59,7 @@ RSpec.describe Gitlab::StaticSiteEditor::Config do
end
context 'when feature flag is enabled' do
- let(:file_path) { 'FEATURE_ON.md.erb' }
+ let(:path) { 'FEATURE_ON.md.erb' }
before do
stub_feature_flags(sse_erb_support: project)
@@ -67,7 +69,7 @@ RSpec.describe Gitlab::StaticSiteEditor::Config do
end
context 'when feature flag is disabled' do
- let(:file_path) { 'FEATURE_OFF.md.erb' }
+ let(:path) { 'FEATURE_OFF.md.erb' }
before do
stub_feature_flags(sse_erb_support: false)
@@ -78,7 +80,7 @@ RSpec.describe Gitlab::StaticSiteEditor::Config do
end
context 'when file path is nested' do
- let(:file_path) { 'lib/README.md' }
+ let(:path) { 'lib/README.md' }
it { is_expected.to include(base_url: '/namespace/project/-/sse/master%2Flib%2FREADME.md') }
end
@@ -90,19 +92,19 @@ RSpec.describe Gitlab::StaticSiteEditor::Config do
end
context 'when file does not have a markdown extension' do
- let(:file_path) { 'README.txt' }
+ let(:path) { 'README.txt' }
it { is_expected.to include(is_supported_content: 'false') }
end
context 'when file does not have an extension' do
- let(:file_path) { 'README' }
+ let(:path) { 'README' }
it { is_expected.to include(is_supported_content: 'false') }
end
context 'when file does not exist' do
- let(:file_path) { 'UNKNOWN.md' }
+ let(:path) { 'UNKNOWN.md' }
it { is_expected.to include(is_supported_content: 'false') }
end
diff --git a/spec/lib/gitlab/submodule_links_spec.rb b/spec/lib/gitlab/submodule_links_spec.rb
index c69326e12be..e2bbda81780 100644
--- a/spec/lib/gitlab/submodule_links_spec.rb
+++ b/spec/lib/gitlab/submodule_links_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe Gitlab::SubmoduleLinks do
end
it 'returns no links' do
- expect(subject).to eq([nil, nil])
+ expect(subject).to be_nil
end
end
@@ -28,17 +28,28 @@ RSpec.describe Gitlab::SubmoduleLinks do
end
it 'returns no links' do
- expect(subject).to eq([nil, nil])
+ expect(subject).to be_nil
end
end
context 'when the submodule is known' do
before do
- stub_urls({ 'gitlab-foss' => 'git@gitlab.com:gitlab-org/gitlab-foss.git' })
+ gitlab_foss = 'git@gitlab.com:gitlab-org/gitlab-foss.git'
+
+ stub_urls({
+ 'ref' => { 'gitlab-foss' => gitlab_foss },
+ 'other_ref' => { 'gitlab-foss' => gitlab_foss },
+ 'signed-commits' => { 'gitlab-foss' => gitlab_foss },
+ 'special_ref' => { 'gitlab-foss' => 'git@OTHER.com:gitlab-org/gitlab-foss.git' }
+ })
end
+ it 'returns links and caches them by ref' do
- expect(subject).to eq(['https://gitlab.com/gitlab-org/gitlab-foss', 'https://gitlab.com/gitlab-org/gitlab-foss/-/tree/hash'])
+ aggregate_failures do
+ expect(subject.web).to eq('https://gitlab.com/gitlab-org/gitlab-foss')
+ expect(subject.tree).to eq('https://gitlab.com/gitlab-org/gitlab-foss/-/tree/hash')
+ expect(subject.compare).to be_nil
+ end
cache_store = links.instance_variable_get("@cache_store")
@@ -49,13 +60,46 @@ RSpec.describe Gitlab::SubmoduleLinks do
let(:ref) { 'signed-commits' }
it 'returns links' do
- expect(subject).to eq(['https://gitlab.com/gitlab-org/gitlab-foss', 'https://gitlab.com/gitlab-org/gitlab-foss/-/tree/hash'])
+ aggregate_failures do
+ expect(subject.web).to eq('https://gitlab.com/gitlab-org/gitlab-foss')
+ expect(subject.tree).to eq('https://gitlab.com/gitlab-org/gitlab-foss/-/tree/hash')
+ expect(subject.compare).to be_nil
+ end
+ end
+ end
+
+ context 'and the diff information is available' do
+ let(:old_ref) { 'other_ref' }
+ let(:diff_file) { double(old_blob: double(id: 'old-hash', path: 'gitlab-foss'), old_content_sha: old_ref) }
+
+ subject { links.for(submodule_item, ref, diff_file) }
+
+ it 'includes the compare link in the returned links' do
+ aggregate_failures do
+ expect(subject.web).to eq('https://gitlab.com/gitlab-org/gitlab-foss')
+ expect(subject.tree).to eq('https://gitlab.com/gitlab-org/gitlab-foss/-/tree/hash')
+ expect(subject.compare).to eq('https://gitlab.com/gitlab-org/gitlab-foss/-/compare/old-hash...hash')
+ end
+ end
+
+ context 'but the submodule url has changed' do
+ let(:old_ref) { 'special_ref' }
+
+ it 'does not include the compare link in the returned links' do
+ aggregate_failures do
+ expect(subject.web).to eq('https://gitlab.com/gitlab-org/gitlab-foss')
+ expect(subject.tree).to eq('https://gitlab.com/gitlab-org/gitlab-foss/-/tree/hash')
+ expect(subject.compare).to be_nil
+ end
+ end
end
end
end
end
- def stub_urls(urls)
- allow(repo).to receive(:submodule_urls_for).and_return(urls)
+ def stub_urls(urls_by_ref)
+ allow(repo).to receive(:submodule_urls_for) do |ref|
+ urls_by_ref[ref] if urls_by_ref
+ end
end
end
diff --git a/spec/lib/gitlab/template/finders/global_template_finder_spec.rb b/spec/lib/gitlab/template/finders/global_template_finder_spec.rb
index e776284b3e8..e2751d194d3 100644
--- a/spec/lib/gitlab/template/finders/global_template_finder_spec.rb
+++ b/spec/lib/gitlab/template/finders/global_template_finder_spec.rb
@@ -15,9 +15,9 @@ RSpec.describe Gitlab::Template::Finders::GlobalTemplateFinder do
FileUtils.rm_rf(base_dir)
end
- subject(:finder) { described_class.new(base_dir, '', { 'General' => '', 'Bar' => 'Bar' }, exclusions: exclusions) }
+ subject(:finder) { described_class.new(base_dir, '', { 'General' => '', 'Bar' => 'Bar' }, excluded_patterns: excluded_patterns) }
- let(:exclusions) { [] }
+ let(:excluded_patterns) { [] }
describe '.find' do
context 'with a non-prefixed General template' do
@@ -38,7 +38,7 @@ RSpec.describe Gitlab::Template::Finders::GlobalTemplateFinder do
end
context 'while listed as an exclusion' do
- let(:exclusions) { %w[test-template] }
+ let(:excluded_patterns) { [%r{^test-template$}] }
it 'does not find the template without a prefix' do
expect(finder.find('test-template')).to be_nil
@@ -77,7 +77,7 @@ RSpec.describe Gitlab::Template::Finders::GlobalTemplateFinder do
end
context 'while listed as an exclusion' do
- let(:exclusions) { %w[Bar/test-template] }
+ let(:excluded_patterns) { [%r{^Bar/test-template$}] }
it 'does not find the template with a prefix' do
expect(finder.find('Bar/test-template')).to be_nil
@@ -96,6 +96,17 @@ RSpec.describe Gitlab::Template::Finders::GlobalTemplateFinder do
expect(finder.find('Bar/test-template')).to be_nil
end
end
+
+ context 'while listed as an exclusion' do
+ let(:excluded_patterns) { [%r{\.latest$}] }
+
+ it 'excludes the template matching the pattern' do
+ create_template!('test-template.latest')
+
+ expect(finder.find('test-template')).to be_present
+ expect(finder.find('test-template.latest')).to be_nil
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/template/gitlab_ci_yml_template_spec.rb b/spec/lib/gitlab/template/gitlab_ci_yml_template_spec.rb
index 55444114d39..26c83ed6793 100644
--- a/spec/lib/gitlab/template/gitlab_ci_yml_template_spec.rb
+++ b/spec/lib/gitlab/template/gitlab_ci_yml_template_spec.rb
@@ -13,6 +13,12 @@ RSpec.describe Gitlab::Template::GitlabCiYmlTemplate do
expect(all).to include('Docker')
expect(all).to include('Ruby')
end
+
+ it 'does not include Browser-Performance template in FOSS' do
+ all = subject.all.map(&:name)
+
+ expect(all).not_to include('Browser-Performance') unless Gitlab.ee?
+ end
end
describe '#content' do
diff --git a/spec/lib/gitlab/tracking/incident_management_spec.rb b/spec/lib/gitlab/tracking/incident_management_spec.rb
index e8131b4eeee..9c49c76ead7 100644
--- a/spec/lib/gitlab/tracking/incident_management_spec.rb
+++ b/spec/lib/gitlab/tracking/incident_management_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Gitlab::Tracking::IncidentManagement do
.with(
'IncidentManagement::Settings',
label,
- value || kind_of(Hash)
+ value || any_args
)
end
end
diff --git a/spec/lib/gitlab/tracking_spec.rb b/spec/lib/gitlab/tracking_spec.rb
index 65b6d9c8899..f0bf7b9964f 100644
--- a/spec/lib/gitlab/tracking_spec.rb
+++ b/spec/lib/gitlab/tracking_spec.rb
@@ -9,7 +9,6 @@ RSpec.describe Gitlab::Tracking do
stub_application_setting(snowplow_collector_hostname: 'gitfoo.com')
stub_application_setting(snowplow_cookie_domain: '.gitfoo.com')
stub_application_setting(snowplow_app_id: '_abc123_')
- stub_application_setting(snowplow_iglu_registry_url: 'https://example.org')
end
describe '.snowplow_options' do
@@ -20,8 +19,7 @@ RSpec.describe Gitlab::Tracking do
cookieDomain: '.gitfoo.com',
appId: '_abc123_',
formTracking: true,
- linkClickTracking: true,
- igluRegistryUrl: 'https://example.org'
+ linkClickTracking: true
}
expect(subject.snowplow_options(nil)).to match(expected_fields)
diff --git a/spec/lib/gitlab/updated_notes_paginator_spec.rb b/spec/lib/gitlab/updated_notes_paginator_spec.rb
index eedc11777d4..ce6a7719fb4 100644
--- a/spec/lib/gitlab/updated_notes_paginator_spec.rb
+++ b/spec/lib/gitlab/updated_notes_paginator_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe Gitlab::UpdatedNotesPaginator do
let(:page_1_boundary) { page_1.last.updated_at + NotesFinder::FETCH_OVERLAP }
around do |example|
- Timecop.freeze do
+ freeze_time do
example.run
end
end
diff --git a/spec/lib/gitlab/usage_data/topology_spec.rb b/spec/lib/gitlab/usage_data/topology_spec.rb
index 7f4a25297e6..b8462e0290c 100644
--- a/spec/lib/gitlab/usage_data/topology_spec.rb
+++ b/spec/lib/gitlab/usage_data/topology_spec.rb
@@ -6,23 +6,23 @@ RSpec.describe Gitlab::UsageData::Topology do
include UsageDataHelpers
describe '#topology_usage_data' do
- subject { described_class.new.topology_usage_data }
+ subject { topology.topology_usage_data }
+
+ let(:topology) { described_class.new }
+ let(:prometheus_client) { Gitlab::PrometheusClient.new('http://localhost:9090') }
+ let(:fallback) { {} }
before do
# this pins down time shifts when benchmarking durations
allow(Process).to receive(:clock_gettime).and_return(0)
end
- context 'when embedded Prometheus server is enabled' do
- before do
- expect(Gitlab::Prometheus::Internal).to receive(:prometheus_enabled?).and_return(true)
- expect(Gitlab::Prometheus::Internal).to receive(:uri).and_return('http://prom:9090')
- end
-
+ shared_examples 'query topology data from Prometheus' do
context 'tracking node metrics' do
it 'contains node level metrics for each instance' do
- expect_prometheus_api_to(
+ expect_prometheus_client_to(
receive_app_request_volume_query,
+ receive_query_apdex_ratio_query,
receive_node_memory_query,
receive_node_memory_utilization_query,
receive_node_cpu_count_query,
@@ -38,6 +38,7 @@ RSpec.describe Gitlab::UsageData::Topology do
expect(subject[:topology]).to eq({
duration_s: 0,
application_requests_per_hour: 36,
+ query_apdex_weekly_average: 0.996,
failures: [],
nodes: [
{
@@ -105,8 +106,9 @@ RSpec.describe Gitlab::UsageData::Topology do
context 'and some node memory metrics are missing' do
it 'removes the respective entries and includes the failures' do
- expect_prometheus_api_to(
+ expect_prometheus_client_to(
receive_app_request_volume_query(result: []),
+ receive_query_apdex_ratio_query(result: []),
receive_node_memory_query(result: []),
receive_node_memory_utilization_query(result: []),
receive_node_cpu_count_query,
@@ -123,6 +125,7 @@ RSpec.describe Gitlab::UsageData::Topology do
duration_s: 0,
failures: [
{ 'app_requests' => 'empty_result' },
+ { 'query_apdex' => 'empty_result' },
{ 'node_memory' => 'empty_result' },
{ 'node_memory_utilization' => 'empty_result' },
{ 'service_rss' => 'empty_result' },
@@ -243,8 +246,9 @@ RSpec.describe Gitlab::UsageData::Topology do
end
it 'normalizes equivalent instance values and maps them to the same node' do
- expect_prometheus_api_to(
+ expect_prometheus_client_to(
receive_app_request_volume_query(result: []),
+ receive_query_apdex_ratio_query(result: []),
receive_node_memory_query(result: node_memory_response),
receive_node_memory_utilization_query(result: node_memory_utilization_response),
receive_node_cpu_count_query(result: []),
@@ -261,6 +265,7 @@ RSpec.describe Gitlab::UsageData::Topology do
duration_s: 0,
failures: [
{ 'app_requests' => 'empty_result' },
+ { 'query_apdex' => 'empty_result' },
{ 'node_cpus' => 'empty_result' },
{ 'node_cpu_utilization' => 'empty_result' },
{ 'service_uss' => 'empty_result' },
@@ -307,8 +312,9 @@ RSpec.describe Gitlab::UsageData::Topology do
context 'and node metrics are missing but service metrics exist' do
it 'still reports service metrics' do
- expect_prometheus_api_to(
+ expect_prometheus_client_to(
receive_app_request_volume_query(result: []),
+ receive_query_apdex_ratio_query(result: []),
receive_node_memory_query(result: []),
receive_node_memory_utilization_query(result: []),
receive_node_cpu_count_query(result: []),
@@ -325,6 +331,7 @@ RSpec.describe Gitlab::UsageData::Topology do
duration_s: 0,
failures: [
{ 'app_requests' => 'empty_result' },
+ { 'query_apdex' => 'empty_result' },
{ 'node_memory' => 'empty_result' },
{ 'node_memory_utilization' => 'empty_result' },
{ 'node_cpus' => 'empty_result' },
@@ -380,8 +387,9 @@ RSpec.describe Gitlab::UsageData::Topology do
end
it 'filters out unknown service data and reports the unknown services as a failure' do
- expect_prometheus_api_to(
+ expect_prometheus_client_to(
receive_app_request_volume_query(result: []),
+ receive_query_apdex_ratio_query(result: []),
receive_node_memory_query(result: []),
receive_node_memory_utilization_query(result: []),
receive_node_cpu_count_query(result: []),
@@ -404,24 +412,25 @@ RSpec.describe Gitlab::UsageData::Topology do
context 'and an error is raised when querying Prometheus' do
context 'without timeout failures' do
it 'returns empty result and executes subsequent queries as usual' do
- expect_prometheus_api_to receive(:query)
- .at_least(:once)
- .and_raise(Gitlab::PrometheusClient::ConnectionError)
+ expect_prometheus_client_to(
+ receive(:query).at_least(:once).and_raise(Gitlab::PrometheusClient::UnexpectedResponseError)
+ )
expect(subject[:topology]).to eq({
duration_s: 0,
failures: [
- { 'app_requests' => 'Gitlab::PrometheusClient::ConnectionError' },
- { 'node_memory' => 'Gitlab::PrometheusClient::ConnectionError' },
- { 'node_memory_utilization' => 'Gitlab::PrometheusClient::ConnectionError' },
- { 'node_cpus' => 'Gitlab::PrometheusClient::ConnectionError' },
- { 'node_cpu_utilization' => 'Gitlab::PrometheusClient::ConnectionError' },
- { 'node_uname_info' => 'Gitlab::PrometheusClient::ConnectionError' },
- { 'service_rss' => 'Gitlab::PrometheusClient::ConnectionError' },
- { 'service_uss' => 'Gitlab::PrometheusClient::ConnectionError' },
- { 'service_pss' => 'Gitlab::PrometheusClient::ConnectionError' },
- { 'service_process_count' => 'Gitlab::PrometheusClient::ConnectionError' },
- { 'service_workers' => 'Gitlab::PrometheusClient::ConnectionError' }
+ { 'app_requests' => 'Gitlab::PrometheusClient::UnexpectedResponseError' },
+ { 'query_apdex' => 'Gitlab::PrometheusClient::UnexpectedResponseError' },
+ { 'node_memory' => 'Gitlab::PrometheusClient::UnexpectedResponseError' },
+ { 'node_memory_utilization' => 'Gitlab::PrometheusClient::UnexpectedResponseError' },
+ { 'node_cpus' => 'Gitlab::PrometheusClient::UnexpectedResponseError' },
+ { 'node_cpu_utilization' => 'Gitlab::PrometheusClient::UnexpectedResponseError' },
+ { 'node_uname_info' => 'Gitlab::PrometheusClient::UnexpectedResponseError' },
+ { 'service_rss' => 'Gitlab::PrometheusClient::UnexpectedResponseError' },
+ { 'service_uss' => 'Gitlab::PrometheusClient::UnexpectedResponseError' },
+ { 'service_pss' => 'Gitlab::PrometheusClient::UnexpectedResponseError' },
+ { 'service_process_count' => 'Gitlab::PrometheusClient::UnexpectedResponseError' },
+ { 'service_workers' => 'Gitlab::PrometheusClient::UnexpectedResponseError' }
],
nodes: []
})
@@ -435,13 +444,15 @@ RSpec.describe Gitlab::UsageData::Topology do
with_them do
it 'returns empty result and cancelled subsequent queries' do
- expect_prometheus_api_to receive(:query)
- .and_raise(exception)
+ expect_prometheus_client_to(
+ receive(:query).and_raise(exception)
+ )
expect(subject[:topology]).to eq({
duration_s: 0,
failures: [
{ 'app_requests' => exception.to_s },
+ { 'query_apdex' => 'timeout_cancellation' },
{ 'node_memory' => 'timeout_cancellation' },
{ 'node_memory_utilization' => 'timeout_cancellation' },
{ 'node_cpus' => 'timeout_cancellation' },
@@ -461,10 +472,8 @@ RSpec.describe Gitlab::UsageData::Topology do
end
end
- context 'when embedded Prometheus server is disabled' do
- it 'returns empty result with no failures' do
- expect(Gitlab::Prometheus::Internal).to receive(:prometheus_enabled?).and_return(false)
-
+ shared_examples 'returns empty result with no failures' do
+ it do
expect(subject[:topology]).to eq({
duration_s: 0,
failures: []
@@ -472,9 +481,25 @@ RSpec.describe Gitlab::UsageData::Topology do
end
end
+ context 'can reach a ready Prometheus client' do
+ before do
+ expect(topology).to receive(:with_prometheus_client).and_yield(prometheus_client)
+ end
+
+ it_behaves_like 'query topology data from Prometheus'
+ end
+
+ context 'can not reach a ready Prometheus client' do
+ before do
+ expect(topology).to receive(:with_prometheus_client).and_return(fallback)
+ end
+
+ it_behaves_like 'returns empty result with no failures'
+ end
+
context 'when top-level function raises error' do
it 'returns empty result with generic failure' do
- allow(Gitlab::Prometheus::Internal).to receive(:prometheus_enabled?).and_raise(RuntimeError)
+ expect(topology).to receive(:with_prometheus_client).and_raise(RuntimeError)
expect(subject[:topology]).to eq({
duration_s: 0,
@@ -486,6 +511,14 @@ RSpec.describe Gitlab::UsageData::Topology do
end
end
+ def receive_ready_check_query(result: nil, raise_error: nil)
+ if raise_error.nil?
+ receive(:ready?).and_return(result.nil? ? true : result)
+ else
+ receive(:ready?).and_raise(raise_error)
+ end
+ end
+
def receive_app_request_volume_query(result: nil)
receive(:query)
.with(/gitlab_usage_ping:ops:rate/)
@@ -497,6 +530,17 @@ RSpec.describe Gitlab::UsageData::Topology do
])
end
+ def receive_query_apdex_ratio_query(result: nil)
+ receive(:query)
+ .with(/gitlab_usage_ping:sql_duration_apdex:ratio_rate5m/)
+ .and_return(result || [
+ {
+ 'metric' => {},
+ 'value' => [1000, '0.996']
+ }
+ ])
+ end
+
def receive_node_memory_query(result: nil)
receive(:query)
.with(/node_memory_total_bytes/, an_instance_of(Hash))
diff --git a/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
new file mode 100644
index 00000000000..2a674557b76
--- /dev/null
+++ b/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
@@ -0,0 +1,89 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_redis_shared_state do
+ let(:user1) { build(:user, id: 1) }
+ let(:user2) { build(:user, id: 2) }
+ let(:user3) { build(:user, id: 3) }
+ let(:time) { Time.zone.now }
+
+ shared_examples 'tracks and counts action' do
+ before do
+ stub_application_setting(usage_ping_enabled: true)
+ end
+
+ specify do
+ aggregate_failures do
+ expect(track_action(author: user1)).to be_truthy
+ expect(track_action(author: user1)).to be_truthy
+ expect(track_action(author: user2)).to be_truthy
+ expect(track_action(author: user3, time: time - 3.days)).to be_truthy
+
+ expect(count_unique(date_from: time, date_to: Date.today)).to eq(2)
+ expect(count_unique(date_from: time - 5.days, date_to: Date.tomorrow)).to eq(3)
+ end
+ end
+
+ it 'does not track edit actions if author is not present' do
+ expect(track_action(author: nil)).to be_nil
+ end
+
+ context 'when feature flag track_editor_edit_actions is disabled' do
+ it 'does not track edit actions' do
+ stub_feature_flags(track_editor_edit_actions: false)
+
+ expect(track_action(author: user1)).to be_nil
+ end
+ end
+ end
+
+ context 'for web IDE edit actions' do
+ it_behaves_like 'tracks and counts action' do
+ def track_action(params)
+ described_class.track_web_ide_edit_action(params)
+ end
+
+ def count_unique(params)
+ described_class.count_web_ide_edit_actions(params)
+ end
+ end
+ end
+
+ context 'for SFE edit actions' do
+ it_behaves_like 'tracks and counts action' do
+ def track_action(params)
+ described_class.track_sfe_edit_action(params)
+ end
+
+ def count_unique(params)
+ described_class.count_sfe_edit_actions(params)
+ end
+ end
+ end
+
+ context 'for snippet editor edit actions' do
+ it_behaves_like 'tracks and counts action' do
+ def track_action(params)
+ described_class.track_snippet_editor_edit_action(params)
+ end
+
+ def count_unique(params)
+ described_class.count_snippet_editor_edit_actions(params)
+ end
+ end
+ end
+
+ it 'can return the deduplicated count of actions per user' do
+ described_class.track_web_ide_edit_action(author: user1)
+ described_class.track_snippet_editor_edit_action(author: user1)
+ described_class.track_sfe_edit_action(author: user1)
+ described_class.track_web_ide_edit_action(author: user2, time: time - 2.days)
+ described_class.track_web_ide_edit_action(author: user3, time: time - 3.days)
+ described_class.track_snippet_editor_edit_action(author: user3, time: time - 3.days)
+ described_class.track_sfe_edit_action(author: user3, time: time - 3.days)
+
+ expect(described_class.count_edit_using_editor(date_from: time, date_to: Date.today)).to eq(1)
+ expect(described_class.count_edit_using_editor(date_from: time - 5.days, date_to: Date.tomorrow)).to eq(3)
+ end
+end
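
For orientation, the public surface exercised above is a track/count pair per editor plus an aggregate counter. A usage sketch with method names taken directly from the expectations in this spec; current_user and the date window are illustrative:

counter = Gitlab::UsageDataCounters::EditorUniqueCounter
counter.track_web_ide_edit_action(author: current_user)
counter.track_snippet_editor_edit_action(author: current_user)
counter.count_edit_using_editor(date_from: 4.weeks.ago, date_to: Date.today)
# => number of distinct authors across all editors in the window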
diff --git a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
index 2ab349a67d9..f881da71251 100644
--- a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
@@ -8,25 +8,6 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
let(:entity3) { '34rfjuuy-ce56-sa35-ds34-dfer567dfrf2' }
let(:entity4) { '8b9a2671-2abf-4bec-a682-22f6a8f7bf31' }
- let(:weekly_event) { 'g_analytics_contribution' }
- let(:daily_event) { 'g_search' }
- let(:different_aggregation) { 'different_aggregation' }
-
- let(:known_events) do
- [
- { name: "g_analytics_contribution", redis_slot: "analytics", category: "analytics", expiry: 84, aggregation: "weekly" },
- { name: "g_analytics_valuestream", redis_slot: "analytics", category: "analytics", expiry: 84, aggregation: "daily" },
- { name: "g_analytics_productivity", redis_slot: "analytics", category: "productivity", expiry: 84, aggregation: "weekly" },
- { name: "g_compliance_dashboard", redis_slot: "compliance", category: "compliance", aggregation: "weekly" },
- { name: "g_search", category: "global", aggregation: "daily" },
- { name: "different_aggregation", category: "global", aggregation: "monthly" }
- ].map(&:with_indifferent_access)
- end
-
- before do
- allow(described_class).to receive(:known_events).and_return(known_events)
- end
-
around do |example|
# We need to freeze to a reference time
# because visits are grouped by the week number in the year
@@ -37,77 +18,239 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
Timecop.freeze(reference_time) { example.run }
end
- describe '.track_event' do
- it "raise error if metrics don't have same aggregation" do
- expect { described_class.track_event(entity1, different_aggregation, Date.current) } .to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownAggregation)
+ describe '.categories' do
+ it 'gets all unique category names' do
+ expect(described_class.categories).to contain_exactly('analytics', 'compliance', 'ide_edit', 'search', 'source_code', 'incident_management', 'issues_edit')
end
+ end
+
+ describe 'known_events' do
+ let(:weekly_event) { 'g_analytics_contribution' }
+ let(:daily_event) { 'g_analytics_search' }
+ let(:analytics_slot_event) { 'g_analytics_contribution' }
+ let(:compliance_slot_event) { 'g_compliance_dashboard' }
+ let(:category_analytics_event) { 'g_analytics_search' }
+ let(:category_productivity_event) { 'g_analytics_productivity' }
+ let(:no_slot) { 'no_slot' }
+ let(:different_aggregation) { 'different_aggregation' }
+ let(:custom_daily_event) { 'g_analytics_custom' }
- it 'raise error if metrics of unknown aggregation' do
- expect { described_class.track_event(entity1, 'unknown', Date.current) } .to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownEvent)
+ let(:global_category) { 'global' }
+ let(:compliance_category) {'compliance' }
+ let(:productivity_category) {'productivity' }
+ let(:analytics_category) { 'analytics' }
+
+ let(:known_events) do
+ [
+ { name: weekly_event, redis_slot: "analytics", category: analytics_category, expiry: 84, aggregation: "weekly" },
+ { name: daily_event, redis_slot: "analytics", category: analytics_category, expiry: 84, aggregation: "daily" },
+ { name: category_productivity_event, redis_slot: "analytics", category: productivity_category, aggregation: "weekly" },
+ { name: compliance_slot_event, redis_slot: "compliance", category: compliance_category, aggregation: "weekly" },
+ { name: no_slot, category: global_category, aggregation: "daily" },
+ { name: different_aggregation, category: global_category, aggregation: "monthly" }
+ ].map(&:with_indifferent_access)
end
- end
- describe '.unique_events' do
before do
- # events in current week, should not be counted as week is not complete
- described_class.track_event(entity1, weekly_event, Date.current)
- described_class.track_event(entity2, weekly_event, Date.current)
+ allow(described_class).to receive(:known_events).and_return(known_events)
+ end
- # Events last week
- described_class.track_event(entity1, weekly_event, 2.days.ago)
- described_class.track_event(entity1, weekly_event, 2.days.ago)
+ describe '.events_for_category' do
+ it 'gets the event names for given category' do
+ expect(described_class.events_for_category(:analytics)).to contain_exactly(weekly_event, daily_event)
+ end
+ end
- # Events 2 weeks ago
- described_class.track_event(entity1, weekly_event, 2.weeks.ago)
+ describe '.track_event' do
+ context 'when usage_ping is disabled' do
+ it 'does not track the event' do
+ stub_application_setting(usage_ping_enabled: false)
- # Events 4 weeks ago
- described_class.track_event(entity3, weekly_event, 4.weeks.ago)
- described_class.track_event(entity4, weekly_event, 29.days.ago)
+ described_class.track_event(entity1, weekly_event, Date.current)
- # events in current day should be counted in daily aggregation
- described_class.track_event(entity1, daily_event, Date.current)
- described_class.track_event(entity2, daily_event, Date.current)
+ expect(Gitlab::Redis::HLL).not_to receive(:add)
+ end
+ end
- # Events last week
- described_class.track_event(entity1, daily_event, 2.days.ago)
- described_class.track_event(entity1, daily_event, 2.days.ago)
+ context 'when usage_ping is enabled' do
+ before do
+ stub_application_setting(usage_ping_enabled: true)
+ end
- # Events 2 weeks ago
- described_class.track_event(entity1, daily_event, 14.days.ago)
+ it "raise error if metrics don't have same aggregation" do
+ expect { described_class.track_event(entity1, different_aggregation, Date.current) } .to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownAggregation)
+ end
- # Events 4 weeks ago
- described_class.track_event(entity3, daily_event, 28.days.ago)
- described_class.track_event(entity4, daily_event, 29.days.ago)
- end
+ it 'raises an error for metrics of unknown aggregation' do
+ expect { described_class.track_event(entity1, 'unknown', Date.current) } .to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownEvent)
+ end
- it 'raise error if metrics are not in the same slot' do
- expect { described_class.unique_events(event_names: %w(g_analytics_contribution g_compliance_dashboard), start_date: 4.weeks.ago, end_date: Date.current) }.to raise_error('Events should be in same slot')
- end
+ context 'for weekly events' do
+ it 'sets the keys in Redis to expire automatically after the given expiry time' do
+ described_class.track_event(entity1, "g_analytics_contribution")
- it 'raise error if metrics are not in the same category' do
- expect { described_class.unique_events(event_names: %w(g_analytics_contribution g_analytics_productivity), start_date: 4.weeks.ago, end_date: Date.current) }.to raise_error('Events should be in same category')
- end
+ Gitlab::Redis::SharedState.with do |redis|
+ keys = redis.scan_each(match: "g_{analytics}_contribution-*").to_a
+ expect(keys).not_to be_empty
+
+ keys.each do |key|
+ expect(redis.ttl(key)).to be_within(5.seconds).of(12.weeks)
+ end
+ end
+ end
+
+ it 'sets the keys in Redis to expire automatically after 6 weeks by default' do
+ described_class.track_event(entity1, "g_compliance_dashboard")
+
+ Gitlab::Redis::SharedState.with do |redis|
+ keys = redis.scan_each(match: "g_{compliance}_dashboard-*").to_a
+ expect(keys).not_to be_empty
+
+ keys.each do |key|
+ expect(redis.ttl(key)).to be_within(5.seconds).of(6.weeks)
+ end
+ end
+ end
+ end
+
+ context 'for daily events' do
+ it 'sets the keys in Redis to expire after the given expiry time' do
+ described_class.track_event(entity1, "g_analytics_search")
+
+ Gitlab::Redis::SharedState.with do |redis|
+ keys = redis.scan_each(match: "*-g_{analytics}_search").to_a
+ expect(keys).not_to be_empty
+
+ keys.each do |key|
+ expect(redis.ttl(key)).to be_within(5.seconds).of(84.days)
+ end
+ end
+ end
+
+ it 'sets the keys in Redis to expire after 29 days by default' do
+ described_class.track_event(entity1, "no_slot")
- it "raise error if metrics don't have same aggregation" do
- expect { described_class.unique_events(event_names: %w(g_analytics_contribution g_analytics_valuestream), start_date: 4.weeks.ago, end_date: Date.current) }.to raise_error('Events should have same aggregation level')
+ Gitlab::Redis::SharedState.with do |redis|
+ keys = redis.scan_each(match: "*-{no_slot}").to_a
+ expect(keys).not_to be_empty
+
+ keys.each do |key|
+ expect(redis.ttl(key)).to be_within(5.seconds).of(29.days)
+ end
+ end
+ end
+ end
+ end
end
- context 'when data for the last complete week' do
- it { expect(described_class.unique_events(event_names: weekly_event, start_date: 1.week.ago, end_date: Date.current)).to eq(1) }
+ describe '.unique_events' do
+ before do
+ # events in current week, should not be counted as week is not complete
+ described_class.track_event(entity1, weekly_event, Date.current)
+ described_class.track_event(entity2, weekly_event, Date.current)
+
+ # Events last week
+ described_class.track_event(entity1, weekly_event, 2.days.ago)
+ described_class.track_event(entity1, weekly_event, 2.days.ago)
+ described_class.track_event(entity1, no_slot, 2.days.ago)
+
+ # Events 2 weeks ago
+ described_class.track_event(entity1, weekly_event, 2.weeks.ago)
+
+ # Events 4 weeks ago
+ described_class.track_event(entity3, weekly_event, 4.weeks.ago)
+ described_class.track_event(entity4, weekly_event, 29.days.ago)
+
+ # events in current day should be counted in daily aggregation
+ described_class.track_event(entity1, daily_event, Date.current)
+ described_class.track_event(entity2, daily_event, Date.current)
+
+ # Events last week
+ described_class.track_event(entity1, daily_event, 2.days.ago)
+ described_class.track_event(entity1, daily_event, 2.days.ago)
+
+ # Events 2 weeks ago
+ described_class.track_event(entity1, daily_event, 14.days.ago)
+
+ # Events 4 weeks ago
+ described_class.track_event(entity3, daily_event, 28.days.ago)
+ described_class.track_event(entity4, daily_event, 29.days.ago)
+ end
+
+ it 'raises an error if metrics are not in the same slot' do
+ expect { described_class.unique_events(event_names: [compliance_slot_event, analytics_slot_event], start_date: 4.weeks.ago, end_date: Date.current) }.to raise_error('Events should be in same slot')
+ end
+
+ it 'raises an error if metrics are not in the same category' do
+ expect { described_class.unique_events(event_names: [category_analytics_event, category_productivity_event], start_date: 4.weeks.ago, end_date: Date.current) }.to raise_error('Events should be in same category')
+ end
+
+ it "raise error if metrics don't have same aggregation" do
+ expect { described_class.unique_events(event_names: [daily_event, weekly_event], start_date: 4.weeks.ago, end_date: Date.current) }.to raise_error('Events should have same aggregation level')
+ end
+
+ context 'when data for the last complete week' do
+ it { expect(described_class.unique_events(event_names: weekly_event, start_date: 1.week.ago, end_date: Date.current)).to eq(1) }
+ end
+
+ context 'when data for the last 4 complete weeks' do
+ it { expect(described_class.unique_events(event_names: weekly_event, start_date: 4.weeks.ago, end_date: Date.current)).to eq(2) }
+ end
+
+ context 'when data for the week 4 weeks ago' do
+ it { expect(described_class.unique_events(event_names: weekly_event, start_date: 4.weeks.ago, end_date: 3.weeks.ago)).to eq(1) }
+ end
+
+ context 'when using daily aggregation' do
+ it { expect(described_class.unique_events(event_names: daily_event, start_date: 7.days.ago, end_date: Date.current)).to eq(2) }
+ it { expect(described_class.unique_events(event_names: daily_event, start_date: 28.days.ago, end_date: Date.current)).to eq(3) }
+ it { expect(described_class.unique_events(event_names: daily_event, start_date: 28.days.ago, end_date: 21.days.ago)).to eq(1) }
+ end
+
+ context 'when no slot is set' do
+ it { expect(described_class.unique_events(event_names: no_slot, start_date: 7.days.ago, end_date: Date.current)).to eq(1) }
+ end
end
+ end
- context 'when data for the last 4 complete weeks' do
- it { expect(described_class.unique_events(event_names: weekly_event, start_date: 4.weeks.ago, end_date: Date.current)).to eq(2) }
+ describe 'unique_events_data' do
+ let(:known_events) do
+ [
+ { name: 'event1_slot', redis_slot: "slot", category: 'category1', aggregation: "weekly" },
+ { name: 'event2_slot', redis_slot: "slot", category: 'category1', aggregation: "weekly" },
+ { name: 'event3', category: 'category2', aggregation: "weekly" },
+ { name: 'event4', category: 'category2', aggregation: "weekly" }
+ ].map(&:with_indifferent_access)
end
- context 'when data for the week 4 weeks ago' do
- it { expect(described_class.unique_events(event_names: weekly_event, start_date: 4.weeks.ago, end_date: 3.weeks.ago)).to eq(1) }
+ before do
+ allow(described_class).to receive(:known_events).and_return(known_events)
+ allow(described_class).to receive(:categories).and_return(%w(category1 category2))
+
+ described_class.track_event(entity1, 'event1_slot', 2.days.ago)
+ described_class.track_event(entity2, 'event2_slot', 2.days.ago)
+ described_class.track_event(entity3, 'event2_slot', 2.weeks.ago)
+
+ # events in different slots
+ described_class.track_event(entity2, 'event3', 2.days.ago)
+ described_class.track_event(entity2, 'event4', 2.days.ago)
end
- context 'when using daily aggregation' do
- it { expect(described_class.unique_events(event_names: daily_event, start_date: 7.days.ago, end_date: Date.current)).to eq(2) }
- it { expect(described_class.unique_events(event_names: daily_event, start_date: 28.days.ago, end_date: Date.current)).to eq(3) }
- it { expect(described_class.unique_events(event_names: daily_event, start_date: 28.days.ago, end_date: 21.days.ago)).to eq(1) }
+ it 'returns the number of unique events for all known events' do
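+      # from the setup above: entity1 and entity2 were tracked 2 days ago (weekly total = 2),
+      # while entity3 was tracked 2 weeks ago, so only the monthly total includes it (3)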
+ results = {
+ 'category1' => {
+ 'event1_slot' => 1,
+ 'event2_slot' => 1,
+ 'category1_total_unique_counts_weekly' => 2,
+ 'category1_total_unique_counts_monthly' => 3
+ },
+ 'category2' => {
+ 'event3' => 1,
+ 'event4' => 1
+ }
+ }
+
+ expect(subject.unique_events_data).to eq(results)
end
end
end
diff --git a/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb
new file mode 100644
index 00000000000..479fe36bcdd
--- /dev/null
+++ b/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb
@@ -0,0 +1,111 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_gitlab_redis_shared_state do
+ let(:user1) { build(:user, id: 1) }
+ let(:user2) { build(:user, id: 2) }
+ let(:user3) { build(:user, id: 3) }
+ let(:time) { Time.zone.now }
+
+ shared_examples 'tracks and counts action' do
+ before do
+ stub_application_setting(usage_ping_enabled: true)
+ end
+
+ def count_unique(date_from:, date_to:)
+ Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(event_names: action, start_date: date_from, end_date: date_to)
+ end
+
+ specify do
+ aggregate_failures do
+ expect(track_action(author: user1)).to be_truthy
+ expect(track_action(author: user1)).to be_truthy
+ expect(track_action(author: user2)).to be_truthy
+ expect(track_action(author: user3, time: time - 3.days)).to be_truthy
+
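+        # user1 is deduplicated, so only user1 and user2 count for the current day;
+        # user3 (tracked 3 days ago) is added once the date range is widened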
+ expect(count_unique(date_from: time, date_to: time)).to eq(2)
+ expect(count_unique(date_from: time - 5.days, date_to: 1.day.since(time))).to eq(3)
+ end
+ end
+
+    it 'does not track the action if the author is not present' do
+ expect(track_action(author: nil)).to be_nil
+ end
+
+ context 'when feature flag track_issue_activity_actions is disabled' do
+      it 'does not track the action' do
+ stub_feature_flags(track_issue_activity_actions: false)
+
+ expect(track_action(author: user1)).to be_nil
+ end
+ end
+ end
+
+ context 'for Issue title edit actions' do
+ it_behaves_like 'tracks and counts action' do
+ let(:action) { described_class::ISSUE_TITLE_CHANGED }
+
+ def track_action(params)
+ described_class.track_issue_title_changed_action(params)
+ end
+ end
+ end
+
+ context 'for Issue description edit actions' do
+ it_behaves_like 'tracks and counts action' do
+ let(:action) { described_class::ISSUE_DESCRIPTION_CHANGED }
+
+ def track_action(params)
+ described_class.track_issue_description_changed_action(params)
+ end
+ end
+ end
+
+ context 'for Issue assignee edit actions' do
+ it_behaves_like 'tracks and counts action' do
+ let(:action) { described_class::ISSUE_ASSIGNEE_CHANGED }
+
+ def track_action(params)
+ described_class.track_issue_assignee_changed_action(params)
+ end
+ end
+ end
+
+ context 'for Issue make confidential actions' do
+ it_behaves_like 'tracks and counts action' do
+ let(:action) { described_class::ISSUE_MADE_CONFIDENTIAL }
+
+ def track_action(params)
+ described_class.track_issue_made_confidential_action(params)
+ end
+ end
+ end
+
+ context 'for Issue make visible actions' do
+ it_behaves_like 'tracks and counts action' do
+ let(:action) { described_class::ISSUE_MADE_VISIBLE }
+
+ def track_action(params)
+ described_class.track_issue_made_visible_action(params)
+ end
+ end
+ end
+
+  it 'returns the deduplicated count of actions per user', :aggregate_failures do
+ described_class.track_issue_title_changed_action(author: user1)
+ described_class.track_issue_description_changed_action(author: user1)
+ described_class.track_issue_assignee_changed_action(author: user1)
+ described_class.track_issue_title_changed_action(author: user2, time: time - 2.days)
+ described_class.track_issue_title_changed_action(author: user3, time: time - 3.days)
+ described_class.track_issue_description_changed_action(author: user3, time: time - 3.days)
+ described_class.track_issue_assignee_changed_action(author: user3, time: time - 3.days)
+
+ events = Gitlab::UsageDataCounters::HLLRedisCounter.events_for_category(described_class::ISSUE_CATEGORY)
+ today_count = Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(event_names: events, start_date: time, end_date: time)
+ week_count = Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(event_names: events, start_date: time - 5.days, end_date: 1.day.since(time))
+
+ expect(today_count).to eq(1)
+ expect(week_count).to eq(3)
+ end
+end
diff --git a/spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb
new file mode 100644
index 00000000000..8f9a3e0cd9e
--- /dev/null
+++ b/spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::UsageDataCounters::KubernetesAgentCounter do
+ it_behaves_like 'a redis usage counter', 'Kubernetes Agent', :gitops_sync
+
+ it_behaves_like 'a redis usage counter with totals', :kubernetes_agent, gitops_sync: 1
+
+ describe '.increment_gitops_sync' do
+    it 'increments the gitops_sync counter by the given amount' do
+ described_class.increment_gitops_sync(7)
+ described_class.increment_gitops_sync(2)
+ described_class.increment_gitops_sync(0)
+
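+      # the running total is the sum of all increments: 7 + 2 + 0 = 9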
+ expect(described_class.totals).to eq(kubernetes_agent_gitops_sync: 9)
+ end
+
+ it 'raises for negative numbers' do
+ expect { described_class.increment_gitops_sync(-1) }.to raise_error(ArgumentError)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage_data_counters/redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/redis_counter_spec.rb
index be528b081c5..d4f6110b3df 100644
--- a/spec/lib/gitlab/usage_data_counters/redis_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/redis_counter_spec.rb
@@ -11,23 +11,47 @@ RSpec.describe Gitlab::UsageDataCounters::RedisCounter, :clean_gitlab_redis_shar
stub_application_setting(usage_ping_enabled: setting_value)
end
- context 'when usage_ping is disabled' do
- let(:setting_value) { false }
+ describe '.increment' do
+ context 'when usage_ping is disabled' do
+ let(:setting_value) { false }
+
+ it 'counter is not increased' do
+ expect do
+ subject.increment(redis_key)
+ end.not_to change { subject.total_count(redis_key) }
+ end
+ end
+
+ context 'when usage_ping is enabled' do
+ let(:setting_value) { true }
- it 'counter is not increased' do
- expect do
- subject.increment(redis_key)
- end.not_to change { subject.total_count(redis_key) }
+ it 'counter is increased' do
+ expect do
+ subject.increment(redis_key)
+ end.to change { subject.total_count(redis_key) }.by(1)
+ end
end
end
- context 'when usage_ping is enabled' do
- let(:setting_value) { true }
+ describe '.increment_by' do
+ context 'when usage_ping is disabled' do
+ let(:setting_value) { false }
+
+ it 'counter is not increased' do
+ expect do
+ subject.increment_by(redis_key, 3)
+ end.not_to change { subject.total_count(redis_key) }
+ end
+ end
+
+ context 'when usage_ping is enabled' do
+ let(:setting_value) { true }
- it 'counter is increased' do
- expect do
- subject.increment(redis_key)
- end.to change { subject.total_count(redis_key) }.by(1)
+ it 'counter is increased' do
+ expect do
+ subject.increment_by(redis_key, 3)
+ end.to change { subject.total_count(redis_key) }.by(3)
+ end
end
end
end
diff --git a/spec/lib/gitlab/usage_data_counters/track_unique_actions_spec.rb b/spec/lib/gitlab/usage_data_counters/track_unique_events_spec.rb
index bd348666729..8f5f1347ce8 100644
--- a/spec/lib/gitlab/usage_data_counters/track_unique_actions_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/track_unique_events_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::UsageDataCounters::TrackUniqueActions, :clean_gitlab_redis_shared_state do
+RSpec.describe Gitlab::UsageDataCounters::TrackUniqueEvents, :clean_gitlab_redis_shared_state do
subject(:track_unique_events) { described_class }
let(:time) { Time.zone.now }
@@ -12,7 +12,7 @@ RSpec.describe Gitlab::UsageDataCounters::TrackUniqueActions, :clean_gitlab_redi
end
def count_unique(params)
- track_unique_events.count_unique(params)
+ track_unique_events.count_unique_events(params)
end
context 'tracking an event' do
@@ -33,17 +33,14 @@ RSpec.describe Gitlab::UsageDataCounters::TrackUniqueActions, :clean_gitlab_redi
expect(track_event(event_action: :pushed, event_target: project, author_id: 2)).to be_truthy
expect(track_event(event_action: :pushed, event_target: project, author_id: 3)).to be_truthy
expect(track_event(event_action: :pushed, event_target: project, author_id: 4, time: time - 3.days)).to be_truthy
- expect(track_event(event_action: :created, event_target: project, author_id: 5, time: time - 3.days)).to be_truthy
expect(track_event(event_action: :destroyed, event_target: design, author_id: 3)).to be_truthy
expect(track_event(event_action: :created, event_target: design, author_id: 4)).to be_truthy
expect(track_event(event_action: :updated, event_target: design, author_id: 5)).to be_truthy
- expect(track_event(event_action: :pushed, event_target: design, author_id: 6)).to be_truthy
expect(track_event(event_action: :destroyed, event_target: wiki, author_id: 5)).to be_truthy
expect(track_event(event_action: :created, event_target: wiki, author_id: 3)).to be_truthy
expect(track_event(event_action: :updated, event_target: wiki, author_id: 4)).to be_truthy
- expect(track_event(event_action: :pushed, event_target: wiki, author_id: 6)).to be_truthy
expect(count_unique(event_action: described_class::PUSH_ACTION, date_from: time, date_to: Date.today)).to eq(3)
expect(count_unique(event_action: described_class::PUSH_ACTION, date_from: time - 5.days, date_to: Date.tomorrow)).to eq(4)
@@ -58,17 +55,13 @@ RSpec.describe Gitlab::UsageDataCounters::TrackUniqueActions, :clean_gitlab_redi
context 'when tracking unsuccessfully' do
using RSpec::Parameterized::TableSyntax
- where(:application_setting, :target, :action) do
- true | Project | :invalid_action
- false | Project | :pushed
- true | :invalid_target | :pushed
+ where(:target, :action) do
+ Project | :invalid_action
+ :invalid_target | :pushed
+ Project | :created
end
with_them do
- before do
- stub_application_setting(usage_ping_enabled: application_setting)
- end
-
it 'returns the expected values' do
expect(track_event(event_action: action, event_target: target, author_id: 2)).to be_nil
expect(count_unique(event_action: described_class::PUSH_ACTION, date_from: time, date_to: Date.today)).to eq(0)
diff --git a/spec/lib/gitlab/usage_data_queries_spec.rb b/spec/lib/gitlab/usage_data_queries_spec.rb
new file mode 100644
index 00000000000..7fc77593265
--- /dev/null
+++ b/spec/lib/gitlab/usage_data_queries_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::UsageDataQueries do
+ before do
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
+ end
+
+ describe '.count' do
+ it 'returns the raw SQL' do
+ expect(described_class.count(User)).to start_with('SELECT COUNT("users"."id") FROM "users"')
+ end
+ end
+
+ describe '.distinct_count' do
+ it 'returns the raw SQL' do
+ expect(described_class.distinct_count(Issue, :author_id)).to eq('SELECT COUNT(DISTINCT "issues"."author_id") FROM "issues"')
+ end
+ end
+
+ describe '.redis_usage_data' do
+ subject(:redis_usage_data) { described_class.redis_usage_data { 42 } }
+
+ it 'returns a class for redis_usage_data with a counter call' do
+ expect(described_class.redis_usage_data(Gitlab::UsageDataCounters::WikiPageCounter))
+ .to eq(redis_usage_data_counter: Gitlab::UsageDataCounters::WikiPageCounter)
+ end
+
+ it 'returns a stringified block for redis_usage_data with a block' do
+ is_expected.to include(:redis_usage_data_block)
+ expect(redis_usage_data[:redis_usage_data_block]).to start_with('#<Proc:')
+ end
+ end
+
+ describe '.sum' do
+ it 'returns the raw SQL' do
+ expect(described_class.sum(Issue, :weight)).to eq('SELECT SUM("issues"."weight") FROM "issues"')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index 3be8a770b2b..6631a0d3cc6 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -12,19 +12,20 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
describe '.uncached_data' do
describe '.usage_activity_by_stage' do
- it 'includes usage_activity_by_stage data' do
- uncached_data = described_class.uncached_data
+ subject { described_class.uncached_data }
- expect(uncached_data).to include(:usage_activity_by_stage)
- expect(uncached_data).to include(:usage_activity_by_stage_monthly)
- expect(uncached_data[:usage_activity_by_stage])
+ it 'includes usage_activity_by_stage data' do
+ is_expected.to include(:usage_activity_by_stage)
+ is_expected.to include(:usage_activity_by_stage_monthly)
+ expect(subject[:usage_activity_by_stage])
.to include(:configure, :create, :manage, :monitor, :plan, :release, :verify)
- expect(uncached_data[:usage_activity_by_stage_monthly])
+ expect(subject[:usage_activity_by_stage_monthly])
.to include(:configure, :create, :manage, :monitor, :plan, :release, :verify)
end
it 'clears memoized values' do
values = %i(issue_minimum_id issue_maximum_id
+ project_minimum_id project_maximum_id
user_minimum_id user_maximum_id unique_visit_service
deployment_minimum_id deployment_maximum_id
approval_merge_request_rule_minimum_id
@@ -33,15 +34,13 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(described_class).to receive(:clear_memoization).with(key)
end
- described_class.uncached_data
+ subject
end
    it 'merge_requests_users is included only in monthly counters' do
- uncached_data = described_class.uncached_data
-
- expect(uncached_data[:usage_activity_by_stage][:create])
+ expect(subject[:usage_activity_by_stage][:create])
.not_to include(:merge_requests_users)
- expect(uncached_data[:usage_activity_by_stage_monthly][:create])
+ expect(subject[:usage_activity_by_stage_monthly][:create])
.to include(:merge_requests_users)
end
end
@@ -56,6 +55,21 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
end
+ describe 'usage_activity_by_stage_package' do
+ it 'includes accurate usage_activity_by_stage data' do
+ for_defined_days_back do
+        create(:project, packages: [create(:package)])
+ end
+
+ expect(described_class.usage_activity_by_stage_package({})).to eq(
+ projects_with_packages: 2
+ )
+ expect(described_class.usage_activity_by_stage_package(described_class.last_28_days_time_period)).to eq(
+ projects_with_packages: 1
+ )
+ end
+ end
+
describe '.usage_activity_by_stage_configure' do
it 'includes accurate usage_activity_by_stage data' do
for_defined_days_back do
@@ -178,6 +192,58 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
)
end
+ it 'includes imports usage data' do
+ for_defined_days_back do
+ user = create(:user)
+
+ %w(gitlab_project gitlab github bitbucket bitbucket_server gitea git manifest fogbugz phabricator).each do |type|
+ create(:project, import_type: type, creator_id: user.id)
+ end
+
+ jira_project = create(:project, creator_id: user.id)
+ create(:jira_import_state, :finished, project: jira_project)
+ end
+
+ expect(described_class.usage_activity_by_stage_manage({})).to include(
+ {
+ projects_imported: {
+ gitlab_project: 2,
+ gitlab: 2,
+ github: 2,
+ bitbucket: 2,
+ bitbucket_server: 2,
+ gitea: 2,
+ git: 2,
+ manifest: 2
+ },
+ issues_imported: {
+ jira: 2,
+ fogbugz: 2,
+ phabricator: 2
+ }
+ }
+ )
+ expect(described_class.usage_activity_by_stage_manage(described_class.last_28_days_time_period)).to include(
+ {
+ projects_imported: {
+ gitlab_project: 1,
+ gitlab: 1,
+ github: 1,
+ bitbucket: 1,
+ bitbucket_server: 1,
+ gitea: 1,
+ git: 1,
+ manifest: 1
+ },
+ issues_imported: {
+ jira: 1,
+ fogbugz: 1,
+ phabricator: 1
+ }
+ }
+ )
+ end
+
def omniauth_providers
[
OpenStruct.new(name: 'google_oauth2'),
@@ -218,6 +284,8 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
create(:issue, project: project, author: User.support_bot)
create(:note, project: project, noteable: issue, author: user)
create(:todo, project: project, target: issue, author: user)
+ create(:jira_service, :jira_cloud_service, active: true, project: create(:project, :jira_dvcs_cloud, creator: user))
+ create(:jira_service, active: true, project: create(:project, :jira_dvcs_server, creator: user))
end
expect(described_class.usage_activity_by_stage_plan({})).to include(
@@ -226,7 +294,10 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
projects: 2,
todos: 2,
service_desk_enabled_projects: 2,
- service_desk_issues: 2
+ service_desk_issues: 2,
+ projects_jira_active: 2,
+ projects_jira_dvcs_cloud_active: 2,
+ projects_jira_dvcs_server_active: 2
)
expect(described_class.usage_activity_by_stage_plan(described_class.last_28_days_time_period)).to include(
issues: 2,
@@ -234,7 +305,10 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
projects: 1,
todos: 1,
service_desk_enabled_projects: 1,
- service_desk_issues: 1
+ service_desk_issues: 1,
+ projects_jira_active: 1,
+ projects_jira_dvcs_cloud_active: 1,
+ projects_jira_dvcs_server_active: 1
)
end
end
@@ -325,10 +399,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(UsageDataHelpers::COUNTS_KEYS - count_data.keys).to be_empty
end
- it 'gathers usage counts monthly hash' do
- expect(subject[:counts_monthly]).to be_an(Hash)
- end
-
it 'gathers usage counts correctly' do
count_data = subject[:counts]
@@ -392,6 +462,8 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(count_data[:clusters_applications_jupyter]).to eq(1)
expect(count_data[:clusters_applications_cilium]).to eq(1)
expect(count_data[:clusters_management_project]).to eq(1)
+ expect(count_data[:kubernetes_agents]).to eq(2)
+ expect(count_data[:kubernetes_agents_with_token]).to eq(1)
expect(count_data[:deployments]).to eq(4)
expect(count_data[:successful_deployments]).to eq(2)
@@ -399,6 +471,9 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(count_data[:snippets]).to eq(6)
expect(count_data[:personal_snippets]).to eq(2)
expect(count_data[:project_snippets]).to eq(4)
+
+ expect(count_data[:projects_with_packages]).to eq(2)
+ expect(count_data[:packages]).to eq(4)
end
it 'gathers object store usage correctly' do
@@ -411,10 +486,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
)
end
- it 'gathers topology data' do
- expect(subject.keys).to include(:topology)
- end
-
context 'with existing container expiration policies' do
let_it_be(:disabled) { create(:container_expiration_policy, enabled: false) }
let_it_be(:enabled) { create(:container_expiration_policy, enabled: true) }
@@ -491,9 +562,16 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(counts_monthly[:snippets]).to eq(3)
expect(counts_monthly[:personal_snippets]).to eq(1)
expect(counts_monthly[:project_snippets]).to eq(2)
+ expect(counts_monthly[:packages]).to eq(3)
end
end
+ describe '.usage_counters' do
+ subject { described_class.usage_counters }
+
+ it { is_expected.to include(:kubernetes_agent_gitops_sync) }
+ end
+
describe '.usage_data_counters' do
subject { described_class.usage_data_counters }
@@ -592,6 +670,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(subject[:git][:version]).to eq(Gitlab::Git.version)
expect(subject[:database][:adapter]).to eq(Gitlab::Database.adapter_name)
expect(subject[:database][:version]).to eq(Gitlab::Database.version)
+ expect(subject[:mail][:smtp_server]).to eq(ActionMailer::Base.smtp_settings[:address])
expect(subject[:gitaly][:version]).to be_present
expect(subject[:gitaly][:servers]).to be >= 1
expect(subject[:gitaly][:clusters]).to be >= 0
@@ -878,24 +957,25 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
end
- describe '.merge_requests_users' do
- let(:time_period) { { created_at: 2.days.ago..Time.current } }
- let(:merge_request) { create(:merge_request) }
- let(:other_user) { create(:user) }
- let(:another_user) { create(:user) }
+ describe '.merge_requests_users', :clean_gitlab_redis_shared_state do
+ let(:time_period) { { created_at: 2.days.ago..time } }
+ let(:time) { Time.current }
before do
- create(:event, target: merge_request, author: merge_request.author, created_at: 1.day.ago)
- create(:event, target: merge_request, author: merge_request.author, created_at: 1.hour.ago)
- create(:event, target: merge_request, author: merge_request.author, created_at: 3.days.ago)
- create(:event, target: merge_request, author: other_user, created_at: 1.day.ago)
- create(:event, target: merge_request, author: other_user, created_at: 1.hour.ago)
- create(:event, target: merge_request, author: other_user, created_at: 3.days.ago)
- create(:event, target: merge_request, author: another_user, created_at: 4.days.ago)
+ counter = Gitlab::UsageDataCounters::TrackUniqueEvents
+ merge_request = Event::TARGET_TYPES[:merge_request]
+ design = Event::TARGET_TYPES[:design]
+
+ counter.track_event(event_action: :commented, event_target: merge_request, author_id: 1, time: time)
+ counter.track_event(event_action: :opened, event_target: merge_request, author_id: 1, time: time)
+ counter.track_event(event_action: :merged, event_target: merge_request, author_id: 2, time: time)
+ counter.track_event(event_action: :closed, event_target: merge_request, author_id: 3, time: time)
+ counter.track_event(event_action: :opened, event_target: merge_request, author_id: 4, time: time - 3.days)
+ counter.track_event(event_action: :created, event_target: design, author_id: 5, time: time)
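+      # authors 1, 2 and 3 acted on merge requests within the time period; author 4 is outside
+      # the window and author 5 acted on a design, so the expected distinct count is 3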
end
it 'returns the distinct count of users using merge requests (via events table) within the specified time period' do
- expect(described_class.merge_requests_users(time_period)).to eq(2)
+ expect(described_class.merge_requests_users(time_period)).to eq(3)
end
end
@@ -910,9 +990,12 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
describe '#action_monthly_active_users', :clean_gitlab_redis_shared_state do
let(:time_period) { { created_at: 2.days.ago..time } }
let(:time) { Time.zone.now }
+ let(:user1) { build(:user, id: 1) }
+ let(:user2) { build(:user, id: 2) }
+ let(:user3) { build(:user, id: 3) }
before do
- counter = Gitlab::UsageDataCounters::TrackUniqueActions
+ counter = Gitlab::UsageDataCounters::TrackUniqueEvents
project = Event::TARGET_TYPES[:project]
wiki = Event::TARGET_TYPES[:wiki]
design = Event::TARGET_TYPES[:design]
@@ -922,9 +1005,22 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
counter.track_event(event_action: :pushed, event_target: project, author_id: 2)
counter.track_event(event_action: :pushed, event_target: project, author_id: 3)
counter.track_event(event_action: :pushed, event_target: project, author_id: 4, time: time - 3.days)
- counter.track_event(event_action: :created, event_target: project, author_id: 5, time: time - 3.days)
counter.track_event(event_action: :created, event_target: wiki, author_id: 3)
counter.track_event(event_action: :created, event_target: design, author_id: 3)
+
+ counter = Gitlab::UsageDataCounters::EditorUniqueCounter
+
+ counter.track_web_ide_edit_action(author: user1)
+ counter.track_web_ide_edit_action(author: user1)
+ counter.track_sfe_edit_action(author: user1)
+ counter.track_snippet_editor_edit_action(author: user1)
+ counter.track_snippet_editor_edit_action(author: user1, time: time - 3.days)
+
+ counter.track_web_ide_edit_action(author: user2)
+ counter.track_sfe_edit_action(author: user2)
+
+ counter.track_web_ide_edit_action(author: user3, time: time - 3.days)
+ counter.track_snippet_editor_edit_action(author: user3)
end
it 'returns the distinct count of user actions within the specified time period' do
@@ -932,7 +1028,11 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
{
action_monthly_active_users_design_management: 1,
action_monthly_active_users_project_repo: 3,
- action_monthly_active_users_wiki_repo: 1
+ action_monthly_active_users_wiki_repo: 1,
+ action_monthly_active_users_web_ide_edit: 2,
+ action_monthly_active_users_sfe_edit: 2,
+ action_monthly_active_users_snippet_editor_edit: 2,
+ action_monthly_active_users_ide_edit: 3
}
)
end
@@ -942,8 +1042,8 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
subject { described_class.analytics_unique_visits_data }
it 'returns the number of unique visits to pages with analytics features' do
- ::Gitlab::Analytics::UniqueVisits.analytics_ids.each do |target_id|
- expect_any_instance_of(::Gitlab::Analytics::UniqueVisits).to receive(:unique_visits_for).with(targets: target_id).and_return(123)
+ ::Gitlab::Analytics::UniqueVisits.analytics_events.each do |target|
+ expect_any_instance_of(::Gitlab::Analytics::UniqueVisits).to receive(:unique_visits_for).with(targets: target).and_return(123)
end
expect_any_instance_of(::Gitlab::Analytics::UniqueVisits).to receive(:unique_visits_for).with(targets: :analytics).and_return(543)
@@ -964,6 +1064,9 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
'p_analytics_repo' => 123,
'i_analytics_cohorts' => 123,
'i_analytics_dev_ops_score' => 123,
+ 'i_analytics_instance_statistics' => 123,
+ 'p_analytics_merge_request' => 123,
+ 'g_analytics_merge_request' => 123,
'analytics_unique_visits_for_any_target' => 543,
'analytics_unique_visits_for_any_target_monthly' => 987
}
@@ -978,8 +1081,8 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
described_class.clear_memoization(:unique_visit_service)
allow_next_instance_of(::Gitlab::Analytics::UniqueVisits) do |instance|
- ::Gitlab::Analytics::UniqueVisits.compliance_ids.each do |target_id|
- allow(instance).to receive(:unique_visits_for).with(targets: target_id).and_return(123)
+ ::Gitlab::Analytics::UniqueVisits.compliance_events.each do |target|
+ allow(instance).to receive(:unique_visits_for).with(targets: target).and_return(123)
end
allow(instance).to receive(:unique_visits_for).with(targets: :compliance).and_return(543)
@@ -995,6 +1098,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
'g_compliance_audit_events' => 123,
'i_compliance_credential_inventory' => 123,
'i_compliance_audit_events' => 123,
+ 'a_compliance_audit_events_api' => 123,
'compliance_unique_visits_for_any_target' => 543,
'compliance_unique_visits_for_any_target_monthly' => 987
}
@@ -1002,6 +1106,55 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
end
+ describe '.search_unique_visits_data' do
+ subject { described_class.search_unique_visits_data }
+
+ before do
+ described_class.clear_memoization(:unique_visit_service)
+ events = ::Gitlab::UsageDataCounters::HLLRedisCounter.events_for_category('search')
+ events.each do |event|
+ allow(::Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:unique_events).with(event_names: event, start_date: 7.days.ago.to_date, end_date: Date.current).and_return(123)
+ end
+ allow(::Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:unique_events).with(event_names: events, start_date: 7.days.ago.to_date, end_date: Date.current).and_return(543)
+ allow(::Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:unique_events).with(event_names: events, start_date: 4.weeks.ago.to_date, end_date: Date.current).and_return(987)
+ end
+
+ it 'returns the number of unique visits to pages with search features' do
+ expect(subject).to eq({
+ search_unique_visits: {
+ 'i_search_total' => 123,
+ 'i_search_advanced' => 123,
+ 'i_search_paid' => 123,
+ 'search_unique_visits_for_any_target_weekly' => 543,
+ 'search_unique_visits_for_any_target_monthly' => 987
+ }
+ })
+ end
+ end
+
+ describe 'redis_hll_counters' do
+ subject { described_class.redis_hll_counters }
+
+ let(:categories) { ::Gitlab::UsageDataCounters::HLLRedisCounter.categories }
+ let(:ineligible_total_categories) { ['source_code'] }
+
+    it 'has all known_events' do
+ expect(subject).to have_key(:redis_hll_counters)
+
+ expect(subject[:redis_hll_counters].keys).to match_array(categories)
+
+ categories.each do |category|
+ keys = ::Gitlab::UsageDataCounters::HLLRedisCounter.events_for_category(category)
+
+ if ineligible_total_categories.exclude?(category)
+ keys.append("#{category}_total_unique_counts_weekly", "#{category}_total_unique_counts_monthly")
+ end
+
+ expect(subject[:redis_hll_counters][category].keys).to match_array(keys)
+ end
+ end
+ end
+
describe '.service_desk_counts' do
subject { described_class.send(:service_desk_counts) }
@@ -1014,4 +1167,46 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
service_desk_issues: 2)
end
end
+
+ describe '.snowplow_event_counts' do
+ context 'when self-monitoring project exists' do
+ let_it_be(:project) { create(:project) }
+
+ before do
+ stub_application_setting(self_monitoring_project: project)
+ end
+
+ context 'and product_analytics FF is enabled for it' do
+ before do
+ stub_feature_flags(product_analytics: project)
+
+ create(:product_analytics_event, project: project, se_category: 'epics', se_action: 'promote')
+ create(:product_analytics_event, project: project, se_category: 'epics', se_action: 'promote', collector_tstamp: 28.days.ago)
+ end
+
+ it 'returns promoted_issues for the time period' do
+ expect(described_class.snowplow_event_counts[:promoted_issues]).to eq(2)
+ expect(described_class.snowplow_event_counts(
+ time_period: described_class.last_28_days_time_period(column: :collector_tstamp)
+ )[:promoted_issues]).to eq(1)
+ end
+ end
+
+ context 'and product_analytics FF is disabled' do
+ before do
+ stub_feature_flags(product_analytics: false)
+ end
+
+ it 'returns an empty hash' do
+ expect(described_class.snowplow_event_counts).to eq({})
+ end
+ end
+ end
+
+ context 'when self-monitoring project does not exist' do
+ it 'returns an empty hash' do
+ expect(described_class.snowplow_event_counts).to eq({})
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/utils/gzip_spec.rb b/spec/lib/gitlab/utils/gzip_spec.rb
new file mode 100644
index 00000000000..5d1c62e03d3
--- /dev/null
+++ b/spec/lib/gitlab/utils/gzip_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Utils::Gzip do
+ before do
+ example_class = Class.new do
+ include Gitlab::Utils::Gzip
+
+ def lorem_ipsum
+ "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod "\
+ "tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim "\
+ "veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea "\
+ "commodo consequat. Duis aute irure dolor in reprehenderit in voluptate "\
+ "velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat "\
+ "cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id "\
+ "est laborum."
+ end
+ end
+
+ stub_const('ExampleClass', example_class)
+ end
+
+ subject { ExampleClass.new }
+
+ let(:sample_string) { subject.lorem_ipsum }
+ let(:compressed_string) { subject.gzip_compress(sample_string) }
+
+ describe "#gzip_compress" do
+ it "compresses data passed to it" do
+ expect(compressed_string.length).to be < sample_string.length
+ end
+
+ it "returns uncompressed data when encountering Zlib::GzipFile::Error" do
+ expect(ActiveSupport::Gzip).to receive(:compress).and_raise(Zlib::GzipFile::Error)
+
+ expect(compressed_string.length).to eq sample_string.length
+ end
+ end
+
+ describe "#gzip_decompress" do
+ let(:decompressed_string) { subject.gzip_decompress(compressed_string) }
+
+ it "decompresses encoded data" do
+ expect(decompressed_string).to eq sample_string
+ end
+
+ it "returns compressed data when encountering Zlib::GzipFile::Error" do
+ expect(ActiveSupport::Gzip).to receive(:decompress).and_raise(Zlib::GzipFile::Error)
+
+      expect(decompressed_string).not_to eq sample_string
+ end
+
+ it "returns unmodified data when it is determined to be uncompressed" do
+ expect(subject.gzip_decompress(sample_string)).to eq sample_string
+ end
+ end
+end
diff --git a/spec/lib/gitlab/utils/markdown_spec.rb b/spec/lib/gitlab/utils/markdown_spec.rb
index 001ff5bc487..93d91f7ed90 100644
--- a/spec/lib/gitlab/utils/markdown_spec.rb
+++ b/spec/lib/gitlab/utils/markdown_spec.rb
@@ -52,6 +52,38 @@ RSpec.describe Gitlab::Utils::Markdown do
end
end
+ context 'when string has a product suffix' do
+ let(:string) { 'My Header (ULTIMATE)' }
+
+ it 'ignores a product suffix' do
+ is_expected.to eq 'my-header'
+ end
+
+ context 'with only modifier' do
+ let(:string) { 'My Header (STARTER ONLY)' }
+
+ it 'ignores a product suffix' do
+ is_expected.to eq 'my-header'
+ end
+ end
+
+ context 'with "*" around a product suffix' do
+ let(:string) { 'My Header **(STARTER)**' }
+
+ it 'ignores a product suffix' do
+ is_expected.to eq 'my-header'
+ end
+ end
+
+ context 'with "*" around a product suffix and only modifier' do
+ let(:string) { 'My Header **(STARTER ONLY)**' }
+
+ it 'ignores a product suffix' do
+ is_expected.to eq 'my-header'
+ end
+ end
+ end
+
context 'when string is empty' do
let(:string) { '' }
diff --git a/spec/lib/gitlab/utils/usage_data_spec.rb b/spec/lib/gitlab/utils/usage_data_spec.rb
index 4675cbd7fa1..362cbaa78e9 100644
--- a/spec/lib/gitlab/utils/usage_data_spec.rb
+++ b/spec/lib/gitlab/utils/usage_data_spec.rb
@@ -37,6 +37,28 @@ RSpec.describe Gitlab::Utils::UsageData do
end
end
+ describe '#sum' do
+ let(:relation) { double(:relation) }
+
+ it 'returns the count when counting succeeds' do
+ allow(Gitlab::Database::BatchCount)
+ .to receive(:batch_sum)
+ .with(relation, :column, batch_size: 100, start: 2, finish: 3)
+ .and_return(1)
+
+ expect(described_class.sum(relation, :column, batch_size: 100, start: 2, finish: 3)).to eq(1)
+ end
+
+ it 'returns the fallback value when counting fails' do
+ stub_const("Gitlab::Utils::UsageData::FALLBACK", 15)
+ allow(Gitlab::Database::BatchCount)
+ .to receive(:batch_sum)
+ .and_raise(ActiveRecord::StatementInvalid.new(''))
+
+ expect(described_class.sum(relation, :column)).to eq(15)
+ end
+ end
+
describe '#alt_usage_data' do
it 'returns the fallback when it gets an error' do
expect(described_class.alt_usage_data { raise StandardError } ).to eq(-1)
@@ -76,26 +98,19 @@ RSpec.describe Gitlab::Utils::UsageData do
end
describe '#with_prometheus_client' do
- context 'when Prometheus is enabled' do
+ shared_examples 'query data from Prometheus' do
it 'yields a client instance and returns the block result' do
- expect(Gitlab::Prometheus::Internal).to receive(:prometheus_enabled?).and_return(true)
- expect(Gitlab::Prometheus::Internal).to receive(:uri).and_return('http://prom:9090')
-
result = described_class.with_prometheus_client { |client| client }
expect(result).to be_an_instance_of(Gitlab::PrometheusClient)
end
end
- context 'when Prometheus is disabled' do
- before do
- expect(Gitlab::Prometheus::Internal).to receive(:prometheus_enabled?).and_return(false)
- end
-
+ shared_examples 'does not query data from Prometheus' do
it 'returns nil by default' do
result = described_class.with_prometheus_client { |client| client }
- expect(result).to be nil
+ expect(result).to be_nil
end
it 'returns fallback if provided' do
@@ -104,6 +119,74 @@ RSpec.describe Gitlab::Utils::UsageData do
expect(result).to eq([])
end
end
+
+ shared_examples 'try to query Prometheus with given address' do
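+    # the contexts below cover the /-/ready probe first, then HTTPS, and finally a fallback to plain HTTP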
+ context 'Prometheus is ready' do
+ before do
+ stub_request(:get, /\/-\/ready/)
+ .to_return(status: 200, body: 'Prometheus is Ready.\n')
+ end
+
+ context 'Prometheus is reachable through HTTPS' do
+ it_behaves_like 'query data from Prometheus'
+ end
+
+ context 'Prometheus is not reachable through HTTPS' do
+ before do
+ stub_request(:get, /https:\/\/.*/).to_raise(Errno::ECONNREFUSED)
+ end
+
+ context 'Prometheus is reachable through HTTP' do
+ it_behaves_like 'query data from Prometheus'
+ end
+
+ context 'Prometheus is not reachable through HTTP' do
+ before do
+ stub_request(:get, /http:\/\/.*/).to_raise(Errno::ECONNREFUSED)
+ end
+
+ it_behaves_like 'does not query data from Prometheus'
+ end
+ end
+ end
+
+ context 'Prometheus is not ready' do
+ before do
+ stub_request(:get, /\/-\/ready/)
+ .to_return(status: 503, body: 'Service Unavailable')
+ end
+
+ it_behaves_like 'does not query data from Prometheus'
+ end
+ end
+
+ context 'when Prometheus server address is available from settings' do
+ before do
+ expect(Gitlab::Prometheus::Internal).to receive(:prometheus_enabled?).and_return(true)
+ expect(Gitlab::Prometheus::Internal).to receive(:server_address).and_return('prom:9090')
+ end
+
+ it_behaves_like 'try to query Prometheus with given address'
+ end
+
+ context 'when Prometheus server address is available from Consul service discovery' do
+ before do
+ expect(Gitlab::Prometheus::Internal).to receive(:prometheus_enabled?).and_return(false)
+ expect(Gitlab::Consul::Internal).to receive(:api_url).and_return('http://localhost:8500')
+ expect(Gitlab::Consul::Internal).to receive(:discover_prometheus_server_address).and_return('prom:9090')
+ end
+
+ it_behaves_like 'try to query Prometheus with given address'
+ end
+
+ context 'when Prometheus server address is not available' do
+ before do
+ expect(Gitlab::Prometheus::Internal).to receive(:prometheus_enabled?).and_return(false)
+ expect(Gitlab::Consul::Internal).to receive(:api_url).and_return(nil)
+ end
+
+ it_behaves_like 'does not query data from Prometheus'
+ end
end
describe '#measure_duration' do
@@ -126,4 +209,50 @@ RSpec.describe Gitlab::Utils::UsageData do
end
end
end
+
+ describe '#track_usage_event' do
+ let(:value) { '9f302fea-f828-4ca9-aef4-e10bd723c0b3' }
+ let(:event_name) { 'my_event' }
+ let(:unknown_event) { 'unknown' }
+ let(:feature) { "usage_data_#{event_name}" }
+
+ context 'with feature enabled' do
+ before do
+ stub_feature_flags(feature => true)
+ end
+
+ it 'tracks redis hll event' do
+ stub_application_setting(usage_ping_enabled: true)
+
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event).with(value, event_name)
+
+ described_class.track_usage_event(event_name, value)
+ end
+
+ it 'does not track event when usage ping is not enabled' do
+ stub_application_setting(usage_ping_enabled: false)
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
+
+ described_class.track_usage_event(event_name, value)
+ end
+
+      it 'raises an error for an unknown event' do
+ stub_application_setting(usage_ping_enabled: true)
+
+ expect { described_class.track_usage_event(unknown_event, value) }.to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownEvent)
+ end
+ end
+
+ context 'with feature disabled' do
+ before do
+ stub_feature_flags(feature => false)
+ end
+
+ it 'does not track event' do
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
+
+ described_class.track_usage_event(event_name, value)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/web_ide/config/entry/global_spec.rb b/spec/lib/gitlab/web_ide/config/entry/global_spec.rb
index 3a50667163b..3e29bf89785 100644
--- a/spec/lib/gitlab/web_ide/config/entry/global_spec.rb
+++ b/spec/lib/gitlab/web_ide/config/entry/global_spec.rb
@@ -12,8 +12,7 @@ RSpec.describe Gitlab::WebIde::Config::Entry::Global do
context 'when filtering all the entry/node names' do
it 'contains the expected node names' do
- expect(described_class.nodes.keys)
- .to match_array(%i[terminal])
+ expect(described_class.nodes.keys).to match_array(described_class.allowed_keys)
end
end
end
@@ -34,7 +33,7 @@ RSpec.describe Gitlab::WebIde::Config::Entry::Global do
end
it 'creates node object for each entry' do
- expect(global.descendants.count).to eq 1
+ expect(global.descendants.count).to eq described_class.allowed_keys.length
end
it 'creates node object using valid class' do
diff --git a/spec/lib/gitlab/workhorse_spec.rb b/spec/lib/gitlab/workhorse_spec.rb
index da327ce7706..e9733851590 100644
--- a/spec/lib/gitlab/workhorse_spec.rb
+++ b/spec/lib/gitlab/workhorse_spec.rb
@@ -71,7 +71,7 @@ RSpec.describe Gitlab::Workhorse do
context "when the repository doesn't have an archive file path" do
before do
- allow(project.repository).to receive(:archive_metadata).and_return(Hash.new)
+ allow(project.repository).to receive(:archive_metadata).and_return({})
end
it "raises an error" do
@@ -424,8 +424,9 @@ RSpec.describe Gitlab::Workhorse do
describe '.send_scaled_image' do
let(:location) { 'http://example.com/avatar.png' }
let(:width) { '150' }
+ let(:content_type) { 'image/png' }
- subject { described_class.send_scaled_image(location, width) }
+ subject { described_class.send_scaled_image(location, width, content_type) }
it 'sets the header correctly' do
key, command, params = decode_workhorse_header(subject)
@@ -434,7 +435,8 @@ RSpec.describe Gitlab::Workhorse do
expect(command).to eq("send-scaled-img")
expect(params).to eq({
'Location' => location,
- 'Width' => width
+ 'Width' => width,
+ 'ContentType' => content_type
}.deep_stringify_keys)
end
end