summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2020-07-06 15:08:42 +0000
committerGitLab Bot <gitlab-bot@gitlab.com>2020-07-06 15:08:42 +0000
commitce06ce825b9ef5204a84aaa37d0dfc7742da5037 (patch)
tree2568846a1101580dd1b1f522526378bce7d1357f
parent9fc7cdf0b7a3c8c3528ec930f59fde110683d8fd (diff)
downloadgitlab-ce-ce06ce825b9ef5204a84aaa37d0dfc7742da5037.tar.gz
Add latest changes from gitlab-org/gitlab@master
-rw-r--r--app/assets/javascripts/issuable_bulk_update_sidebar.js10
-rw-r--r--app/models/ci/job_artifact.rb17
-rw-r--r--app/models/plan_limits.rb33
-rw-r--r--app/models/project_statistics.rb7
-rw-r--r--app/serializers/pipeline_entity.rb4
-rw-r--r--app/services/ci/authorize_job_artifact_service.rb53
-rw-r--r--app/services/ci/create_job_artifacts_service.rb116
-rwxr-xr-xbin/changelog21
-rwxr-xr-xbin/feature-flag291
-rw-r--r--changelogs/unreleased/eb-report-file-size-mechanism.yml5
-rw-r--r--changelogs/unreleased/remove-dead-elasticsearch-indexing-code.yml5
-rw-r--r--config/sidekiq_queues.yml2
-rw-r--r--db/migrate/20200616124338_add_plan_limits_for_max_size_per_artifact_type.rb48
-rw-r--r--db/migrate/20200706005325_remove_elastic_batch_project_indexer_worker_queue.rb11
-rw-r--r--db/structure.sql28
-rw-r--r--doc/development/what_requires_downtime.md15
-rw-r--r--doc/integration/elasticsearch.md6
-rw-r--r--lib/api/ci/runner.rb26
-rw-r--r--lib/api/helpers/runner.rb5
-rw-r--r--lib/gitlab/database/migration_helpers.rb18
-rw-r--r--lib/gitlab/metrics/web_transaction.rb5
-rw-r--r--package.json4
-rw-r--r--spec/bin/feature_flag_spec.rb191
-rw-r--r--spec/controllers/projects/pipelines_controller_spec.rb25
-rw-r--r--spec/frontend/registry/shared/components/__snapshots__/expiration_policy_fields_spec.js.snap2
-rw-r--r--spec/lib/gitlab/database/migration_helpers_spec.rb37
-rw-r--r--spec/lib/gitlab/metrics/web_transaction_spec.rb22
-rw-r--r--spec/models/ci/job_artifact_spec.rb96
-rw-r--r--spec/models/plan_limits_spec.rb205
-rw-r--r--spec/models/project_statistics_spec.rb20
-rw-r--r--spec/requests/api/ci/runner_spec.rb189
-rw-r--r--spec/serializers/pipeline_entity_spec.rb24
-rw-r--r--spec/serializers/pipeline_serializer_spec.rb32
-rw-r--r--spec/services/ci/create_job_artifacts_service_spec.rb10
-rw-r--r--yarn.lock18
35 files changed, 1321 insertions, 280 deletions
diff --git a/app/assets/javascripts/issuable_bulk_update_sidebar.js b/app/assets/javascripts/issuable_bulk_update_sidebar.js
index 3ff3646edd7..8b38b38f814 100644
--- a/app/assets/javascripts/issuable_bulk_update_sidebar.js
+++ b/app/assets/javascripts/issuable_bulk_update_sidebar.js
@@ -7,8 +7,6 @@ import MilestoneSelect from './milestone_select';
import issueStatusSelect from './issue_status_select';
import subscriptionSelect from './subscription_select';
import LabelsSelect from './labels_select';
-import HealthStatusSelect from 'ee_else_ce/vue_shared/components/sidebar/health_status_select/health_status_bundle';
-
import issueableEventHub from './issuables_list/eventhub';
const HIDDEN_CLASS = 'hidden';
@@ -66,8 +64,12 @@ export default class IssuableBulkUpdateSidebar {
issueStatusSelect();
subscriptionSelect();
- if (HealthStatusSelect) {
- HealthStatusSelect();
+ if (IS_EE) {
+ import('ee/vue_shared/components/sidebar/health_status_select/health_status_bundle')
+ .then(({ default: HealthStatusSelect }) => {
+ HealthStatusSelect();
+ })
+ .catch(() => {});
}
}
diff --git a/app/models/ci/job_artifact.rb b/app/models/ci/job_artifact.rb
index 73199817ce5..8bad9303046 100644
--- a/app/models/ci/job_artifact.rb
+++ b/app/models/ci/job_artifact.rb
@@ -100,6 +100,8 @@ module Ci
TYPE_AND_FORMAT_PAIRS = INTERNAL_TYPES.merge(REPORT_TYPES).freeze
+ PLAN_LIMIT_PREFIX = 'ci_max_artifact_size_'
+
# This is required since we cannot add a default to the database
# https://gitlab.com/gitlab-org/gitlab/-/issues/215418
attribute :locked, :boolean, default: false
@@ -289,6 +291,21 @@ module Ci
where(job_id: job_id).trace.take&.file&.file&.exists?
end
+ def self.max_artifact_size(type:, project:)
+ max_size = if Feature.enabled?(:ci_max_artifact_size_per_type, project, default_enabled: false)
+ limit_name = "#{PLAN_LIMIT_PREFIX}#{type}"
+
+ project.actual_limits.limit_for(
+ limit_name,
+ alternate_limit: -> { project.closest_setting(:max_artifacts_size) }
+ )
+ else
+ project.closest_setting(:max_artifacts_size)
+ end
+
+ max_size&.megabytes.to_i
+ end
+
private
def file_format_adapter_class
diff --git a/app/models/plan_limits.rb b/app/models/plan_limits.rb
index 575105cfd79..f17078c0cab 100644
--- a/app/models/plan_limits.rb
+++ b/app/models/plan_limits.rb
@@ -1,23 +1,36 @@
# frozen_string_literal: true
class PlanLimits < ApplicationRecord
+ LimitUndefinedError = Class.new(StandardError)
+
belongs_to :plan
- def exceeded?(limit_name, object)
- return false unless enabled?(limit_name)
+ def exceeded?(limit_name, subject, alternate_limit: 0)
+ limit = limit_for(limit_name, alternate_limit: alternate_limit)
+ return false unless limit
- if object.is_a?(Integer)
- object >= read_attribute(limit_name)
- else
- # object.count >= limit value is slower than checking
+ case subject
+ when Integer
+ subject >= limit
+ when ActiveRecord::Relation
+ # We intentionally do not accept just plain ApplicationRecord classes to
+ # enforce the subject to be scoped down to a relation first.
+ #
+ # subject.count >= limit value is slower than checking
# if a record exists at the limit value - 1 position.
- object.offset(read_attribute(limit_name) - 1).exists?
+ subject.offset(limit - 1).exists?
+ else
+ raise ArgumentError, "#{subject.class} is not supported as a limit value"
end
end
- private
+ def limit_for(limit_name, alternate_limit: 0)
+ limit = read_attribute(limit_name)
+ raise LimitUndefinedError, "The limit `#{limit_name}` is undefined" if limit.nil?
+
+ alternate_limit = alternate_limit.call if alternate_limit.respond_to?(:call)
- def enabled?(limit_name)
- read_attribute(limit_name) > 0
+ limits = [limit, alternate_limit]
+ limits.map(&:to_i).select(&:positive?).min
end
end
diff --git a/app/models/project_statistics.rb b/app/models/project_statistics.rb
index 5424abd069f..1d8e72c902a 100644
--- a/app/models/project_statistics.rb
+++ b/app/models/project_statistics.rb
@@ -14,7 +14,6 @@ class ProjectStatistics < ApplicationRecord
COLUMNS_TO_REFRESH = [:repository_size, :wiki_size, :lfs_objects_size, :commit_count, :snippets_size].freeze
INCREMENTABLE_COLUMNS = { build_artifacts_size: %i[storage_size], packages_size: %i[storage_size] }.freeze
NAMESPACE_RELATABLE_COLUMNS = [:repository_size, :wiki_size, :lfs_objects_size].freeze
- FLAGGED_NAMESPACE_RELATABLE_COLUMNS = [*NAMESPACE_RELATABLE_COLUMNS, :snippets_size].freeze
scope :for_project_ids, ->(project_ids) { where(project_id: project_ids) }
@@ -32,7 +31,7 @@ class ProjectStatistics < ApplicationRecord
end
end
- if only.empty? || only.any? { |column| namespace_relatable_columns.include?(column) }
+ if only.empty? || only.any? { |column| NAMESPACE_RELATABLE_COLUMNS.include?(column) }
schedule_namespace_aggregation_worker
end
@@ -111,10 +110,6 @@ class ProjectStatistics < ApplicationRecord
Namespaces::ScheduleAggregationWorker.perform_async(project.namespace_id)
end
end
-
- def namespace_relatable_columns
- Feature.enabled?(:namespace_snippets_size_stat) ? FLAGGED_NAMESPACE_RELATABLE_COLUMNS : NAMESPACE_RELATABLE_COLUMNS
- end
end
ProjectStatistics.prepend_if_ee('EE::ProjectStatistics')
diff --git a/app/serializers/pipeline_entity.rb b/app/serializers/pipeline_entity.rb
index c3ddbb88c9c..8333a0bb863 100644
--- a/app/serializers/pipeline_entity.rb
+++ b/app/serializers/pipeline_entity.rb
@@ -85,6 +85,10 @@ class PipelineEntity < Grape::Entity
pipeline.failed_builds
end
+ expose :tests_total_count, if: -> (pipeline, _) { Feature.enabled?(:build_report_summary, pipeline.project) } do |pipeline|
+ pipeline.test_report_summary.total_count
+ end
+
private
alias_method :pipeline, :object
diff --git a/app/services/ci/authorize_job_artifact_service.rb b/app/services/ci/authorize_job_artifact_service.rb
deleted file mode 100644
index 893e92d427c..00000000000
--- a/app/services/ci/authorize_job_artifact_service.rb
+++ /dev/null
@@ -1,53 +0,0 @@
-# frozen_string_literal: true
-
-module Ci
- class AuthorizeJobArtifactService
- include Gitlab::Utils::StrongMemoize
-
- # Max size of the zipped LSIF artifact
- LSIF_ARTIFACT_MAX_SIZE = 20.megabytes
- LSIF_ARTIFACT_TYPE = 'lsif'
-
- def initialize(job, params, max_size:)
- @job = job
- @max_size = max_size
- @size = params[:filesize]
- @type = params[:artifact_type].to_s
- end
-
- def forbidden?
- lsif? && !code_navigation_enabled?
- end
-
- def too_large?
- size && max_size <= size.to_i
- end
-
- def headers
- default_headers = JobArtifactUploader.workhorse_authorize(has_length: false, maximum_size: max_size)
- default_headers.tap do |h|
- h[:ProcessLsif] = true if lsif? && code_navigation_enabled?
- end
- end
-
- private
-
- attr_reader :job, :size, :type
-
- def code_navigation_enabled?
- strong_memoize(:code_navigation_enabled) do
- Feature.enabled?(:code_navigation, job.project, default_enabled: true)
- end
- end
-
- def lsif?
- strong_memoize(:lsif) do
- type == LSIF_ARTIFACT_TYPE
- end
- end
-
- def max_size
- lsif? ? LSIF_ARTIFACT_MAX_SIZE : @max_size.to_i
- end
- end
-end
diff --git a/app/services/ci/create_job_artifacts_service.rb b/app/services/ci/create_job_artifacts_service.rb
index f0ffe67510b..a8b504e42bc 100644
--- a/app/services/ci/create_job_artifacts_service.rb
+++ b/app/services/ci/create_job_artifacts_service.rb
@@ -3,42 +3,100 @@
module Ci
class CreateJobArtifactsService < ::BaseService
ArtifactsExistError = Class.new(StandardError)
+
+ LSIF_ARTIFACT_TYPE = 'lsif'
+
OBJECT_STORAGE_ERRORS = [
Errno::EIO,
Google::Apis::ServerError,
Signet::RemoteServerError
].freeze
- def execute(job, artifacts_file, params, metadata_file: nil)
- return success if sha256_matches_existing_artifact?(job, params['artifact_type'], artifacts_file)
+ def initialize(job)
+ @job = job
+ @project = job.project
+ end
+
+ def authorize(artifact_type:, filesize: nil)
+ result = validate_requirements(artifact_type: artifact_type, filesize: filesize)
+ return result unless result[:status] == :success
+
+ headers = JobArtifactUploader.workhorse_authorize(has_length: false, maximum_size: max_size(artifact_type))
+ headers[:ProcessLsif] = true if lsif?(artifact_type)
- artifact, artifact_metadata = build_artifact(job, artifacts_file, params, metadata_file)
- result = parse_artifact(job, artifact)
+ success(headers: headers)
+ end
+ def execute(artifacts_file, params, metadata_file: nil)
+ result = validate_requirements(artifact_type: params[:artifact_type], filesize: artifacts_file.size)
return result unless result[:status] == :success
- persist_artifact(job, artifact, artifact_metadata)
+ return success if sha256_matches_existing_artifact?(params[:artifact_type], artifacts_file)
+
+ artifact, artifact_metadata = build_artifact(artifacts_file, params, metadata_file)
+ result = parse_artifact(artifact)
+
+ return result unless result[:status] == :success
+
+ persist_artifact(artifact, artifact_metadata, params)
end
private
- def build_artifact(job, artifacts_file, params, metadata_file)
+ attr_reader :job, :project
+
+ def validate_requirements(artifact_type:, filesize:)
+ return forbidden_type_error(artifact_type) if forbidden_type?(artifact_type)
+ return too_large_error if too_large?(artifact_type, filesize)
+
+ success
+ end
+
+ def forbidden_type?(type)
+ lsif?(type) && !code_navigation_enabled?
+ end
+
+ def too_large?(type, size)
+ size > max_size(type) if size
+ end
+
+ def code_navigation_enabled?
+ Feature.enabled?(:code_navigation, project, default_enabled: true)
+ end
+
+ def lsif?(type)
+ type == LSIF_ARTIFACT_TYPE
+ end
+
+ def max_size(type)
+ Ci::JobArtifact.max_artifact_size(type: type, project: project)
+ end
+
+ def forbidden_type_error(type)
+ error("#{type} artifacts are forbidden", :forbidden)
+ end
+
+ def too_large_error
+ error('file size has reached maximum size limit', :payload_too_large)
+ end
+
+ def build_artifact(artifacts_file, params, metadata_file)
expire_in = params['expire_in'] ||
Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in
artifact = Ci::JobArtifact.new(
job_id: job.id,
- project: job.project,
+ project: project,
file: artifacts_file,
- file_type: params['artifact_type'],
- file_format: params['artifact_format'],
+ file_type: params[:artifact_type],
+ file_format: params[:artifact_format],
file_sha256: artifacts_file.sha256,
expire_in: expire_in)
artifact_metadata = if metadata_file
Ci::JobArtifact.new(
job_id: job.id,
- project: job.project,
+ project: project,
file: metadata_file,
file_type: :metadata,
file_format: :gzip,
@@ -46,7 +104,7 @@ module Ci
expire_in: expire_in)
end
- if Feature.enabled?(:keep_latest_artifact_for_ref, job.project)
+ if Feature.enabled?(:keep_latest_artifact_for_ref, project)
artifact.locked = true
artifact_metadata&.locked = true
end
@@ -54,23 +112,23 @@ module Ci
[artifact, artifact_metadata]
end
- def parse_artifact(job, artifact)
- unless Feature.enabled?(:ci_synchronous_artifact_parsing, job.project, default_enabled: true)
+ def parse_artifact(artifact)
+ unless Feature.enabled?(:ci_synchronous_artifact_parsing, project, default_enabled: true)
return success
end
case artifact.file_type
- when 'dotenv' then parse_dotenv_artifact(job, artifact)
- when 'cluster_applications' then parse_cluster_applications_artifact(job, artifact)
+ when 'dotenv' then parse_dotenv_artifact(artifact)
+ when 'cluster_applications' then parse_cluster_applications_artifact(artifact)
else success
end
end
- def persist_artifact(job, artifact, artifact_metadata)
+ def persist_artifact(artifact, artifact_metadata, params)
Ci::JobArtifact.transaction do
artifact.save!
artifact_metadata&.save!
- unlock_previous_artifacts!(artifact)
+ unlock_previous_artifacts!
# NOTE: The `artifacts_expire_at` column is already deprecated and to be removed in the near future.
job.update_column(:artifacts_expire_at, artifact.expire_at)
@@ -78,42 +136,42 @@ module Ci
success
rescue ActiveRecord::RecordNotUnique => error
- track_exception(error, job, params)
+ track_exception(error, params)
error('another artifact of the same type already exists', :bad_request)
rescue *OBJECT_STORAGE_ERRORS => error
- track_exception(error, job, params)
+ track_exception(error, params)
error(error.message, :service_unavailable)
rescue => error
- track_exception(error, job, params)
+ track_exception(error, params)
error(error.message, :bad_request)
end
- def unlock_previous_artifacts!(artifact)
- return unless Feature.enabled?(:keep_latest_artifact_for_ref, artifact.job.project)
+ def unlock_previous_artifacts!
+ return unless Feature.enabled?(:keep_latest_artifact_for_ref, project)
- Ci::JobArtifact.for_ref(artifact.job.ref, artifact.project_id).locked.update_all(locked: false)
+ Ci::JobArtifact.for_ref(job.ref, project.id).locked.update_all(locked: false)
end
- def sha256_matches_existing_artifact?(job, artifact_type, artifacts_file)
+ def sha256_matches_existing_artifact?(artifact_type, artifacts_file)
existing_artifact = job.job_artifacts.find_by_file_type(artifact_type)
return false unless existing_artifact
existing_artifact.file_sha256 == artifacts_file.sha256
end
- def track_exception(error, job, params)
+ def track_exception(error, params)
Gitlab::ErrorTracking.track_exception(error,
job_id: job.id,
project_id: job.project_id,
- uploading_type: params['artifact_type']
+ uploading_type: params[:artifact_type]
)
end
- def parse_dotenv_artifact(job, artifact)
- Ci::ParseDotenvArtifactService.new(job.project, current_user).execute(artifact)
+ def parse_dotenv_artifact(artifact)
+ Ci::ParseDotenvArtifactService.new(project, current_user).execute(artifact)
end
- def parse_cluster_applications_artifact(job, artifact)
+ def parse_cluster_applications_artifact(artifact)
Clusters::ParseClusterApplicationsArtifactService.new(job, job.user).execute(artifact)
end
end
diff --git a/bin/changelog b/bin/changelog
index 45b6295e331..bdf159a0a22 100755
--- a/bin/changelog
+++ b/bin/changelog
@@ -8,16 +8,6 @@
require 'optparse'
require 'yaml'
-Options = Struct.new(
- :amend,
- :author,
- :dry_run,
- :force,
- :merge_request,
- :title,
- :type,
- :ee
-)
INVALID_TYPE = -1
module ChangelogHelpers
@@ -40,6 +30,17 @@ end
class ChangelogOptionParser
extend ChangelogHelpers
+ Options = Struct.new(
+ :amend,
+ :author,
+ :dry_run,
+ :force,
+ :merge_request,
+ :title,
+ :type,
+ :ee
+ )
+
Type = Struct.new(:name, :description)
TYPES = [
Type.new('added', 'New feature'),
diff --git a/bin/feature-flag b/bin/feature-flag
new file mode 100755
index 00000000000..46d93a11ebd
--- /dev/null
+++ b/bin/feature-flag
@@ -0,0 +1,291 @@
+#!/usr/bin/env ruby
+#
+# Generate a feature flag entry file in the correct location.
+#
+# Automatically stages the file and amends the previous commit if the `--amend`
+# argument is used.
+
+require 'optparse'
+require 'yaml'
+require 'fileutils'
+require 'cgi'
+
+require_relative '../lib/feature/shared' unless defined?(Feature::Shared)
+
+module FeatureFlagHelpers
+ Abort = Class.new(StandardError)
+ Done = Class.new(StandardError)
+
+ def capture_stdout(cmd)
+ output = IO.popen(cmd, &:read)
+ fail_with "command failed: #{cmd.join(' ')}" unless $?.success?
+ output
+ end
+
+ def fail_with(message)
+ raise Abort, "\e[31merror\e[0m #{message}"
+ end
+end
+
+class FeatureFlagOptionParser
+ extend FeatureFlagHelpers
+ extend ::Feature::Shared
+
+ Options = Struct.new(
+ :name,
+ :type,
+ :group,
+ :ee,
+ :amend,
+ :dry_run,
+ :force,
+ :introduced_by_url,
+ :rollout_issue_url
+ )
+
+ class << self
+ def parse(argv)
+ options = Options.new
+
+ parser = OptionParser.new do |opts|
+ opts.banner = "Usage: #{__FILE__} [options] <feature-flag>\n\n"
+
+ # Note: We do not provide a shorthand for this in order to match the `git
+ # commit` interface
+ opts.on('--amend', 'Amend the previous commit') do |value|
+ options.amend = value
+ end
+
+ opts.on('-f', '--force', 'Overwrite an existing entry') do |value|
+ options.force = value
+ end
+
+ opts.on('-m', '--introduced-by-url [string]', String, 'URL to Merge Request introducing Feature Flag') do |value|
+ options.introduced_by_url = value
+ end
+
+ opts.on('-i', '--rollout-issue-url [string]', String, 'URL to Issue rolling out Feature Flag') do |value|
+ options.rollout_issue_url = value
+ end
+
+ opts.on('-n', '--dry-run', "Don't actually write anything, just print") do |value|
+ options.dry_run = value
+ end
+
+ opts.on('-g', '--group [string]', String, "The group introducing a feature flag, like: `group::apm`") do |value|
+ options.group = value if value.start_with?('group::')
+ end
+
+ opts.on('-t', '--type [string]', String, "The category of the feature flag, valid options are: #{TYPES.keys.map(&:to_s).join(', ')}") do |value|
+ options.type = value.to_sym if TYPES[value.to_sym]
+ end
+
+ opts.on('-e', '--ee', 'Generate a feature flag entry for GitLab EE') do |value|
+ options.ee = value
+ end
+
+ opts.on('-h', '--help', 'Print help message') do
+ $stdout.puts opts
+ raise Done.new
+ end
+ end
+
+ parser.parse!(argv)
+
+ unless argv.one?
+ $stdout.puts parser.help
+ $stdout.puts
+ raise Abort, 'Feature flag name is required'
+ end
+
+ # The feature flag name is the first positional argument
+ options.name = argv.first
+
+ options
+ end
+
+ def read_group
+ $stdout.puts ">> Please specify the group introducing feature flag, like `group::apm`:"
+
+ loop do
+ $stdout.print "\n?> "
+ group = $stdin.gets.strip
+ group = nil if group.empty?
+ return group if group.nil? || group.start_with?('group::')
+
+ $stderr.puts "Group needs to include `group::`"
+ end
+ end
+
+ def read_type
+ $stdout.puts ">> Please specify the type of your feature flag:"
+ $stdout.puts
+ TYPES.each do |type, data|
+ $stdout.puts "#{type.to_s.rjust(15)}#{' '*6}#{data[:description]}"
+ end
+
+ loop do
+ $stdout.print "\n?> "
+
+ type = $stdin.gets.strip.to_sym
+ return type if TYPES[type]
+
+ $stderr.puts "Invalid type specified '#{type}'"
+ end
+ end
+
+ def read_issue_url(options)
+ return unless TYPES.dig(options.type, :rollout_issue)
+
+ url = "https://gitlab.com/gitlab-org/gitlab/-/issues/new"
+ title = "[Feature flag] Rollout of `#{options.name}`"
+ description = File.read('.gitlab/issue_templates/Feature Flag Roll Out.md')
+ description.sub!(':feature_name', options.name)
+
+ issue_new_url = url + "?" +
+ "issue[title]=" + CGI.escape(title) + "&"
+ # TODO: We should be able to pick `issuable_template`
+ # + "issue[description]=" + CGI.escape(description)
+
+ $stdout.puts ">> Open this URL and fill the rest of details:"
+ $stdout.puts issue_new_url
+ $stdout.puts
+
+ $stdout.puts ">> Paste URL here, or enter to skip:"
+
+ loop do
+ $stdout.print "\n?> "
+ created_url = $stdin.gets.strip
+ created_url = nil if created_url.empty?
+ return created_url if created_url.nil? || created_url.start_with?('https://')
+
+ $stderr.puts "URL needs to start with https://"
+ end
+ end
+ end
+end
+
+class FeatureFlagCreator
+ include FeatureFlagHelpers
+
+ attr_reader :options
+
+ def initialize(options)
+ @options = options
+ end
+
+ def execute
+ assert_feature_branch!
+ assert_name!
+ assert_existing_feature_flag!
+
+ # Read type from $stdin unless is already set
+ options.type ||= FeatureFlagOptionParser.read_type
+ options.group ||= FeatureFlagOptionParser.read_group
+ options.rollout_issue_url ||= FeatureFlagOptionParser.read_issue_url(options)
+
+ $stdout.puts "\e[32mcreate\e[0m #{file_path}"
+ $stdout.puts contents
+
+ unless options.dry_run
+ write
+ amend_commit if options.amend
+ end
+
+ if editor
+ system("#{editor} '#{file_path}'")
+ end
+ end
+
+ private
+
+ def contents
+ YAML.dump(
+ 'name' => options.name,
+ 'introduced_by_url' => options.introduced_by_url,
+ 'rollout_issue_url' => options.rollout_issue_url,
+ 'group' => options.group.to_s,
+ 'type' => options.type.to_s,
+ 'default_enabled' => false
+ ).strip
+ end
+
+ def write
+ FileUtils.mkdir_p(File.dirname(file_path))
+ File.write(file_path, contents)
+ end
+
+ def editor
+ ENV['EDITOR']
+ end
+
+ def amend_commit
+ fail_with "git add failed" unless system(*%W[git add #{file_path}])
+
+ Kernel.exec(*%w[git commit --amend])
+ end
+
+ def assert_feature_branch!
+ return unless branch_name == 'master'
+
+ fail_with "Create a branch first!"
+ end
+
+ def assert_existing_feature_flag!
+ existing_path = all_feature_flag_names[options.name]
+ return unless existing_path
+ return if options.force
+
+ fail_with "#{existing_path} already exists! Use `--force` to overwrite."
+ end
+
+ def assert_name!
+ return if options.name.match(/\A[a-z0-9_-]+\Z/)
+
+ fail_with "Provide a name for the feature flag that is [a-z0-9_-]"
+ end
+
+ def file_path
+ feature_flags_paths.last
+ .sub('**', options.type.to_s)
+ .sub('*.yml', options.name + '.yml')
+ end
+
+ def all_feature_flag_names
+ @all_feature_flag_names ||=
+ feature_flags_paths.map do |glob_path|
+ Dir.glob(glob_path).map do |path|
+ [File.basename(path, '.yml'), path]
+ end
+ end.flatten(1).to_h
+ end
+
+ def feature_flags_paths
+ paths = []
+ paths << File.join('config', 'feature_flags', '**', '*.yml')
+ paths << File.join('ee', 'config', 'feature_flags', '**', '*.yml') if ee?
+ paths
+ end
+
+ def ee?
+ options.ee
+ end
+
+ def branch_name
+ @branch_name ||= capture_stdout(%w[git symbolic-ref --short HEAD]).strip
+ end
+end
+
+if $0 == __FILE__
+ begin
+ options = FeatureFlagOptionParser.parse(ARGV)
+ FeatureFlagCreator.new(options).execute
+ rescue FeatureFlagHelpers::Abort => ex
+ $stderr.puts ex.message
+ exit 1
+ rescue FeatureFlagHelpers::Done
+ exit
+ end
+end
+
+# vim: ft=ruby
diff --git a/changelogs/unreleased/eb-report-file-size-mechanism.yml b/changelogs/unreleased/eb-report-file-size-mechanism.yml
new file mode 100644
index 00000000000..e8f30d9149d
--- /dev/null
+++ b/changelogs/unreleased/eb-report-file-size-mechanism.yml
@@ -0,0 +1,5 @@
+---
+title: Add plan limits for max size per artifact type
+merge_request: 34767
+author:
+type: added
diff --git a/changelogs/unreleased/remove-dead-elasticsearch-indexing-code.yml b/changelogs/unreleased/remove-dead-elasticsearch-indexing-code.yml
new file mode 100644
index 00000000000..56d40dbc9ce
--- /dev/null
+++ b/changelogs/unreleased/remove-dead-elasticsearch-indexing-code.yml
@@ -0,0 +1,5 @@
+---
+title: Remove dead Elasticsearch indexing code
+merge_request: 35936
+author:
+type: other
diff --git a/config/sidekiq_queues.yml b/config/sidekiq_queues.yml
index a8ce1946244..0449ad1b8a5 100644
--- a/config/sidekiq_queues.yml
+++ b/config/sidekiq_queues.yml
@@ -78,8 +78,6 @@
- 1
- - detect_repository_languages
- 1
-- - elastic_batch_project_indexer
- - 1
- - elastic_commit_indexer
- 1
- - elastic_delete_project
diff --git a/db/migrate/20200616124338_add_plan_limits_for_max_size_per_artifact_type.rb b/db/migrate/20200616124338_add_plan_limits_for_max_size_per_artifact_type.rb
new file mode 100644
index 00000000000..28fadb495dd
--- /dev/null
+++ b/db/migrate/20200616124338_add_plan_limits_for_max_size_per_artifact_type.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+class AddPlanLimitsForMaxSizePerArtifactType < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ def change
+ # We need to set the 20mb default for lsif for backward compatibility
+ # See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/34767#note_371619075
+ add_column :plan_limits, "ci_max_artifact_size_lsif", :integer, default: 20, null: false
+
+ artifact_types.each do |type|
+ add_column :plan_limits, "ci_max_artifact_size_#{type}", :integer, default: 0, null: false
+ end
+ end
+
+ private
+
+ def artifact_types
+ # The list of artifact types (except lsif) from Ci::JobArtifact file_type enum as of this writing.
+ # Intentionally duplicated so that the migration won't change behavior
+ # if ever we remove or add more to the list later on.
+ %w[
+ archive
+ metadata
+ trace
+ junit
+ sast
+ dependency_scanning
+ container_scanning
+ dast
+ codequality
+ license_management
+ license_scanning
+ performance
+ metrics
+ metrics_referee
+ network_referee
+ dotenv
+ cobertura
+ terraform
+ accessibility
+ cluster_applications
+ secret_detection
+ requirements
+ coverage_fuzzing
+ ]
+ end
+end
diff --git a/db/migrate/20200706005325_remove_elastic_batch_project_indexer_worker_queue.rb b/db/migrate/20200706005325_remove_elastic_batch_project_indexer_worker_queue.rb
new file mode 100644
index 00000000000..07854096a8b
--- /dev/null
+++ b/db/migrate/20200706005325_remove_elastic_batch_project_indexer_worker_queue.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+class RemoveElasticBatchProjectIndexerWorkerQueue < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ def up
+ Sidekiq.redis do |conn|
+ conn.del "queue:elastic_batch_project_indexer"
+ end
+ end
+end
diff --git a/db/structure.sql b/db/structure.sql
index 90a8b9e6a72..bb5dcbbe68c 100644
--- a/db/structure.sql
+++ b/db/structure.sql
@@ -13762,7 +13762,31 @@ CREATE TABLE public.plan_limits (
ci_pipeline_schedules integer DEFAULT 10 NOT NULL,
offset_pagination_limit integer DEFAULT 50000 NOT NULL,
ci_instance_level_variables integer DEFAULT 25 NOT NULL,
- storage_size_limit integer DEFAULT 0 NOT NULL
+ storage_size_limit integer DEFAULT 0 NOT NULL,
+ ci_max_artifact_size_lsif integer DEFAULT 20 NOT NULL,
+ ci_max_artifact_size_archive integer DEFAULT 0 NOT NULL,
+ ci_max_artifact_size_metadata integer DEFAULT 0 NOT NULL,
+ ci_max_artifact_size_trace integer DEFAULT 0 NOT NULL,
+ ci_max_artifact_size_junit integer DEFAULT 0 NOT NULL,
+ ci_max_artifact_size_sast integer DEFAULT 0 NOT NULL,
+ ci_max_artifact_size_dependency_scanning integer DEFAULT 0 NOT NULL,
+ ci_max_artifact_size_container_scanning integer DEFAULT 0 NOT NULL,
+ ci_max_artifact_size_dast integer DEFAULT 0 NOT NULL,
+ ci_max_artifact_size_codequality integer DEFAULT 0 NOT NULL,
+ ci_max_artifact_size_license_management integer DEFAULT 0 NOT NULL,
+ ci_max_artifact_size_license_scanning integer DEFAULT 0 NOT NULL,
+ ci_max_artifact_size_performance integer DEFAULT 0 NOT NULL,
+ ci_max_artifact_size_metrics integer DEFAULT 0 NOT NULL,
+ ci_max_artifact_size_metrics_referee integer DEFAULT 0 NOT NULL,
+ ci_max_artifact_size_network_referee integer DEFAULT 0 NOT NULL,
+ ci_max_artifact_size_dotenv integer DEFAULT 0 NOT NULL,
+ ci_max_artifact_size_cobertura integer DEFAULT 0 NOT NULL,
+ ci_max_artifact_size_terraform integer DEFAULT 0 NOT NULL,
+ ci_max_artifact_size_accessibility integer DEFAULT 0 NOT NULL,
+ ci_max_artifact_size_cluster_applications integer DEFAULT 0 NOT NULL,
+ ci_max_artifact_size_secret_detection integer DEFAULT 0 NOT NULL,
+ ci_max_artifact_size_requirements integer DEFAULT 0 NOT NULL,
+ ci_max_artifact_size_coverage_fuzzing integer DEFAULT 0 NOT NULL
);
CREATE SEQUENCE public.plan_limits_id_seq
@@ -23510,6 +23534,7 @@ COPY "schema_migrations" (version) FROM STDIN;
20200615193524
20200615232735
20200615234047
+20200616124338
20200616145031
20200617000757
20200617001001
@@ -23546,5 +23571,6 @@ COPY "schema_migrations" (version) FROM STDIN;
20200626130220
20200702123805
20200703154822
+20200706005325
\.
diff --git a/doc/development/what_requires_downtime.md b/doc/development/what_requires_downtime.md
index 975df031cec..d9907abbb92 100644
--- a/doc/development/what_requires_downtime.md
+++ b/doc/development/what_requires_downtime.md
@@ -202,6 +202,21 @@ end
And that's it, we're done!
+### Casting data to a new type
+
+Some type changes require casting data to a new type. For example, when changing from `text` to `jsonb`.
+In this case, use the `type_cast_function` option.
+Make sure there is no bad data and the cast will always succeed. You can also provide a custom function that handles
+casting errors.
+
+Example migration:
+
+```ruby
+ def up
+ change_column_type_concurrently :users, :settings, :jsonb, type_cast_function: 'jsonb'
+ end
+```
+
## Changing The Schema For Large Tables
While `change_column_type_concurrently` and `rename_column_concurrently` can be
diff --git a/doc/integration/elasticsearch.md b/doc/integration/elasticsearch.md
index d3b1526a895..bd7230c2f68 100644
--- a/doc/integration/elasticsearch.md
+++ b/doc/integration/elasticsearch.md
@@ -585,12 +585,6 @@ Here are some common pitfalls and how to overcome them:
You can run `sudo gitlab-rake gitlab:elastic:projects_not_indexed` to display projects that aren't indexed.
-- **No new data is added to the Elasticsearch index when I push code**
-
- When performing the initial indexing of blobs, we lock all projects until the project finishes indexing. It could
- happen that an error during the process causes one or multiple projects to remain locked. In order to unlock them,
- run the `gitlab:elastic:clear_locked_projects` Rake task.
-
- **"Can't specify parent if no parent field has been configured"**
If you enabled Elasticsearch before GitLab 8.12 and have not rebuilt indexes you will get
diff --git a/lib/api/ci/runner.rb b/lib/api/ci/runner.rb
index b216406695b..4c96acd3ea7 100644
--- a/lib/api/ci/runner.rb
+++ b/lib/api/ci/runner.rb
@@ -218,25 +218,31 @@ module API
params do
requires :id, type: Integer, desc: %q(Job's ID)
optional :token, type: String, desc: %q(Job's authentication token)
+
+ # NOTE:
+ # In the current runner, the filesize parameter will be empty here, because the archive is
+ # streamed by the runner and so its size is not known ahead of time. Streaming avoids the
+ # extra I/O of first saving the archive on the runner and then sending it over the network.
optional :filesize, type: Integer, desc: %q(Artifacts filesize)
+
optional :artifact_type, type: String, desc: %q(The type of artifact),
default: 'archive', values: ::Ci::JobArtifact.file_types.keys
end
post '/:id/artifacts/authorize' do
not_allowed! unless Gitlab.config.artifacts.enabled
require_gitlab_workhorse!
- Gitlab::Workhorse.verify_api_request!(headers)
job = authenticate_job!
- service = ::Ci::AuthorizeJobArtifactService.new(job, params, max_size: max_artifacts_size(job))
-
- forbidden! if service.forbidden?
- file_too_large! if service.too_large?
+ result = ::Ci::CreateJobArtifactsService.new(job).authorize(artifact_type: params[:artifact_type], filesize: params[:filesize])
- status 200
- content_type Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE
- service.headers
+ if result[:status] == :success
+ content_type Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE
+ status :ok
+ result[:headers]
+ else
+ render_api_error!(result[:message], result[:http_status])
+ end
end
desc 'Upload artifacts for job' do
@@ -267,9 +273,7 @@ module API
artifacts = params[:file]
metadata = params[:metadata]
- file_too_large! unless artifacts.size < max_artifacts_size(job)
-
- result = ::Ci::CreateJobArtifactsService.new(job.project).execute(job, artifacts, params, metadata_file: metadata)
+ result = ::Ci::CreateJobArtifactsService.new(job).execute(artifacts, params, metadata_file: metadata)
if result[:status] == :success
status :created
diff --git a/lib/api/helpers/runner.rb b/lib/api/helpers/runner.rb
index d3824597d08..34a2fb09875 100644
--- a/lib/api/helpers/runner.rb
+++ b/lib/api/helpers/runner.rb
@@ -69,11 +69,6 @@ module API
token && job.valid_token?(token)
end
- def max_artifacts_size(job)
- max_size = job.project.closest_setting(:max_artifacts_size)
- max_size.megabytes.to_i
- end
-
def job_forbidden!(job, reason)
header 'Job-Status', job.status
forbidden!(reason)
diff --git a/lib/gitlab/database/migration_helpers.rb b/lib/gitlab/database/migration_helpers.rb
index 62a053e9c5a..f9ac1fae042 100644
--- a/lib/gitlab/database/migration_helpers.rb
+++ b/lib/gitlab/database/migration_helpers.rb
@@ -477,7 +477,7 @@ module Gitlab
# type is used.
# batch_column_name - option is for tables without primary key, in this
# case another unique integer column can be used. Example: :user_id
- def rename_column_concurrently(table, old, new, type: nil, batch_column_name: :id)
+ def rename_column_concurrently(table, old, new, type: nil, type_cast_function: nil, batch_column_name: :id)
unless column_exists?(table, batch_column_name)
raise "Column #{batch_column_name} does not exist on #{table}"
end
@@ -488,7 +488,7 @@ module Gitlab
check_trigger_permissions!(table)
- create_column_from(table, old, new, type: type, batch_column_name: batch_column_name)
+ create_column_from(table, old, new, type: type, batch_column_name: batch_column_name, type_cast_function: type_cast_function)
install_rename_triggers(table, old, new)
end
@@ -536,10 +536,10 @@ module Gitlab
# table - The table containing the column.
# column - The name of the column to change.
# new_type - The new column type.
- def change_column_type_concurrently(table, column, new_type)
+ def change_column_type_concurrently(table, column, new_type, type_cast_function: nil)
temp_column = "#{column}_for_type_change"
- rename_column_concurrently(table, column, temp_column, type: new_type)
+ rename_column_concurrently(table, column, temp_column, type: new_type, type_cast_function: type_cast_function)
end
# Performs cleanup of a concurrent type change.
@@ -1268,7 +1268,7 @@ into similar problems in the future (e.g. when new tables are created).
"ON DELETE #{on_delete.upcase}"
end
- def create_column_from(table, old, new, type: nil, batch_column_name: :id)
+ def create_column_from(table, old, new, type: nil, batch_column_name: :id, type_cast_function: nil)
old_col = column_for(table, old)
new_type = type || old_col.type
@@ -1282,7 +1282,13 @@ into similar problems in the future (e.g. when new tables are created).
# necessary since we copy over old values further down.
change_column_default(table, new, old_col.default) unless old_col.default.nil?
- update_column_in_batches(table, new, Arel::Table.new(table)[old], batch_column_name: batch_column_name)
+ old_value = Arel::Table.new(table)[old]
+
+ if type_cast_function.present?
+ old_value = Arel::Nodes::NamedFunction.new(type_cast_function, [old_value])
+ end
+
+ update_column_in_batches(table, new, old_value, batch_column_name: batch_column_name)
add_not_null_constraint(table, new) unless old_col.null
diff --git a/lib/gitlab/metrics/web_transaction.rb b/lib/gitlab/metrics/web_transaction.rb
index a386800bef1..2064f9290d3 100644
--- a/lib/gitlab/metrics/web_transaction.rb
+++ b/lib/gitlab/metrics/web_transaction.rb
@@ -65,7 +65,10 @@ module Gitlab
if route
path = endpoint_paths_cache[route.request_method][route.path]
- { controller: 'Grape', action: "#{route.request_method} #{path}" }
+
+ # Feature categories will be added for grape endpoints in
+ # https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/462
+ { controller: 'Grape', action: "#{route.request_method} #{path}", feature_category: '' }
end
end
diff --git a/package.json b/package.json
index 680c850bd9d..0a9cda99156 100644
--- a/package.json
+++ b/package.json
@@ -40,8 +40,8 @@
"@babel/plugin-syntax-import-meta": "^7.10.1",
"@babel/preset-env": "^7.10.1",
"@gitlab/at.js": "1.5.5",
- "@gitlab/svgs": "1.147.0",
- "@gitlab/ui": "17.10.1",
+ "@gitlab/svgs": "1.148.0",
+ "@gitlab/ui": "17.16.0",
"@gitlab/visual-review-tools": "1.6.1",
"@rails/actioncable": "^6.0.3-1",
"@sentry/browser": "^5.10.2",
diff --git a/spec/bin/feature_flag_spec.rb b/spec/bin/feature_flag_spec.rb
new file mode 100644
index 00000000000..3a315a13686
--- /dev/null
+++ b/spec/bin/feature_flag_spec.rb
@@ -0,0 +1,191 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+load File.expand_path('../../bin/feature-flag', __dir__)
+
+RSpec.describe 'bin/feature-flag' do
+ using RSpec::Parameterized::TableSyntax
+
+ describe FeatureFlagCreator do
+ let(:argv) { %w[feature-flag-name -t development -g group::memory -i https://url] }
+ let(:options) { FeatureFlagOptionParser.parse(argv) }
+ let(:creator) { described_class.new(options) }
+ let(:existing_flag) { File.join('config', 'feature_flags', 'development', 'existing-feature-flag.yml') }
+
+ before do
+ # create a dummy feature flag
+ FileUtils.mkdir_p(File.dirname(existing_flag))
+ File.write(existing_flag, '{}')
+
+ # ignore writes
+ allow(File).to receive(:write).and_return(true)
+
+ # ignore stdin
+ allow($stdin).to receive(:gets).and_raise('EOF')
+
+ # ignore Git commands
+ allow(creator).to receive(:branch_name) { 'feature-branch' }
+ end
+
+ after do
+ FileUtils.rm_f(existing_flag)
+ end
+
+ subject { creator.execute }
+
+ it 'properly creates a feature flag' do
+ expect(File).to receive(:write).with(
+ File.join('config', 'feature_flags', 'development', 'feature-flag-name.yml'),
+ anything)
+
+ expect do
+ subject
+ end.to output(/name: feature-flag-name/).to_stdout
+ end
+
+ context 'when running on master' do
+ it 'requires feature branch' do
+ expect(creator).to receive(:branch_name) { 'master' }
+
+ expect { subject }.to raise_error(FeatureFlagHelpers::Abort, /Create a branch first/)
+ end
+ end
+
+ context 'validates feature flag name' do
+ where(:argv, :ex) do
+ %w[.invalid.feature.flag] | /Provide a name for the feature flag that is/
+ %w[existing-feature-flag] | /already exists!/
+ end
+
+ with_them do
+ it do
+ expect { subject }.to raise_error(ex)
+ end
+ end
+ end
+ end
+
+ describe FeatureFlagOptionParser do
+ describe '.parse' do
+ where(:param, :argv, :result) do
+ :name | %w[foo] | 'foo'
+ :amend | %w[foo --amend] | true
+ :force | %w[foo -f] | true
+ :force | %w[foo --force] | true
+ :ee | %w[foo -e] | true
+ :ee | %w[foo --ee] | true
+ :introduced_by_url | %w[foo -m https://url] | 'https://url'
+ :introduced_by_url | %w[foo --introduced-by-url https://url] | 'https://url'
+ :rollout_issue_url | %w[foo -i https://url] | 'https://url'
+ :rollout_issue_url | %w[foo --rollout-issue-url https://url] | 'https://url'
+ :dry_run | %w[foo -n] | true
+ :dry_run | %w[foo --dry-run] | true
+ :type | %w[foo -t development] | :development
+ :type | %w[foo --type development] | :development
+ :type | %w[foo -t invalid] | nil
+ :type | %w[foo --type invalid] | nil
+ :group | %w[foo -g group::memory] | 'group::memory'
+ :group | %w[foo --group group::memory] | 'group::memory'
+ :group | %w[foo -g invalid] | nil
+ :group | %w[foo --group invalid] | nil
+ end
+
+ with_them do
+ it do
+ options = described_class.parse(Array(argv))
+
+ expect(options.public_send(param)).to eq(result)
+ end
+ end
+
+ it 'missing feature flag name' do
+ expect do
+ expect { described_class.parse(%w[--amend]) }.to output(/Feature flag name is required/).to_stdout
+ end.to raise_error(FeatureFlagHelpers::Abort)
+ end
+
+ it 'parses -h' do
+ expect do
+ expect { described_class.parse(%w[foo -h]) }.to output(/Usage:/).to_stdout
+ end.to raise_error(FeatureFlagHelpers::Done)
+ end
+ end
+
+ describe '.read_type' do
+ let(:type) { 'development' }
+
+ it 'reads type from $stdin' do
+ expect($stdin).to receive(:gets).and_return(type)
+ expect do
+ expect(described_class.read_type).to eq(:development)
+ end.to output(/specify the type/).to_stdout
+ end
+
+ context 'invalid type given' do
+ let(:type) { 'invalid' }
+
+ it 'shows error message and retries' do
+ expect($stdin).to receive(:gets).and_return(type)
+ expect($stdin).to receive(:gets).and_raise('EOF')
+
+ expect do
+ expect { described_class.read_type }.to raise_error(/EOF/)
+ end.to output(/specify the type/).to_stdout
+ .and output(/Invalid type specified/).to_stderr
+ end
+ end
+ end
+
+ describe '.read_group' do
+ let(:group) { 'group::memory' }
+
+ it 'reads group from $stdin' do
+ expect($stdin).to receive(:gets).and_return(group)
+ expect do
+ expect(described_class.read_group).to eq('group::memory')
+ end.to output(/specify the group/).to_stdout
+ end
+
+ context 'invalid group given' do
+ let(:type) { 'invalid' }
+
+ it 'shows error message and retries' do
+ expect($stdin).to receive(:gets).and_return(type)
+ expect($stdin).to receive(:gets).and_raise('EOF')
+
+ expect do
+ expect { described_class.read_group }.to raise_error(/EOF/)
+ end.to output(/specify the group/).to_stdout
+ .and output(/Group needs to include/).to_stderr
+ end
+ end
+ end
+
+ describe '.rollout_issue_url' do
+ let(:options) { OpenStruct.new(name: 'foo', type: :development) }
+ let(:url) { 'https://issue' }
+
+ it 'reads the issue URL from $stdin' do
+ expect($stdin).to receive(:gets).and_return(url)
+ expect do
+ expect(described_class.read_issue_url(options)).to eq('https://issue')
+ end.to output(/Paste URL here/).to_stdout
+ end
+
+ context 'invalid URL given' do
+ let(:type) { 'invalid' }
+
+ it 'shows error message and retries' do
+ expect($stdin).to receive(:gets).and_return(type)
+ expect($stdin).to receive(:gets).and_raise('EOF')
+
+ expect do
+ expect { described_class.read_issue_url(options) }.to raise_error(/EOF/)
+ end.to output(/Paste URL here/).to_stdout
+ .and output(/URL needs to start/).to_stderr
+ end
+ end
+ end
+ end
+end
diff --git a/spec/controllers/projects/pipelines_controller_spec.rb b/spec/controllers/projects/pipelines_controller_spec.rb
index ca034af8249..6c75e766fbd 100644
--- a/spec/controllers/projects/pipelines_controller_spec.rb
+++ b/spec/controllers/projects/pipelines_controller_spec.rb
@@ -46,7 +46,7 @@ RSpec.describe Projects::PipelinesController do
end
end
- it 'does not execute N+1 queries' do
+ it 'executes N+1 queries' do
get_pipelines_index_json
control_count = ActiveRecord::QueryRecorder.new do
@@ -56,10 +56,31 @@ RSpec.describe Projects::PipelinesController do
create_all_pipeline_types
# There appears to be one extra query for Pipelines#has_warnings? for some reason
- expect { get_pipelines_index_json }.not_to exceed_query_limit(control_count + 1)
+ expect { get_pipelines_index_json }.not_to exceed_query_limit(control_count + 7)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['pipelines'].count).to eq 12
end
+
+ context 'with build_report_summary turned off' do
+ before do
+ stub_feature_flags(build_report_summary: false)
+ end
+
+ it 'does not execute N+1 queries' do
+ get_pipelines_index_json
+
+ control_count = ActiveRecord::QueryRecorder.new do
+ get_pipelines_index_json
+ end.count
+
+ create_all_pipeline_types
+
+ # There appears to be one extra query for Pipelines#has_warnings? for some reason
+ expect { get_pipelines_index_json }.not_to exceed_query_limit(control_count + 1)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['pipelines'].count).to eq 12
+ end
+ end
end
it 'does not include coverage data for the pipelines' do
diff --git a/spec/frontend/registry/shared/components/__snapshots__/expiration_policy_fields_spec.js.snap b/spec/frontend/registry/shared/components/__snapshots__/expiration_policy_fields_spec.js.snap
index 2a12f58ed94..9f30ed43093 100644
--- a/spec/frontend/registry/shared/components/__snapshots__/expiration_policy_fields_spec.js.snap
+++ b/spec/frontend/registry/shared/components/__snapshots__/expiration_policy_fields_spec.js.snap
@@ -123,6 +123,7 @@ exports[`Expiration Policy Form renders 1`] = `
<gl-form-textarea-stub
disabled="true"
id="expiration-policy-name-matching"
+ noresize="true"
placeholder=".*"
trim=""
value=""
@@ -139,6 +140,7 @@ exports[`Expiration Policy Form renders 1`] = `
<gl-form-textarea-stub
disabled="true"
id="expiration-policy-keep-name"
+ noresize="true"
placeholder=""
trim=""
value=""
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 12f97d23a6b..37c820fceaa 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -690,12 +690,28 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
model.rename_column_concurrently(:users, :old, :new)
end
+ context 'with existing records and type casting' do
+ let(:trigger_name) { model.rename_trigger_name(:users, :id, :new) }
+ let(:user) { create(:user) }
+
+ it 'copies the value to the new column using the type_cast_function', :aggregate_failures do
+ expect(model).to receive(:copy_indexes).with(:users, :id, :new)
+ expect(model).to receive(:add_not_null_constraint).with(:users, :new)
+ expect(model).to receive(:execute).with("UPDATE \"users\" SET \"new\" = cast_to_jsonb_with_default(\"users\".\"id\") WHERE \"users\".\"id\" >= #{user.id}")
+ expect(model).to receive(:execute).with("DROP TRIGGER IF EXISTS #{trigger_name}\nON \"users\"\n")
+ expect(model).to receive(:execute).with("CREATE TRIGGER #{trigger_name}\nBEFORE INSERT OR UPDATE\nON \"users\"\nFOR EACH ROW\nEXECUTE PROCEDURE #{trigger_name}()\n")
+ expect(model).to receive(:execute).with("CREATE OR REPLACE FUNCTION #{trigger_name}()\nRETURNS trigger AS\n$BODY$\nBEGIN\n NEW.\"new\" := NEW.\"id\";\n RETURN NEW;\nEND;\n$BODY$\nLANGUAGE 'plpgsql'\nVOLATILE\n")
+
+ model.rename_column_concurrently(:users, :id, :new, type_cast_function: 'cast_to_jsonb_with_default')
+ end
+ end
+
it 'passes the batch_column_name' do
expect(model).to receive(:column_exists?).with(:users, :other_batch_column).and_return(true)
expect(model).to receive(:check_trigger_permissions!).and_return(true)
expect(model).to receive(:create_column_from).with(
- :users, :old, :new, type: nil, batch_column_name: :other_batch_column
+ :users, :old, :new, type: nil, batch_column_name: :other_batch_column, type_cast_function: nil
).and_return(true)
expect(model).to receive(:install_rename_triggers).and_return(true)
@@ -703,6 +719,14 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
model.rename_column_concurrently(:users, :old, :new, batch_column_name: :other_batch_column)
end
+ it 'passes the type_cast_function' do
+ expect(model).to receive(:create_column_from).with(
+ :users, :old, :new, type: nil, batch_column_name: :id, type_cast_function: 'JSON'
+ ).and_return(true)
+
+ model.rename_column_concurrently(:users, :old, :new, type_cast_function: 'JSON')
+ end
+
it 'raises an error with invalid batch_column_name' do
expect do
model.rename_column_concurrently(:users, :old, :new, batch_column_name: :invalid)
@@ -866,10 +890,19 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
describe '#change_column_type_concurrently' do
it 'changes the column type' do
expect(model).to receive(:rename_column_concurrently)
- .with('users', 'username', 'username_for_type_change', type: :text)
+ .with('users', 'username', 'username_for_type_change', type: :text, type_cast_function: nil)
model.change_column_type_concurrently('users', 'username', :text)
end
+
+ context 'with type cast' do
+ it 'changes the column type with casting the value to the new type' do
+ expect(model).to receive(:rename_column_concurrently)
+ .with('users', 'username', 'username_for_type_change', type: :text, type_cast_function: 'JSON')
+
+ model.change_column_type_concurrently('users', 'username', :text, type_cast_function: 'JSON')
+ end
+ end
end
describe '#cleanup_concurrent_column_type_change' do
diff --git a/spec/lib/gitlab/metrics/web_transaction_spec.rb b/spec/lib/gitlab/metrics/web_transaction_spec.rb
index 5f5e780f007..389a0a04904 100644
--- a/spec/lib/gitlab/metrics/web_transaction_spec.rb
+++ b/spec/lib/gitlab/metrics/web_transaction_spec.rb
@@ -70,6 +70,9 @@ RSpec.describe Gitlab::Metrics::WebTransaction do
end
describe '#labels' do
+ let(:request) { double(:request, format: double(:format, ref: :html)) }
+ let(:controller_class) { double(:controller_class, name: 'TestController') }
+
context 'when request goes to Grape endpoint' do
before do
route = double(:route, request_method: 'GET', path: '/:version/projects/:id/archive(.:format)')
@@ -77,8 +80,9 @@ RSpec.describe Gitlab::Metrics::WebTransaction do
env['api.endpoint'] = endpoint
end
+
it 'provides labels with the method and path of the route in the grape endpoint' do
- expect(transaction.labels).to eq({ controller: 'Grape', action: 'GET /projects/:id/archive' })
+ expect(transaction.labels).to eq({ controller: 'Grape', action: 'GET /projects/:id/archive', feature_category: '' })
end
it 'does not provide labels if route infos are missing' do
@@ -92,9 +96,6 @@ RSpec.describe Gitlab::Metrics::WebTransaction do
end
context 'when request goes to ActionController' do
- let(:request) { double(:request, format: double(:format, ref: :html)) }
- let(:controller_class) { double(:controller_class, name: 'TestController') }
-
before do
controller = double(:controller, class: controller_class, action_name: 'show', request: request)
@@ -129,6 +130,19 @@ RSpec.describe Gitlab::Metrics::WebTransaction do
end
end
+ it 'returns the same labels for API and controller requests' do
+ route = double(:route, request_method: 'GET', path: '/:version/projects/:id/archive(.:format)')
+ endpoint = double(:endpoint, route: route)
+ api_env = { 'api.endpoint' => endpoint }
+ api_labels = described_class.new(api_env).labels
+
+ controller = double(:controller, class: controller_class, action_name: 'show', request: request)
+ controller_env = { 'action_controller.instance' => controller }
+ controller_labels = described_class.new(controller_env).labels
+
+ expect(api_labels.keys).to contain_exactly(*controller_labels.keys)
+ end
+
it 'returns no labels when no route information is present in env' do
expect(transaction.labels).to eq({})
end
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index ae20e351761..21a1b3fda75 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -501,4 +501,100 @@ RSpec.describe Ci::JobArtifact do
end
end
end
+
+ describe '.file_types' do
+ context 'all file types have corresponding limit' do
+ let_it_be(:plan_limits) { create(:plan_limits) }
+
+ where(:file_type) do
+ described_class.file_types.keys
+ end
+
+ with_them do
+ let(:limit_name) { "#{described_class::PLAN_LIMIT_PREFIX}#{file_type}" }
+
+ it { expect(plan_limits.attributes).to include(limit_name), file_type_limit_failure_message(file_type, limit_name) }
+ end
+ end
+ end
+
+ describe '.max_artifact_size' do
+ let(:build) { create(:ci_build) }
+
+ subject(:max_size) { described_class.max_artifact_size(type: artifact_type, project: build.project) }
+
+ context 'when file type is supported' do
+ let(:project_closest_setting) { 1024 }
+ let(:artifact_type) { 'junit' }
+
+ before do
+ stub_feature_flags(ci_max_artifact_size_per_type: flag_enabled)
+ allow(build.project).to receive(:closest_setting).with(:max_artifacts_size).and_return(project_closest_setting)
+ end
+
+ shared_examples_for 'basing off the project closest setting' do
+ it { is_expected.to eq(project_closest_setting.megabytes.to_i) }
+ end
+
+ shared_examples_for 'basing off the plan limit' do
+ it { is_expected.to eq(max_size_for_type.megabytes.to_i) }
+ end
+
+ context 'and feature flag for custom max size per type is enabled' do
+ let(:flag_enabled) { true }
+ let(:limit_name) { "#{described_class::PLAN_LIMIT_PREFIX}#{artifact_type}" }
+
+ let!(:plan_limits) { create(:plan_limits, :default_plan) }
+
+ context 'and plan limit is disabled for the given artifact type' do
+ before do
+ plan_limits.update!(limit_name => 0)
+ end
+
+ it_behaves_like 'basing off the project closest setting'
+
+ context 'and project closest setting results to zero' do
+ let(:project_closest_setting) { 0 }
+
+ it { is_expected.to eq(0) }
+ end
+ end
+
+ context 'and plan limit is enabled for the given artifact type' do
+ before do
+ plan_limits.update!(limit_name => max_size_for_type)
+ end
+
+ context 'and plan limit is smaller than project setting' do
+ let(:max_size_for_type) { project_closest_setting - 1 }
+
+ it_behaves_like 'basing off the plan limit'
+ end
+
+ context 'and plan limit is larger than project setting' do
+ let(:max_size_for_type) { project_closest_setting + 1 }
+
+ it_behaves_like 'basing off the project closest setting'
+ end
+ end
+ end
+
+ context 'and feature flag for custom max size per type is disabled' do
+ let(:flag_enabled) { false }
+
+ it_behaves_like 'basing off the project closest setting'
+ end
+ end
+ end
+
+ def file_type_limit_failure_message(type, limit_name)
+ <<~MSG
+ The artifact type `#{type}` is missing its counterpart plan limit which is expected to be named `#{limit_name}`.
+
+ Please refer to https://docs.gitlab.com/ee/development/application_limits.html on how to add new plan limit columns.
+
+ Take note that while existing max size plan limits default to 0, succeeding new limits are recommended to have
+ non-zero default values.
+ MSG
+ end
end
diff --git a/spec/models/plan_limits_spec.rb b/spec/models/plan_limits_spec.rb
index 788b7d92bd2..18dd3ca7951 100644
--- a/spec/models/plan_limits_spec.rb
+++ b/spec/models/plan_limits_spec.rb
@@ -3,57 +3,214 @@
require 'spec_helper'
RSpec.describe PlanLimits do
- let(:plan_limits) { create(:plan_limits) }
- let(:model) { ProjectHook }
- let(:count) { model.count }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:plan_limits) { create(:plan_limits) }
+ let(:project_hooks_count) { 2 }
before do
- create(:project_hook)
+ create_list(:project_hook, project_hooks_count, project: project)
end
- context 'without plan limits configured' do
- describe '#exceeded?' do
- it 'does not exceed any relation offset' do
- expect(plan_limits.exceeded?(:project_hooks, model)).to be false
- expect(plan_limits.exceeded?(:project_hooks, count)).to be false
+ describe '#exceeded?' do
+ let(:alternate_limit) { double('an alternate limit value') }
+
+ subject(:exceeded_limit) { plan_limits.exceeded?(:project_hooks, limit_subject, alternate_limit: alternate_limit) }
+
+ before do
+ allow(plan_limits).to receive(:limit_for).with(:project_hooks, alternate_limit: alternate_limit).and_return(limit)
+ end
+
+ shared_examples_for 'comparing limits' do
+ context 'when limit for given name results to a disabled value' do
+ let(:limit) { nil }
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when limit for given name results to a non-disabled value' do
+ context 'and given count is smaller than limit' do
+ let(:limit) { project_hooks_count + 1 }
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'and given count is equal to the limit' do
+ let(:limit) { project_hooks_count }
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'and given count is greater than the limit' do
+ let(:limit) { project_hooks_count - 1 }
+
+ it { is_expected.to eq(true) }
+ end
+ end
+ end
+
+ context 'when given limit subject is an integer' do
+ let(:limit_subject) { project.hooks.count }
+
+ it_behaves_like 'comparing limits'
+ end
+
+ context 'when given limit subject is an ActiveRecord::Relation' do
+ let(:limit_subject) { project.hooks }
+
+ it_behaves_like 'comparing limits'
+ end
+
+ context 'when given limit subject is something else' do
+ let(:limit_subject) { ProjectHook }
+ let(:limit) { 100 }
+
+ it 'raises an error' do
+ expect { exceeded_limit }.to raise_error(ArgumentError)
end
end
end
- context 'with plan limits configured' do
- before do
- plan_limits.update!(project_hooks: 2)
+ describe '#limit_for' do
+ let(:alternate_limit) { nil }
+
+ subject(:limit) { plan_limits.limit_for(:project_hooks, alternate_limit: alternate_limit) }
+
+ context 'when given limit name does not exist' do
+ it 'raises an error' do
+ expect { plan_limits.limit_for(:project_foo) }.to raise_error(described_class::LimitUndefinedError)
+ end
end
- describe '#exceeded?' do
- it 'does not exceed the relation offset' do
- expect(plan_limits.exceeded?(:project_hooks, model)).to be false
- expect(plan_limits.exceeded?(:project_hooks, count)).to be false
+ context 'when given limit name is disabled' do
+ before do
+ plan_limits.update!(project_hooks: 0)
+ end
+
+ it { is_expected.to eq(nil) }
+
+ context 'and alternate_limit is a non-zero integer' do
+ let(:alternate_limit) { 1 }
+
+ it { is_expected.to eq(1) }
+ end
+
+ context 'and alternate_limit is zero' do
+ let(:alternate_limit) { 0 }
+
+ it { is_expected.to eq(nil) }
+ end
+
+ context 'and alternate_limit is a proc that returns non-zero integer' do
+ let(:alternate_limit) { -> { 1 } }
+
+ it { is_expected.to eq(1) }
+ end
+
+ context 'and alternate_limit is a proc that returns zero' do
+ let(:alternate_limit) { -> { 0 } }
+
+ it { is_expected.to eq(nil) }
+ end
+
+ context 'and alternate_limit is a proc that returns nil' do
+ let(:alternate_limit) { -> { nil } }
+
+ it { is_expected.to eq(nil) }
end
end
- context 'with boundary values' do
+ context 'when given limit name is enabled' do
+ let(:plan_limit_value) { 2 }
+
before do
- create(:project_hook)
+ plan_limits.update!(project_hooks: plan_limit_value)
end
- describe '#exceeded?' do
- it 'does exceed the relation offset' do
- expect(plan_limits.exceeded?(:project_hooks, model)).to be true
- expect(plan_limits.exceeded?(:project_hooks, count)).to be true
- end
+ context 'and alternate_limit is a non-zero integer that is bigger than the plan limit' do
+ let(:alternate_limit) { plan_limit_value + 1 }
+
+ it { is_expected.to eq(plan_limit_value) }
+ end
+
+ context 'and alternate_limit is a non-zero integer that is smaller than the plan limit' do
+ let(:alternate_limit) { plan_limit_value - 1 }
+
+ it { is_expected.to eq(alternate_limit) }
+ end
+
+ context 'and alternate_limit is zero' do
+ let(:alternate_limit) { 0 }
+
+ it { is_expected.to eq(plan_limit_value) }
+ end
+
+ context 'and alternate_limit is a proc that returns non-zero integer that is bigger than the plan limit' do
+ let(:alternate_limit) { -> { plan_limit_value + 1 } }
+
+ it { is_expected.to eq(plan_limit_value) }
+ end
+
+ context 'and alternate_limit is a proc that returns non-zero integer that is smaller than the plan limit' do
+ let(:alternate_limit) { -> { plan_limit_value - 1 } }
+
+ it { is_expected.to eq(alternate_limit.call) }
+ end
+
+ context 'and alternate_limit is a proc that returns zero' do
+ let(:alternate_limit) { -> { 0 } }
+
+ it { is_expected.to eq(plan_limit_value) }
+ end
+
+ context 'and alternate_limit is a proc that returns nil' do
+ let(:alternate_limit) { -> { nil } }
+
+ it { is_expected.to eq(plan_limit_value) }
end
end
end
context 'validates default values' do
+ # TODO: For now, these columns have default values set to 0.
+ # Each artifact type listed here have their own matching issues to determine
+ # the actual limit value. In each of those issues, the default value should also be updated to
+ # a non-zero value. Also update existing values of zero to whatever the default value will be.
+ # For a list of the issues, see: https://gitlab.com/gitlab-org/gitlab/-/issues/211378#note_355619970
+ let(:disabled_max_artifact_size_columns) do
+ %w[
+ ci_max_artifact_size_archive
+ ci_max_artifact_size_metadata
+ ci_max_artifact_size_trace
+ ci_max_artifact_size_junit
+ ci_max_artifact_size_sast
+ ci_max_artifact_size_dependency_scanning
+ ci_max_artifact_size_container_scanning
+ ci_max_artifact_size_dast
+ ci_max_artifact_size_codequality
+ ci_max_artifact_size_license_management
+ ci_max_artifact_size_license_scanning
+ ci_max_artifact_size_performance
+ ci_max_artifact_size_metrics
+ ci_max_artifact_size_metrics_referee
+ ci_max_artifact_size_network_referee
+ ci_max_artifact_size_dotenv
+ ci_max_artifact_size_cobertura
+ ci_max_artifact_size_terraform
+ ci_max_artifact_size_accessibility
+ ci_max_artifact_size_cluster_applications
+ ci_max_artifact_size_secret_detection
+ ci_max_artifact_size_requirements
+ ci_max_artifact_size_coverage_fuzzing
+ ]
+ end
+
let(:columns_with_zero) do
%w[
ci_active_pipelines
ci_pipeline_size
ci_active_jobs
storage_size_limit
- ]
+ ] + disabled_max_artifact_size_columns
end
it "has positive values for enabled limits" do
diff --git a/spec/models/project_statistics_spec.rb b/spec/models/project_statistics_spec.rb
index 52c64a6f0cb..3659e6b973e 100644
--- a/spec/models/project_statistics_spec.rb
+++ b/spec/models/project_statistics_spec.rb
@@ -189,26 +189,6 @@ RSpec.describe ProjectStatistics do
statistics.refresh!
end
end
-
- context 'when snippets_size is updated' do
- it 'schedules the aggregation worker' do
- expect(Namespaces::ScheduleAggregationWorker)
- .to receive(:perform_async)
-
- statistics.refresh!(only: [:snippets_size])
- end
-
- context 'when feature flag :namespace_snippets_size_stat is disabled' do
- it 'does not schedules an aggregation worker' do
- stub_feature_flags(namespace_snippets_size_stat: false)
-
- expect(Namespaces::ScheduleAggregationWorker)
- .not_to receive(:perform_async)
-
- statistics.refresh!(only: [:snippets_size])
- end
- end
- end
end
context 'when the column is not namespace relatable' do
diff --git a/spec/requests/api/ci/runner_spec.rb b/spec/requests/api/ci/runner_spec.rb
index 6323d1f68e3..20bd59c5c72 100644
--- a/spec/requests/api/ci/runner_spec.rb
+++ b/spec/requests/api/ci/runner_spec.rb
@@ -1592,8 +1592,105 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
job.run!
end
+ shared_examples_for 'rejecting artifacts that are too large' do
+ let(:filesize) { 100.megabytes.to_i }
+ let(:sample_max_size) { (filesize / 1.megabyte) - 10 } # Set max size to be smaller than file size to trigger error
+
+ shared_examples_for 'failed request' do
+ it 'responds with payload too large error' do
+ send_request
+
+ expect(response).to have_gitlab_http_status(:payload_too_large)
+ end
+ end
+
+ context 'based on plan limit setting' do
+ let(:application_max_size) { sample_max_size + 100 }
+ let(:limit_name) { "#{Ci::JobArtifact::PLAN_LIMIT_PREFIX}archive" }
+
+ before do
+ create(:plan_limits, :default_plan, limit_name => sample_max_size)
+ stub_application_setting(max_artifacts_size: application_max_size)
+ end
+
+ context 'and feature flag ci_max_artifact_size_per_type is enabled' do
+ before do
+ stub_feature_flags(ci_max_artifact_size_per_type: true)
+ end
+
+ it_behaves_like 'failed request'
+ end
+
+ context 'and feature flag ci_max_artifact_size_per_type is disabled' do
+ before do
+ stub_feature_flags(ci_max_artifact_size_per_type: false)
+ end
+
+ it 'bases off the project closest setting' do
+ send_request
+
+ expect(response).to have_gitlab_http_status(success_code)
+ end
+ end
+ end
+
+ context 'based on application setting' do
+ before do
+ stub_application_setting(max_artifacts_size: sample_max_size)
+ end
+
+ it_behaves_like 'failed request'
+ end
+
+ context 'based on root namespace setting' do
+ let(:application_max_size) { sample_max_size + 10 }
+
+ before do
+ stub_application_setting(max_artifacts_size: application_max_size)
+ root_namespace.update!(max_artifacts_size: sample_max_size)
+ end
+
+ it_behaves_like 'failed request'
+ end
+
+ context 'based on child namespace setting' do
+ let(:application_max_size) { sample_max_size + 10 }
+ let(:root_namespace_max_size) { sample_max_size + 10 }
+
+ before do
+ stub_application_setting(max_artifacts_size: application_max_size)
+ root_namespace.update!(max_artifacts_size: root_namespace_max_size)
+ namespace.update!(max_artifacts_size: sample_max_size)
+ end
+
+ it_behaves_like 'failed request'
+ end
+
+ context 'based on project setting' do
+ let(:application_max_size) { sample_max_size + 10 }
+ let(:root_namespace_max_size) { sample_max_size + 10 }
+ let(:child_namespace_max_size) { sample_max_size + 10 }
+
+ before do
+ stub_application_setting(max_artifacts_size: application_max_size)
+ root_namespace.update!(max_artifacts_size: root_namespace_max_size)
+ namespace.update!(max_artifacts_size: child_namespace_max_size)
+ project.update!(max_artifacts_size: sample_max_size)
+ end
+
+ it_behaves_like 'failed request'
+ end
+ end
+
describe 'POST /api/v4/jobs/:id/artifacts/authorize' do
context 'when using token as parameter' do
+ context 'and the artifact is too large' do
+ it_behaves_like 'rejecting artifacts that are too large' do
+ let(:success_code) { :ok }
+ let(:send_request) { authorize_artifacts_with_token_in_params(filesize: filesize) }
+ end
+ end
+
context 'posting artifacts to running job' do
subject do
authorize_artifacts_with_token_in_params
@@ -1651,56 +1748,6 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
end
end
end
-
- context 'when artifact is too large' do
- let(:sample_max_size) { 100 }
-
- shared_examples_for 'rejecting too large artifacts' do
- it 'fails to post' do
- authorize_artifacts_with_token_in_params(filesize: sample_max_size.megabytes.to_i)
-
- expect(response).to have_gitlab_http_status(:payload_too_large)
- end
- end
-
- context 'based on application setting' do
- before do
- stub_application_setting(max_artifacts_size: sample_max_size)
- end
-
- it_behaves_like 'rejecting too large artifacts'
- end
-
- context 'based on root namespace setting' do
- before do
- stub_application_setting(max_artifacts_size: 200)
- root_namespace.update!(max_artifacts_size: sample_max_size)
- end
-
- it_behaves_like 'rejecting too large artifacts'
- end
-
- context 'based on child namespace setting' do
- before do
- stub_application_setting(max_artifacts_size: 200)
- root_namespace.update!(max_artifacts_size: 200)
- namespace.update!(max_artifacts_size: sample_max_size)
- end
-
- it_behaves_like 'rejecting too large artifacts'
- end
-
- context 'based on project setting' do
- before do
- stub_application_setting(max_artifacts_size: 200)
- root_namespace.update!(max_artifacts_size: 200)
- namespace.update!(max_artifacts_size: 200)
- project.update!(max_artifacts_size: sample_max_size)
- end
-
- it_behaves_like 'rejecting too large artifacts'
- end
- end
end
context 'when using token as header' do
@@ -1757,12 +1804,6 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
expect(json_response['ProcessLsif']).to be_truthy
end
- it 'fails to authorize too large artifact' do
- authorize_artifacts_with_token_in_headers(artifact_type: :lsif, filesize: 30.megabytes)
-
- expect(response).to have_gitlab_http_status(:payload_too_large)
- end
-
context 'code_navigation feature flag is disabled' do
it 'does not add ProcessLsif header' do
stub_feature_flags(code_navigation: false)
@@ -1799,6 +1840,32 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
expect { upload_artifacts(file_upload, headers_with_token) }.to change { runner.reload.contacted_at }
end
+ context 'when the artifact is too large' do
+ it_behaves_like 'rejecting artifacts that are too large' do
+ # This filesize validation also happens for non-remote-stored files;
+ # it's just that in those other cases it is hard to stub the filesize
+ # to be more than a megabyte.
+ let!(:fog_connection) do
+ stub_artifacts_object_storage(direct_upload: true)
+ end
+ let(:object) do
+ fog_connection.directories.new(key: 'artifacts').files.create(
+ key: 'tmp/uploads/12312300',
+ body: 'content'
+ )
+ end
+ let(:file_upload) { fog_to_uploaded_file(object) }
+ let(:send_request) do
+ upload_artifacts(file_upload, headers_with_token, 'file.remote_id' => '12312300')
+ end
+ let(:success_code) { :created }
+
+ before do
+ allow(object).to receive(:content_length).and_return(filesize)
+ end
+ end
+ end
+
context 'when artifacts are being stored inside of tmp path' do
before do
# by configuring this path we allow to pass temp file from any path
@@ -1877,16 +1944,6 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
end
end
- context 'when artifacts file is too large' do
- it 'fails to post too large artifact' do
- stub_application_setting(max_artifacts_size: 0)
-
- upload_artifacts(file_upload, headers_with_token)
-
- expect(response).to have_gitlab_http_status(:payload_too_large)
- end
- end
-
context 'when artifacts post request does not contain file' do
it 'fails to post artifacts without file' do
post api("/jobs/#{job.id}/artifacts"), params: {}, headers: headers_with_token
diff --git a/spec/serializers/pipeline_entity_spec.rb b/spec/serializers/pipeline_entity_spec.rb
index 41b34825699..e638b14765b 100644
--- a/spec/serializers/pipeline_entity_spec.rb
+++ b/spec/serializers/pipeline_entity_spec.rb
@@ -261,5 +261,29 @@ RSpec.describe PipelineEntity do
end
end
end
+
+ context 'when pipeline has build report results' do
+ let(:pipeline) { create(:ci_pipeline, :with_report_results, project: project, user: user) }
+
+ context 'when feature is enabled' do
+ before do
+ stub_feature_flags(build_report_summary: true)
+ end
+
+ it 'exposes tests total count' do
+ expect(subject[:tests_total_count]).to eq(2)
+ end
+ end
+
+ context 'when feature is disabled' do
+ before do
+ stub_feature_flags(build_report_summary: false)
+ end
+
+ it 'do not expose tests total count' do
+ expect(subject).not_to include(:tests_total_count)
+ end
+ end
+ end
end
end
diff --git a/spec/serializers/pipeline_serializer_spec.rb b/spec/serializers/pipeline_serializer_spec.rb
index c3a0766e6f7..cdff5f0e001 100644
--- a/spec/serializers/pipeline_serializer_spec.rb
+++ b/spec/serializers/pipeline_serializer_spec.rb
@@ -155,11 +155,25 @@ RSpec.describe PipelineSerializer do
it 'verifies number of queries', :request_store do
recorded = ActiveRecord::QueryRecorder.new { subject }
- expected_queries = Gitlab.ee? ? 43 : 40
+ expected_queries = Gitlab.ee? ? 46 : 43
expect(recorded.count).to be_within(2).of(expected_queries)
expect(recorded.cached_count).to eq(0)
end
+
+ context 'with the :build_report_summary flag turned off' do
+ before do
+ stub_feature_flags(build_report_summary: false)
+ end
+
+ it 'verifies number of queries', :request_store do
+ recorded = ActiveRecord::QueryRecorder.new { subject }
+ expected_queries = Gitlab.ee? ? 43 : 40
+
+ expect(recorded.count).to be_within(2).of(expected_queries)
+ expect(recorded.cached_count).to eq(0)
+ end
+ end
end
context 'with different refs' do
@@ -176,11 +190,25 @@ RSpec.describe PipelineSerializer do
# pipeline. With the same ref this check is cached but if refs are
# different then there is an extra query per ref
# https://gitlab.com/gitlab-org/gitlab-foss/issues/46368
- expected_queries = Gitlab.ee? ? 46 : 43
+ expected_queries = Gitlab.ee? ? 49 : 46
expect(recorded.count).to be_within(2).of(expected_queries)
expect(recorded.cached_count).to eq(0)
end
+
+ context 'with the :build_report_summary flag turned off' do
+ before do
+ stub_feature_flags(build_report_summary: false)
+ end
+
+ it 'verifies number of queries', :request_store do
+ recorded = ActiveRecord::QueryRecorder.new { subject }
+ expected_queries = Gitlab.ee? ? 46 : 43
+
+ expect(recorded.count).to be_within(2).of(expected_queries)
+ expect(recorded.cached_count).to eq(0)
+ end
+ end
end
def create_pipeline(status)
diff --git a/spec/services/ci/create_job_artifacts_service_spec.rb b/spec/services/ci/create_job_artifacts_service_spec.rb
index e431cfd0117..56bbf0a743d 100644
--- a/spec/services/ci/create_job_artifacts_service_spec.rb
+++ b/spec/services/ci/create_job_artifacts_service_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Ci::CreateJobArtifactsService do
let_it_be(:project) { create(:project) }
- let(:service) { described_class.new(project) }
+ let(:service) { described_class.new(job) }
let(:job) { create(:ci_build, project: project) }
let(:artifacts_sha256) { '0' * 64 }
let(:metadata_file) { nil }
@@ -17,7 +17,7 @@ RSpec.describe Ci::CreateJobArtifactsService do
{
'artifact_type' => 'archive',
'artifact_format' => 'zip'
- }
+ }.with_indifferent_access
end
def file_to_upload(path, params = {})
@@ -28,7 +28,7 @@ RSpec.describe Ci::CreateJobArtifactsService do
end
describe '#execute' do
- subject { service.execute(job, artifacts_file, params, metadata_file: metadata_file) }
+ subject { service.execute(artifacts_file, params, metadata_file: metadata_file) }
context 'locking' do
let(:old_job) { create(:ci_build, pipeline: create(:ci_pipeline, project: job.project, ref: job.ref)) }
@@ -150,7 +150,7 @@ RSpec.describe Ci::CreateJobArtifactsService do
{
'artifact_type' => 'dotenv',
'artifact_format' => 'gzip'
- }
+ }.with_indifferent_access
end
it 'calls parse service' do
@@ -186,7 +186,7 @@ RSpec.describe Ci::CreateJobArtifactsService do
{
'artifact_type' => 'cluster_applications',
'artifact_format' => 'gzip'
- }
+ }.with_indifferent_access
end
it 'calls cluster applications parse service' do
diff --git a/yarn.lock b/yarn.lock
index a4de32ca84a..b182c8ce1e7 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -843,15 +843,15 @@
eslint-plugin-vue "^6.2.1"
vue-eslint-parser "^7.0.0"
-"@gitlab/svgs@1.147.0":
- version "1.147.0"
- resolved "https://registry.yarnpkg.com/@gitlab/svgs/-/svgs-1.147.0.tgz#1b2cc986cb3219609136cab641e2c384d724700f"
- integrity sha512-KnjN7ms7bEPajYl7q0nKv7HMKtqR/JxCVSBRGXH5ezkeGKy4wb4yEYtvRK8no7ix+Iw4rc0KTqOwKp9nkl/KdA==
-
-"@gitlab/ui@17.10.1":
- version "17.10.1"
- resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-17.10.1.tgz#b78f39e4f1ee72ac6c8cdf6824f2703e6dd9fc6d"
- integrity sha512-k7jsB+Qc0WlS3Hy8F5456TV2uuOwOR07Qn6PlSI8AB2q3XgT9x95mAV6G6bZ4f2WDNYDrEWPkj+5ySds/IQjfQ==
+"@gitlab/svgs@1.148.0":
+ version "1.148.0"
+ resolved "https://registry.yarnpkg.com/@gitlab/svgs/-/svgs-1.148.0.tgz#cb3fd68249d7e97d0c578bf443459a32370a6dba"
+ integrity sha512-5GJtUNjCBzEdfi1J3jZPr7UUsvZ1KYnzK3VkMPmp+t2GNWHtdqBmi3Y6WKTOWJo8qFIAJO0tIs6w7XMMCIUBCg==
+
+"@gitlab/ui@17.16.0":
+ version "17.16.0"
+ resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-17.16.0.tgz#6c69016234ab669b619096f3f2ba5328c94864b2"
+ integrity sha512-0h/Vvykh7AtgjikOsC86PUQ35P5BWFkHA9aQ/klYwlsjU395C4K/zyEibxsg0fs3jivGyzpQh1pQKwTsZEq/Tw==
dependencies:
"@babel/standalone" "^7.0.0"
"@gitlab/vue-toasted" "^1.3.0"