Diffstat (limited to 'app/models/ci')
-rw-r--r--  app/models/ci/bridge.rb                                  |   2
-rw-r--r--  app/models/ci/build.rb                                   | 132
-rw-r--r--  app/models/ci/build_dependencies.rb                      |   2
-rw-r--r--  app/models/ci/build_metadata.rb                          |   7
-rw-r--r--  app/models/ci/build_trace_chunk.rb                       |  33
-rw-r--r--  app/models/ci/build_trace_chunks/database.rb             |   4
-rw-r--r--  app/models/ci/build_trace_chunks/fog.rb                  |  12
-rw-r--r--  app/models/ci/build_trace_chunks/redis.rb                |  87
-rw-r--r--  app/models/ci/build_trace_chunks/redis_base.rb           |  90
-rw-r--r--  app/models/ci/build_trace_chunks/redis_trace_chunks.rb   |  13
-rw-r--r--  app/models/ci/build_trace_section.rb                     |   3
-rw-r--r--  app/models/ci/job_artifact.rb                            |  18
-rw-r--r--  app/models/ci/job_token/project_scope_link.rb            |  33
-rw-r--r--  app/models/ci/job_token/scope.rb                         |  43
-rw-r--r--  app/models/ci/pending_build.rb                           |  18
-rw-r--r--  app/models/ci/pipeline.rb                                |  20
-rw-r--r--  app/models/ci/pipeline_schedule.rb                       |  12
-rw-r--r--  app/models/ci/processable.rb                             |   2
-rw-r--r--  app/models/ci/runner.rb                                  |  98
-rw-r--r--  app/models/ci/runner_namespace.rb                        |   5
-rw-r--r--  app/models/ci/runner_project.rb                          |   5
-rw-r--r--  app/models/ci/running_build.rb                           |  28
-rw-r--r--  app/models/ci/stage.rb                                   |   3
23 files changed, 478 insertions(+), 192 deletions(-)
diff --git a/app/models/ci/bridge.rb b/app/models/ci/bridge.rb
index 352229c64da..577bca282ef 100644
--- a/app/models/ci/bridge.rb
+++ b/app/models/ci/bridge.rb
@@ -163,7 +163,7 @@ module Ci
def expanded_environment_name
end
- def instantized_environment
+ def persisted_environment
end
def execute_hooks
diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb
index 46fc87a6ea8..fdfffd9b0cd 100644
--- a/app/models/ci/build.rb
+++ b/app/models/ci/build.rb
@@ -11,6 +11,7 @@ module Ci
include Importable
include Ci::HasRef
include IgnorableColumns
+ include TaggableQueries
BuildArchivedError = Class.new(StandardError)
@@ -37,6 +38,8 @@ module Ci
has_one :deployment, as: :deployable, class_name: 'Deployment'
has_one :pending_state, class_name: 'Ci::BuildPendingState', inverse_of: :build
+ has_one :queuing_entry, class_name: 'Ci::PendingBuild', foreign_key: :build_id
+ has_one :runtime_metadata, class_name: 'Ci::RunningBuild', foreign_key: :build_id
has_many :trace_sections, class_name: 'Ci::BuildTraceSection'
has_many :trace_chunks, class_name: 'Ci::BuildTraceChunk', foreign_key: :build_id, inverse_of: :build
has_many :report_results, class_name: 'Ci::BuildReportResult', inverse_of: :build
@@ -88,16 +91,6 @@ module Ci
end
end
- # Initializing an object instead of fetching `persisted_environment` for avoiding unnecessary queries.
- # We're planning to introduce a direct relationship between build and environment
- # in https://gitlab.com/gitlab-org/gitlab/-/issues/326445 to let us to preload
- # in batch.
- def instantized_environment
- return unless has_environment?
-
- ::Environment.new(project: self.project, name: self.expanded_environment_name)
- end
-
serialize :options # rubocop:disable Cop/ActiveRecordSerialize
serialize :yaml_variables, Gitlab::Serializer::Ci::Variables # rubocop:disable Cop/ActiveRecordSerialize
@@ -212,6 +205,8 @@ module Ci
end
scope :with_coverage, -> { where.not(coverage: nil) }
+ scope :without_coverage, -> { where(coverage: nil) }
+ scope :with_coverage_regex, -> { where.not(coverage_regex: nil) }
scope :for_project, -> (project_id) { where(project_id: project_id) }
@@ -222,6 +217,8 @@ module Ci
before_save :ensure_token
before_destroy { unscoped_project }
+ after_save :stick_build_if_status_changed
+
after_create unless: :importing? do |build|
run_after_commit { BuildHooksWorker.perform_async(build.id) }
end
@@ -304,12 +301,35 @@ module Ci
end
end
- after_transition any => [:pending] do |build|
+ # rubocop:disable CodeReuse/ServiceClass
+ after_transition any => [:pending] do |build, transition|
+ Ci::UpdateBuildQueueService.new.push(build, transition)
+
build.run_after_commit do
BuildQueueWorker.perform_async(id)
end
end
+ after_transition pending: any do |build, transition|
+ Ci::UpdateBuildQueueService.new.pop(build, transition)
+ end
+
+ after_transition any => [:running] do |build, transition|
+ Ci::UpdateBuildQueueService.new.track(build, transition)
+ end
+
+ after_transition running: any do |build, transition|
+ Ci::UpdateBuildQueueService.new.untrack(build, transition)
+
+ Ci::BuildRunnerSession.where(build: build).delete_all
+ end
+
+ # rubocop:enable CodeReuse/ServiceClass
+ #
+ after_transition pending: :running do |build|
+ build.ensure_metadata.update_timeout_state
+ end
+
after_transition pending: :running do |build|
build.deployment&.run
@@ -362,14 +382,6 @@ module Ci
end
end
- after_transition pending: :running do |build|
- build.ensure_metadata.update_timeout_state
- end
-
- after_transition running: any do |build|
- Ci::BuildRunnerSession.where(build: build).delete_all
- end
-
after_transition any => [:skipped, :canceled] do |build, transition|
if transition.to_name == :skipped
build.deployment&.skip
@@ -379,6 +391,33 @@ module Ci
end
end
+ def self.build_matchers(project)
+ unique_params = [
+ :protected,
+ Arel.sql("(#{arel_tag_names_array.to_sql})")
+ ]
+
+ group(*unique_params).pluck('array_agg(id)', *unique_params).map do |values|
+ Gitlab::Ci::Matching::BuildMatcher.new({
+ build_ids: values[0],
+ protected: values[1],
+ tag_list: values[2],
+ project: project
+ })
+ end
+ end
+
+ def build_matcher
+ strong_memoize(:build_matcher) do
+ Gitlab::Ci::Matching::BuildMatcher.new({
+ protected: protected?,
+ tag_list: tag_list,
+ build_ids: [id],
+ project: project
+ })
+ end
+ end
+
def auto_retry_allowed?
auto_retry.allowed?
end
@@ -442,7 +481,13 @@ module Ci
end
def retryable?
- !archived? && (success? || failed? || canceled?)
+ if Feature.enabled?(:prevent_retry_of_retried_jobs, project, default_enabled: :yaml)
+ return false if retried? || archived?
+
+ success? || failed? || canceled?
+ else
+ !archived? && (success? || failed? || canceled?)
+ end
end
def retries_count
@@ -560,6 +605,8 @@ module Ci
variables.concat(persisted_environment.predefined_variables)
+ variables.append(key: 'CI_ENVIRONMENT_ACTION', value: environment_action)
+
# Here we're passing unexpanded environment_url for runner to expand,
# and we need to make sure that CI_ENVIRONMENT_NAME and
# CI_ENVIRONMENT_SLUG so on are available for the URL be expanded.
@@ -716,22 +763,14 @@ module Ci
end
def any_runners_online?
- if Feature.enabled?(:runners_cached_states, project, default_enabled: :yaml)
- cache_for_online_runners do
- project.any_online_runners? { |runner| runner.match_build_if_online?(self) }
- end
- else
- project.any_active_runners? { |runner| runner.match_build_if_online?(self) }
+ cache_for_online_runners do
+ project.any_online_runners? { |runner| runner.match_build_if_online?(self) }
end
end
def any_runners_available?
- if Feature.enabled?(:runners_cached_states, project, default_enabled: :yaml)
- cache_for_available_runners do
- project.active_runners.exists?
- end
- else
- project.any_active_runners?
+ cache_for_available_runners do
+ project.active_runners.exists?
end
end
@@ -1039,6 +1078,28 @@ module Ci
options.dig(:allow_failure_criteria, :exit_codes).present?
end
+ def create_queuing_entry!
+ ::Ci::PendingBuild.upsert_from_build!(self)
+ end
+
+ ##
+ # We can have only one queuing entry or running build tracking entry,
+ # because there is a unique index on `build_id` in each table, but we need
+ # a relation to remove these entries more efficiently in a single statement
+ # without actually loading data.
+ #
+ def all_queuing_entries
+ ::Ci::PendingBuild.where(build_id: self.id)
+ end
+
+ def all_runtime_metadata
+ ::Ci::RunningBuild.where(build_id: self.id)
+ end
+
+ def shared_runner_build?
+ runner&.instance_type?
+ end
+
protected
def run_status_commit_hooks!
@@ -1049,6 +1110,13 @@ module Ci
private
+ def stick_build_if_status_changed
+ return unless saved_change_to_status?
+ return unless running?
+
+ ::Gitlab::Database::LoadBalancing::Sticking.stick(:build, id)
+ end
+
def status_commit_hooks
@status_commit_hooks ||= []
end
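
A rough usage sketch of the queuing and matcher APIs added to Ci::Build above, assuming a Rails console for this codebase; `build` and `pipeline` are placeholder records, not part of the diff:

  # `build` is any persisted Ci::Build, `pipeline` any Ci::Pipeline.
  build.create_queuing_entry!           # upserts a Ci::PendingBuild row keyed by build_id
  build.all_queuing_entries.delete_all  # removes the entry without loading it first
  build.all_runtime_metadata.delete_all

  matcher = build.build_matcher         # Gitlab::Ci::Matching::BuildMatcher
  matcher.tag_list                      # tags used when matching against runners

  # Grouped by (protected, tag set) across a whole relation:
  pipeline.builds.build_matchers(pipeline.project).map(&:build_ids)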
diff --git a/app/models/ci/build_dependencies.rb b/app/models/ci/build_dependencies.rb
index 716d919487d..d39e0411a79 100644
--- a/app/models/ci/build_dependencies.rb
+++ b/app/models/ci/build_dependencies.rb
@@ -143,8 +143,6 @@ module Ci
def specified_cross_pipeline_dependencies
strong_memoize(:specified_cross_pipeline_dependencies) do
- next [] unless Feature.enabled?(:ci_cross_pipeline_artifacts_download, processable.project, default_enabled: true)
-
specified_cross_dependencies.select { |dep| dep[:pipeline] && dep[:artifacts] }
end
end
diff --git a/app/models/ci/build_metadata.rb b/app/models/ci/build_metadata.rb
index 4094bdb26dc..bb2dac5cd43 100644
--- a/app/models/ci/build_metadata.rb
+++ b/app/models/ci/build_metadata.rb
@@ -10,6 +10,7 @@ module Ci
include Presentable
include ChronicDurationAttribute
include Gitlab::Utils::StrongMemoize
+ include IgnorableColumns
self.table_name = 'ci_builds_metadata'
@@ -21,8 +22,8 @@ module Ci
validates :build, presence: true
validates :secrets, json_schema: { filename: 'build_metadata_secrets' }
- serialize :config_options, Serializers::JSON # rubocop:disable Cop/ActiveRecordSerialize
- serialize :config_variables, Serializers::JSON # rubocop:disable Cop/ActiveRecordSerialize
+ serialize :config_options, Serializers::Json # rubocop:disable Cop/ActiveRecordSerialize
+ serialize :config_variables, Serializers::Json # rubocop:disable Cop/ActiveRecordSerialize
chronic_duration_attr_reader :timeout_human_readable, :timeout
@@ -37,6 +38,8 @@ module Ci
job_timeout_source: 4
}
+ ignore_column :build_id_convert_to_bigint, remove_with: '14.2', remove_after: '2021-08-22'
+
def update_timeout_state
timeout = timeout_with_highest_precedence
diff --git a/app/models/ci/build_trace_chunk.rb b/app/models/ci/build_trace_chunk.rb
index 719511bbb8a..25f4a06088d 100644
--- a/app/models/ci/build_trace_chunk.rb
+++ b/app/models/ci/build_trace_chunk.rb
@@ -14,7 +14,13 @@ module Ci
belongs_to :build, class_name: "Ci::Build", foreign_key: :build_id
- default_value_for :data_store, :redis
+ default_value_for :data_store do
+ if Feature.enabled?(:dedicated_redis_trace_chunks, type: :ops)
+ :redis_trace_chunks
+ else
+ :redis
+ end
+ end
after_create { metrics.increment_trace_operation(operation: :chunked) }
@@ -25,22 +31,22 @@ module Ci
FailedToPersistDataError = Class.new(StandardError)
- # Note: The ordering of this hash is related to the precedence of persist store.
- # The bottom item takes the highest precedence, and the top item takes the lowest precedence.
DATA_STORES = {
redis: 1,
database: 2,
- fog: 3
+ fog: 3,
+ redis_trace_chunks: 4
}.freeze
STORE_TYPES = DATA_STORES.keys.to_h do |store|
- [store, "Ci::BuildTraceChunks::#{store.capitalize}".constantize]
+ [store, "Ci::BuildTraceChunks::#{store.to_s.camelize}".constantize]
end.freeze
+ LIVE_STORES = %i[redis redis_trace_chunks].freeze
enum data_store: DATA_STORES
- scope :live, -> { redis }
- scope :persisted, -> { not_redis.order(:chunk_index) }
+ scope :live, -> { where(data_store: LIVE_STORES) }
+ scope :persisted, -> { where.not(data_store: LIVE_STORES).order(:chunk_index) }
class << self
def all_stores
@@ -48,8 +54,7 @@ module Ci
end
def persistable_store
- # get first available store from the back of the list
- all_stores.reverse.find { |store| get_store_class(store).available? }
+ STORE_TYPES[:fog].available? ? :fog : :database
end
def get_store_class(store)
@@ -85,16 +90,10 @@ module Ci
# change the behavior in CE.
#
def with_read_consistency(build, &block)
- return yield unless consistent_reads_enabled?(build)
-
::Gitlab::Database::Consistency
.with_read_consistency(&block)
end
- def consistent_reads_enabled?(build)
- Feature.enabled?(:gitlab_ci_trace_read_consistency, build.project, type: :development, default_enabled: true)
- end
-
##
# Sometimes we do not want to read raw data. This method makes it easier
# to find attributes that are just metadata excluding raw data.
@@ -201,7 +200,7 @@ module Ci
end
def flushed?
- !redis?
+ !live?
end
def migrated?
@@ -209,7 +208,7 @@ module Ci
end
def live?
- redis?
+ LIVE_STORES.include?(data_store.to_sym)
end
def <=>(other)
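
For context, a sketch of how the widened live/persisted split and the new persistence choice behave, assuming a Rails console; `chunk` is a placeholder Ci::BuildTraceChunk:

  Ci::BuildTraceChunk.live       # chunks still held in :redis or :redis_trace_chunks
  Ci::BuildTraceChunk.persisted  # chunks already flushed to :database or :fog

  chunk.live?     # true while data_store is one of LIVE_STORES
  chunk.flushed?  # simply !live?

  # Persisting now prefers object storage when it is configured:
  Ci::BuildTraceChunk.persistable_store
  # => :fog when Ci::BuildTraceChunks::Fog.available?, otherwise :database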
diff --git a/app/models/ci/build_trace_chunks/database.rb b/app/models/ci/build_trace_chunks/database.rb
index 7448afba4c2..895028778a9 100644
--- a/app/models/ci/build_trace_chunks/database.rb
+++ b/app/models/ci/build_trace_chunks/database.rb
@@ -3,10 +3,6 @@
module Ci
module BuildTraceChunks
class Database
- def available?
- true
- end
-
def keys(relation)
[]
end
diff --git a/app/models/ci/build_trace_chunks/fog.rb b/app/models/ci/build_trace_chunks/fog.rb
index cbf0c0a1696..fab85fae33d 100644
--- a/app/models/ci/build_trace_chunks/fog.rb
+++ b/app/models/ci/build_trace_chunks/fog.rb
@@ -3,10 +3,18 @@
module Ci
module BuildTraceChunks
class Fog
- def available?
+ def self.available?
object_store.enabled
end
+ def self.object_store
+ Gitlab.config.artifacts.object_store
+ end
+
+ def available?
+ self.class.available?
+ end
+
def data(model)
files.get(key(model))&.body
rescue Excon::Error::NotFound
@@ -85,7 +93,7 @@ module Ci
end
def object_store
- Gitlab.config.artifacts.object_store
+ self.class.object_store
end
def object_store_raw_config
diff --git a/app/models/ci/build_trace_chunks/redis.rb b/app/models/ci/build_trace_chunks/redis.rb
index 003ec107895..46f275636e1 100644
--- a/app/models/ci/build_trace_chunks/redis.rb
+++ b/app/models/ci/build_trace_chunks/redis.rb
@@ -2,92 +2,11 @@
module Ci
module BuildTraceChunks
- class Redis
- CHUNK_REDIS_TTL = 1.week
- LUA_APPEND_CHUNK = <<~EOS
- local key, new_data, offset = KEYS[1], ARGV[1], ARGV[2]
- local length = new_data:len()
- local expire = #{CHUNK_REDIS_TTL.seconds}
- local current_size = redis.call("strlen", key)
- offset = tonumber(offset)
-
- if offset == 0 then
- -- overwrite everything
- redis.call("set", key, new_data, "ex", expire)
- return redis.call("strlen", key)
- elseif offset > current_size then
- -- offset range violation
- return -1
- elseif offset + length >= current_size then
- -- efficiently append or overwrite and append
- redis.call("expire", key, expire)
- return redis.call("setrange", key, offset, new_data)
- else
- -- append and truncate
- local current_data = redis.call("get", key)
- new_data = current_data:sub(1, offset) .. new_data
- redis.call("set", key, new_data, "ex", expire)
- return redis.call("strlen", key)
- end
- EOS
-
- def available?
- true
- end
-
- def data(model)
- Gitlab::Redis::SharedState.with do |redis|
- redis.get(key(model))
- end
- end
-
- def set_data(model, new_data)
- Gitlab::Redis::SharedState.with do |redis|
- redis.set(key(model), new_data, ex: CHUNK_REDIS_TTL)
- end
- end
-
- def append_data(model, new_data, offset)
- Gitlab::Redis::SharedState.with do |redis|
- redis.eval(LUA_APPEND_CHUNK, keys: [key(model)], argv: [new_data, offset])
- end
- end
-
- def size(model)
- Gitlab::Redis::SharedState.with do |redis|
- redis.strlen(key(model))
- end
- end
-
- def delete_data(model)
- delete_keys([[model.build_id, model.chunk_index]])
- end
-
- def keys(relation)
- relation.pluck(:build_id, :chunk_index)
- end
-
- def delete_keys(keys)
- return if keys.empty?
-
- keys = keys.map { |key| key_raw(*key) }
-
- Gitlab::Redis::SharedState.with do |redis|
- # https://gitlab.com/gitlab-org/gitlab/-/issues/224171
- Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
- redis.del(keys)
- end
- end
- end
-
+ class Redis < RedisBase
private
- def key(model)
- key_raw(model.build_id, model.chunk_index)
- end
-
- def key_raw(build_id, chunk_index)
- "gitlab:ci:trace:#{build_id.to_i}:chunks:#{chunk_index.to_i}"
+ def with_redis
+ Gitlab::Redis::SharedState.with { |redis| yield(redis) }
end
end
end
diff --git a/app/models/ci/build_trace_chunks/redis_base.rb b/app/models/ci/build_trace_chunks/redis_base.rb
new file mode 100644
index 00000000000..3b7a844d122
--- /dev/null
+++ b/app/models/ci/build_trace_chunks/redis_base.rb
@@ -0,0 +1,90 @@
+# frozen_string_literal: true
+
+module Ci
+ module BuildTraceChunks
+ class RedisBase
+ CHUNK_REDIS_TTL = 1.week
+ LUA_APPEND_CHUNK = <<~EOS
+ local key, new_data, offset = KEYS[1], ARGV[1], ARGV[2]
+ local length = new_data:len()
+ local expire = #{CHUNK_REDIS_TTL.seconds}
+ local current_size = redis.call("strlen", key)
+ offset = tonumber(offset)
+
+ if offset == 0 then
+ -- overwrite everything
+ redis.call("set", key, new_data, "ex", expire)
+ return redis.call("strlen", key)
+ elseif offset > current_size then
+ -- offset range violation
+ return -1
+ elseif offset + length >= current_size then
+ -- efficiently append or overwrite and append
+ redis.call("expire", key, expire)
+ return redis.call("setrange", key, offset, new_data)
+ else
+ -- append and truncate
+ local current_data = redis.call("get", key)
+ new_data = current_data:sub(1, offset) .. new_data
+ redis.call("set", key, new_data, "ex", expire)
+ return redis.call("strlen", key)
+ end
+ EOS
+
+ def data(model)
+ with_redis do |redis|
+ redis.get(key(model))
+ end
+ end
+
+ def set_data(model, new_data)
+ with_redis do |redis|
+ redis.set(key(model), new_data, ex: CHUNK_REDIS_TTL)
+ end
+ end
+
+ def append_data(model, new_data, offset)
+ with_redis do |redis|
+ redis.eval(LUA_APPEND_CHUNK, keys: [key(model)], argv: [new_data, offset])
+ end
+ end
+
+ def size(model)
+ with_redis do |redis|
+ redis.strlen(key(model))
+ end
+ end
+
+ def delete_data(model)
+ delete_keys([[model.build_id, model.chunk_index]])
+ end
+
+ def keys(relation)
+ relation.pluck(:build_id, :chunk_index)
+ end
+
+ def delete_keys(keys)
+ return if keys.empty?
+
+ keys = keys.map { |key| key_raw(*key) }
+
+ with_redis do |redis|
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/224171
+ Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
+ redis.del(keys)
+ end
+ end
+ end
+
+ private
+
+ def key(model)
+ key_raw(model.build_id, model.chunk_index)
+ end
+
+ def key_raw(build_id, chunk_index)
+ "gitlab:ci:trace:#{build_id.to_i}:chunks:#{chunk_index.to_i}"
+ end
+ end
+ end
+end
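
A worked illustration of the LUA_APPEND_CHUNK offset handling above, assuming a Rails console; `store` and `chunk` are placeholders (a RedisBase subclass instance and any trace chunk model):

  store = Ci::BuildTraceChunks::Redis.new

  store.set_data(chunk, "hello")         # key holds "hello", size 5
  store.append_data(chunk, " world", 5)  # offset == current size: appends, returns 11
  store.append_data(chunk, "!", 20)      # offset past the end: range violation, returns -1
  store.append_data(chunk, "HELLO", 0)   # offset 0: overwrites everything, returns 5
  store.size(chunk)                      # => 5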
diff --git a/app/models/ci/build_trace_chunks/redis_trace_chunks.rb b/app/models/ci/build_trace_chunks/redis_trace_chunks.rb
new file mode 100644
index 00000000000..06e315b0aaf
--- /dev/null
+++ b/app/models/ci/build_trace_chunks/redis_trace_chunks.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module Ci
+ module BuildTraceChunks
+ class RedisTraceChunks < RedisBase
+ private
+
+ def with_redis
+ Gitlab::Redis::TraceChunks.with { |redis| yield(redis) }
+ end
+ end
+ end
+end
diff --git a/app/models/ci/build_trace_section.rb b/app/models/ci/build_trace_section.rb
index 5091e3ff04a..036f611a61c 100644
--- a/app/models/ci/build_trace_section.rb
+++ b/app/models/ci/build_trace_section.rb
@@ -4,11 +4,14 @@ module Ci
class BuildTraceSection < ApplicationRecord
extend SuppressCompositePrimaryKeyWarning
extend Gitlab::Ci::Model
+ include IgnorableColumns
belongs_to :build, class_name: 'Ci::Build'
belongs_to :project
belongs_to :section_name, class_name: 'Ci::BuildTraceSectionName'
validates :section_name, :build, :project, presence: true, allow_blank: false
+
+ ignore_column :build_id_convert_to_bigint, remove_with: '14.2', remove_after: '2021-08-22'
end
end
diff --git a/app/models/ci/job_artifact.rb b/app/models/ci/job_artifact.rb
index 5248a80f710..6a7a2b3f6bd 100644
--- a/app/models/ci/job_artifact.rb
+++ b/app/models/ci/job_artifact.rb
@@ -18,7 +18,6 @@ module Ci
ACCESSIBILITY_REPORT_FILE_TYPES = %w[accessibility].freeze
NON_ERASABLE_FILE_TYPES = %w[trace].freeze
TERRAFORM_REPORT_FILE_TYPES = %w[terraform].freeze
- UNSUPPORTED_FILE_TYPES = %i[license_management].freeze
SAST_REPORT_TYPES = %w[sast].freeze
SECRET_DETECTION_REPORT_TYPES = %w[secret_detection].freeze
DEFAULT_FILE_NAMES = {
@@ -35,7 +34,6 @@ module Ci
dependency_scanning: 'gl-dependency-scanning-report.json',
container_scanning: 'gl-container-scanning-report.json',
dast: 'gl-dast-report.json',
- license_management: 'gl-license-management-report.json',
license_scanning: 'gl-license-scanning-report.json',
performance: 'performance.json',
browser_performance: 'browser-performance.json',
@@ -45,7 +43,7 @@ module Ci
dotenv: '.env',
cobertura: 'cobertura-coverage.xml',
terraform: 'tfplan.json',
- cluster_applications: 'gl-cluster-applications.json',
+ cluster_applications: 'gl-cluster-applications.json', # DEPRECATED: https://gitlab.com/gitlab-org/gitlab/-/issues/333441
requirements: 'requirements.json',
coverage_fuzzing: 'gl-coverage-fuzzing.json',
api_fuzzing: 'gl-api-fuzzing-report.json'
@@ -74,7 +72,6 @@ module Ci
dependency_scanning: :raw,
container_scanning: :raw,
dast: :raw,
- license_management: :raw,
license_scanning: :raw,
# All these file formats use `raw` as we need to store them uncompressed
@@ -102,7 +99,6 @@ module Ci
dependency_scanning
dotenv
junit
- license_management
license_scanning
lsif
metrics
@@ -124,7 +120,6 @@ module Ci
mount_file_store_uploader JobArtifactUploader
validates :file_format, presence: true, unless: :trace?, on: :create
- validate :validate_supported_file_format!, on: :create
validate :validate_file_format!, unless: :trace?, on: :create
before_save :set_size, if: :file_changed?
@@ -199,8 +194,7 @@ module Ci
container_scanning: 7, ## EE-specific
dast: 8, ## EE-specific
codequality: 9, ## EE-specific
- license_management: 10, ## EE-specific
- license_scanning: 101, ## EE-specific till 13.0
+ license_scanning: 101, ## EE-specific
performance: 11, ## EE-specific till 13.2
metrics: 12, ## EE-specific
metrics_referee: 13, ## runner referees
@@ -233,14 +227,6 @@ module Ci
hashed_path: 2
}
- def validate_supported_file_format!
- return if Feature.disabled?(:drop_license_management_artifact, project, default_enabled: true)
-
- if UNSUPPORTED_FILE_TYPES.include?(self.file_type&.to_sym)
- errors.add(:base, _("File format is no longer supported"))
- end
- end
-
def validate_file_format!
unless TYPE_AND_FORMAT_PAIRS[self.file_type&.to_sym] == self.file_format&.to_sym
errors.add(:base, _('Invalid file format with specified file type'))
diff --git a/app/models/ci/job_token/project_scope_link.rb b/app/models/ci/job_token/project_scope_link.rb
new file mode 100644
index 00000000000..283ad4a190d
--- /dev/null
+++ b/app/models/ci/job_token/project_scope_link.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+# The connection between a source project (which defines the job token scope)
+# and a target project which is the one allowed to be accessed by the job token.
+
+module Ci
+ module JobToken
+ class ProjectScopeLink < ApplicationRecord
+ self.table_name = 'ci_job_token_project_scope_links'
+
+ belongs_to :source_project, class_name: 'Project'
+ belongs_to :target_project, class_name: 'Project'
+ belongs_to :added_by, class_name: 'User'
+
+ scope :from_project, ->(project) { where(source_project: project) }
+ scope :to_project, ->(project) { where(target_project: project) }
+
+ validates :source_project, presence: true
+ validates :target_project, presence: true
+ validate :not_self_referential_link
+
+ private
+
+ def not_self_referential_link
+ return unless source_project && target_project
+
+ if source_project == target_project
+ self.errors.add(:target_project, _("can't be the same as the source project"))
+ end
+ end
+ end
+ end
+end
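
A minimal sketch of how a scope link might be created and queried, assuming a Rails console; `source`, `target` and `user` are placeholder records:

  link = Ci::JobToken::ProjectScopeLink.new(source_project: source,
                                            target_project: target,
                                            added_by: user)
  link.valid?  # false when source == target (self-referential links are rejected)

  Ci::JobToken::ProjectScopeLink.from_project(source).to_project(target).exists?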
diff --git a/app/models/ci/job_token/scope.rb b/app/models/ci/job_token/scope.rb
new file mode 100644
index 00000000000..42cfdc21d66
--- /dev/null
+++ b/app/models/ci/job_token/scope.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+# This model represents the surface where a CI_JOB_TOKEN can be used.
+# A Scope is initialized with the project that the job token belongs to,
+# and indicates what are all the other projects that the token could access.
+#
+# By default a job token can only access its own project, which is the same
+# project that defines the scope.
+# By adding ScopeLinks to the scope we can allow other projects to be accessed
+# by the job token. This works as an allowlist of projects for a job token.
+#
+# If a project is not included in the scope we should not allow the job user
+# to access it since operations using CI_JOB_TOKEN should be considered untrusted.
+
+module Ci
+ module JobToken
+ class Scope
+ attr_reader :source_project
+
+ def initialize(project)
+ @source_project = project
+ end
+
+ def includes?(target_project)
+ # if the setting is disabled any project is considered to be in scope.
+ return true unless source_project.ci_job_token_scope_enabled?
+
+ target_project.id == source_project.id ||
+ Ci::JobToken::ProjectScopeLink.from_project(source_project).to_project(target_project).exists?
+ end
+
+ def all_projects
+ Project.from_union([
+ Project.id_in(source_project),
+ Project.where_exists(
+ Ci::JobToken::ProjectScopeLink
+ .from_project(source_project)
+ .where('projects.id = ci_job_token_project_scope_links.target_project_id'))
+ ], remove_duplicates: false)
+ end
+ end
+ end
+end
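
A minimal sketch of the check a CI_JOB_TOKEN authorization would perform, assuming a Rails console; `current_project` and `other_project` are placeholders:

  scope = Ci::JobToken::Scope.new(current_project)

  scope.includes?(current_project)  # => true, a token always covers its own project
  scope.includes?(other_project)    # true only if the scope setting is disabled or a
                                    # ProjectScopeLink allowlists other_project
  scope.all_projects                # the source project plus all allowlisted targets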
diff --git a/app/models/ci/pending_build.rb b/app/models/ci/pending_build.rb
new file mode 100644
index 00000000000..b9a8a44bd6b
--- /dev/null
+++ b/app/models/ci/pending_build.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+module Ci
+ class PendingBuild < ApplicationRecord
+ extend Gitlab::Ci::Model
+
+ belongs_to :project
+ belongs_to :build, class_name: 'Ci::Build'
+
+ def self.upsert_from_build!(build)
+ entry = self.new(build: build, project: build.project, protected: build.protected?)
+
+ entry.validate!
+
+ self.upsert(entry.attributes.compact, returning: %w[build_id], unique_by: :build_id)
+ end
+ end
+end
diff --git a/app/models/ci/pipeline.rb b/app/models/ci/pipeline.rb
index f0a2c074584..ae06bea5a02 100644
--- a/app/models/ci/pipeline.rb
+++ b/app/models/ci/pipeline.rb
@@ -644,6 +644,10 @@ module Ci
end
end
+ def update_builds_coverage
+ builds.with_coverage_regex.without_coverage.each(&:update_coverage)
+ end
+
def batch_lookup_report_artifact_for_file_type(file_type)
latest_report_artifacts
.values_at(*::Ci::JobArtifact.associated_file_types_for(file_type.to_s))
@@ -660,15 +664,9 @@ module Ci
# Return a hash of file type => array of 1 job artifact
def latest_report_artifacts
::Gitlab::SafeRequestStore.fetch("pipeline:#{self.id}:latest_report_artifacts") do
- # Note we use read_attribute(:project_id) to read the project
- # ID instead of self.project_id. The latter appears to load
- # the Project model. This extra filter doesn't appear to
- # affect query plan but included to ensure we don't leak the
- # wrong informaiton.
::Ci::JobArtifact.where(
id: job_artifacts.with_reports
.select('max(ci_job_artifacts.id) as id')
- .where(project_id: self.read_attribute(:project_id))
.group(:file_type)
)
.preload(:job)
@@ -928,6 +926,12 @@ module Ci
Ci::Build.latest.where(pipeline: self_and_descendants)
end
+ def environments_in_self_and_descendants
+ environment_ids = self_and_descendants.joins(:deployments).select(:'deployments.environment_id')
+
+ Environment.where(id: environment_ids)
+ end
+
# Without using `unscoped`, caller scope is also included into the query.
# Using `unscoped` here will be redundant after Rails 6.1
def self_and_descendants
@@ -1252,6 +1256,10 @@ module Ci
end
end
+ def build_matchers
+ self.builds.build_matchers(project)
+ end
+
private
def add_message(severity, content)
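
A short sketch of the pipeline helpers added above, assuming a Rails console; `pipeline` is a placeholder Ci::Pipeline:

  pipeline.update_builds_coverage
  # recomputes coverage for builds that define coverage_regex but have no coverage yet

  pipeline.environments_in_self_and_descendants
  # Environments deployed to by this pipeline and its child pipelines

  pipeline.build_matchers
  # delegates to Ci::Build.build_matchers(project) for this pipeline's builds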
diff --git a/app/models/ci/pipeline_schedule.rb b/app/models/ci/pipeline_schedule.rb
index 9e5d517c1fe..effe2d95a99 100644
--- a/app/models/ci/pipeline_schedule.rb
+++ b/app/models/ci/pipeline_schedule.rb
@@ -3,6 +3,7 @@
module Ci
class PipelineSchedule < ApplicationRecord
extend Gitlab::Ci::Model
+ extend ::Gitlab::Utils::Override
include Importable
include StripAttribute
include CronSchedulable
@@ -55,6 +56,17 @@ module Ci
variables&.map(&:to_runner_variable) || []
end
+ override :set_next_run_at
+ def set_next_run_at
+ self.next_run_at = ::Ci::PipelineSchedules::CalculateNextRunService # rubocop: disable CodeReuse/ServiceClass
+ .new(project)
+ .execute(self, fallback_method: method(:calculate_next_run_at))
+ end
+
+ def daily_limit
+ project.actual_limits.limit_for(:ci_daily_pipeline_schedule_triggers)
+ end
+
private
def worker_cron_expression
diff --git a/app/models/ci/processable.rb b/app/models/ci/processable.rb
index 15c57550159..e2f257eab25 100644
--- a/app/models/ci/processable.rb
+++ b/app/models/ci/processable.rb
@@ -120,7 +120,7 @@ module Ci
raise NotImplementedError
end
- def instantized_environment
+ def persisted_environment
raise NotImplementedError
end
diff --git a/app/models/ci/runner.rb b/app/models/ci/runner.rb
index 8c877c2b818..71110ef0696 100644
--- a/app/models/ci/runner.rb
+++ b/app/models/ci/runner.rb
@@ -10,6 +10,8 @@ module Ci
include TokenAuthenticatable
include IgnorableColumns
include FeatureGate
+ include Gitlab::Utils::StrongMemoize
+ include TaggableQueries
add_authentication_token_field :token, encrypted: -> { Feature.enabled?(:ci_runners_tokens_optional_encryption, default_enabled: true) ? :optional : :required }
@@ -58,6 +60,7 @@ module Ci
scope :active, -> { where(active: true) }
scope :paused, -> { where(active: false) }
scope :online, -> { where('contacted_at > ?', online_contact_time_deadline) }
+ scope :recent, -> { where('ci_runners.created_at > :date OR ci_runners.contacted_at > :date', date: 3.months.ago) }
# The following query using negation is cheaper than using `contacted_at <= ?`
# because there are less runners online than have been created. The
# resulting query is quickly finding online ones and then uses the regular
@@ -131,6 +134,8 @@ module Ci
end
scope :order_contacted_at_asc, -> { order(contacted_at: :asc) }
+ scope :order_contacted_at_desc, -> { order(contacted_at: :desc) }
+ scope :order_created_at_asc, -> { order(created_at: :asc) }
scope :order_created_at_desc, -> { order(created_at: :desc) }
scope :with_tags, -> { preload(:tags) }
@@ -161,20 +166,17 @@ module Ci
numericality: { greater_than_or_equal_to: 0.0,
message: 'needs to be non-negative' }
+ validates :config, json_schema: { filename: 'ci_runner_config' }
+
# Searches for runners matching the given query.
#
- # This method uses ILIKE on PostgreSQL.
- #
- # This method performs a *partial* match on tokens, thus a query for "a"
- # will match any runner where the token contains the letter "a". As a result
- # you should *not* use this method for non-admin purposes as otherwise users
- # might be able to query a list of all runners.
+ # This method uses ILIKE on PostgreSQL for the description field and performs a full match on tokens.
#
# query - The search query as a String.
#
# Returns an ActiveRecord::Relation.
def self.search(query)
- fuzzy_search(query, [:token, :description])
+ where(token: query).or(fuzzy_search(query, [:description]))
end
def self.online_contact_time_deadline
@@ -190,13 +192,54 @@ module Ci
end
def self.order_by(order)
- if order == 'contacted_asc'
+ case order
+ when 'contacted_asc'
order_contacted_at_asc
+ when 'contacted_desc'
+ order_contacted_at_desc
+ when 'created_at_asc'
+ order_created_at_asc
else
order_created_at_desc
end
end
+ def self.runner_matchers
+ unique_params = [
+ :runner_type,
+ :public_projects_minutes_cost_factor,
+ :private_projects_minutes_cost_factor,
+ :run_untagged,
+ :access_level,
+ Arel.sql("(#{arel_tag_names_array.to_sql})")
+ ]
+
+ # we use distinct to de-duplicate data
+ distinct.pluck(*unique_params).map do |values|
+ Gitlab::Ci::Matching::RunnerMatcher.new({
+ runner_type: values[0],
+ public_projects_minutes_cost_factor: values[1],
+ private_projects_minutes_cost_factor: values[2],
+ run_untagged: values[3],
+ access_level: values[4],
+ tag_list: values[5]
+ })
+ end
+ end
+
+ def runner_matcher
+ strong_memoize(:runner_matcher) do
+ Gitlab::Ci::Matching::RunnerMatcher.new({
+ runner_type: runner_type,
+ public_projects_minutes_cost_factor: public_projects_minutes_cost_factor,
+ private_projects_minutes_cost_factor: private_projects_minutes_cost_factor,
+ run_untagged: run_untagged,
+ access_level: access_level,
+ tag_list: tag_list
+ })
+ end
+ end
+
def assign_to(project, current_user = nil)
if instance_type?
self.runner_type = :project_type
@@ -298,6 +341,14 @@ module Ci
end
def tick_runner_queue
+ ##
+ # We only stick a runner to primary database to be able to detect the
+ # replication lag in `EE::Ci::RegisterJobService#execute`. The
+ # intention here is not to execute `Ci::RegisterJobService#execute` on
+ # the primary database.
+ #
+ ::Gitlab::Database::LoadBalancing::Sticking.stick(:runner, id)
+
SecureRandom.hex.tap do |new_update|
::Gitlab::Workhorse.set_key_and_notify(runner_queue_key, new_update,
expire: RUNNER_QUEUE_EXPIRY_TIME, overwrite: true)
@@ -315,21 +366,24 @@ module Ci
end
def heartbeat(values)
- values = values&.slice(:version, :revision, :platform, :architecture, :ip_address) || {}
- values[:contacted_at] = Time.current
+ ##
+ # We can safely ignore writes performed by a runner heartbeat. We do
+ # not want to upgrade database connection proxy to use the primary
+ # database after heartbeat write happens.
+ #
+ ::Gitlab::Database::LoadBalancing::Session.without_sticky_writes do
+ values = values&.slice(:version, :revision, :platform, :architecture, :ip_address, :config) || {}
+ values[:contacted_at] = Time.current
- cache_attributes(values)
+ cache_attributes(values)
- # We save data without validation, it will always change due to `contacted_at`
- self.update_columns(values) if persist_cached_data?
+ # We save data without validation, it will always change due to `contacted_at`
+ self.update_columns(values) if persist_cached_data?
+ end
end
def pick_build!(build)
- if Feature.enabled?(:ci_reduce_queries_when_ticking_runner_queue, self, default_enabled: :yaml)
- tick_runner_queue if matches_build?(build)
- else
- tick_runner_queue if can_pick?(build)
- end
+ tick_runner_queue if matches_build?(build)
end
def uncached_contacted_at
@@ -395,13 +449,7 @@ module Ci
end
def matches_build?(build)
- return false if self.ref_protected? && !build.protected?
-
- accepting_tags?(build)
- end
-
- def accepting_tags?(build)
- (run_untagged? || build.has_tags?) && (build.tag_list - tag_list).empty?
+ runner_matcher.matches?(build.build_matcher)
end
end
end
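
A few hedged examples of the runner changes above, assuming a Rails console; `runner` and `build` are placeholders:

  Ci::Runner.search('secret-token')    # exact token match OR an ILIKE match on description
  Ci::Runner.order_by('contacted_desc')
  Ci::Runner.recent                    # created or contacted within the last 3 months

  # Matching is now delegated to matcher value objects:
  runner.runner_matcher.matches?(build.build_matcher)
  # equivalent to runner.matches_build?(build)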
diff --git a/app/models/ci/runner_namespace.rb b/app/models/ci/runner_namespace.rb
index f819dda207d..41a4c9012ff 100644
--- a/app/models/ci/runner_namespace.rb
+++ b/app/models/ci/runner_namespace.rb
@@ -7,6 +7,7 @@ module Ci
self.limit_name = 'ci_registered_group_runners'
self.limit_scope = :group
+ self.limit_relation = :recent_runners
self.limit_feature_flag = :ci_runner_limits
belongs_to :runner, inverse_of: :runner_namespaces
@@ -16,6 +17,10 @@ module Ci
validates :runner_id, uniqueness: { scope: :namespace_id }
validate :group_runner_type
+ def recent_runners
+ ::Ci::Runner.belonging_to_group(namespace_id).recent
+ end
+
private
def group_runner_type
diff --git a/app/models/ci/runner_project.rb b/app/models/ci/runner_project.rb
index c26b8183b52..af2595ce4af 100644
--- a/app/models/ci/runner_project.rb
+++ b/app/models/ci/runner_project.rb
@@ -7,11 +7,16 @@ module Ci
self.limit_name = 'ci_registered_project_runners'
self.limit_scope = :project
+ self.limit_relation = :recent_runners
self.limit_feature_flag = :ci_runner_limits
belongs_to :runner, inverse_of: :runner_projects
belongs_to :project, inverse_of: :runner_projects
+ def recent_runners
+ ::Ci::Runner.belonging_to_project(project_id).recent
+ end
+
validates :runner_id, uniqueness: { scope: :project_id }
end
end
diff --git a/app/models/ci/running_build.rb b/app/models/ci/running_build.rb
new file mode 100644
index 00000000000..9446cfa05da
--- /dev/null
+++ b/app/models/ci/running_build.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+module Ci
+ class RunningBuild < ApplicationRecord
+ extend Gitlab::Ci::Model
+
+ belongs_to :project
+ belongs_to :build, class_name: 'Ci::Build'
+ belongs_to :runner, class_name: 'Ci::Runner'
+
+ enum runner_type: ::Ci::Runner.runner_types
+
+ def self.upsert_shared_runner_build!(build)
+ unless build.shared_runner_build?
+ raise ArgumentError, 'build has not been picked by a shared runner'
+ end
+
+ entry = self.new(build: build,
+ project: build.project,
+ runner: build.runner,
+ runner_type: build.runner.runner_type)
+
+ entry.validate!
+
+ self.upsert(entry.attributes.compact, returning: %w[build_id], unique_by: :build_id)
+ end
+ end
+end
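
A minimal sketch of when the runtime-metadata entry gets written, assuming a Rails console; `build` is a placeholder Ci::Build that a runner has picked:

  if build.shared_runner_build?  # i.e. build.runner&.instance_type?
    Ci::RunningBuild.upsert_shared_runner_build!(build)  # one row per build_id
  end
  # Calling upsert_shared_runner_build! for a non-shared runner raises ArgumentError.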
diff --git a/app/models/ci/stage.rb b/app/models/ci/stage.rb
index ef920b2d589..d00066b778d 100644
--- a/app/models/ci/stage.rb
+++ b/app/models/ci/stage.rb
@@ -7,6 +7,9 @@ module Ci
include Ci::HasStatus
include Gitlab::OptimisticLocking
include Presentable
+ include IgnorableColumns
+
+ ignore_column :id_convert_to_bigint, remove_with: '14.2', remove_after: '2021-08-22'
enum status: Ci::HasStatus::STATUSES_ENUM