summaryrefslogtreecommitdiff
path: root/app/models/ci
diff options
context:
space:
mode:
Diffstat (limited to 'app/models/ci')
-rw-r--r--app/models/ci/bridge.rb12
-rw-r--r--app/models/ci/build.rb52
-rw-r--r--app/models/ci/build_pending_state.rb12
-rw-r--r--app/models/ci/build_trace_chunk.rb88
-rw-r--r--app/models/ci/build_trace_chunks/database.rb2
-rw-r--r--app/models/ci/build_trace_chunks/redis.rb4
-rw-r--r--app/models/ci/daily_build_group_report_result.rb2
-rw-r--r--app/models/ci/job_artifact.rb17
-rw-r--r--app/models/ci/pipeline.rb98
-rw-r--r--app/models/ci/pipeline_artifact.rb29
-rw-r--r--app/models/ci/pipeline_enums.rb73
-rw-r--r--app/models/ci/ref.rb2
-rw-r--r--app/models/ci/runner.rb2
13 files changed, 221 insertions, 172 deletions
diff --git a/app/models/ci/bridge.rb b/app/models/ci/bridge.rb
index 58c26e8c806..1697067f633 100644
--- a/app/models/ci/bridge.rb
+++ b/app/models/ci/bridge.rb
@@ -35,6 +35,10 @@ module Ci
end
end
+ event :pending do
+ transition all => :pending
+ end
+
event :manual do
transition all => :manual
end
@@ -48,6 +52,14 @@ module Ci
raise NotImplementedError
end
+ def self.with_preloads
+ preload(
+ :metadata,
+ downstream_pipeline: [project: [:route, { namespace: :route }]],
+ project: [:namespace]
+ )
+ end
+
def schedule_downstream_pipeline!
raise InvalidBridgeTypeError unless downstream_project
diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb
index af4e6bb0494..99580a52e96 100644
--- a/app/models/ci/build.rb
+++ b/app/models/ci/build.rb
@@ -38,14 +38,17 @@ module Ci
has_one :deployment, as: :deployable, class_name: 'Deployment'
has_one :resource, class_name: 'Ci::Resource', inverse_of: :build
+ has_one :pending_state, class_name: 'Ci::BuildPendingState', inverse_of: :build
has_many :trace_sections, class_name: 'Ci::BuildTraceSection'
- has_many :trace_chunks, class_name: 'Ci::BuildTraceChunk', foreign_key: :build_id
+ has_many :trace_chunks, class_name: 'Ci::BuildTraceChunk', foreign_key: :build_id, inverse_of: :build
has_many :report_results, class_name: 'Ci::BuildReportResult', inverse_of: :build
has_many :job_artifacts, class_name: 'Ci::JobArtifact', foreign_key: :job_id, dependent: :destroy, inverse_of: :job # rubocop:disable Cop/ActiveRecordDependent
has_many :job_variables, class_name: 'Ci::JobVariable', foreign_key: :job_id
has_many :sourced_pipelines, class_name: 'Ci::Sources::Pipeline', foreign_key: :source_job_id
+ has_many :pages_deployments, inverse_of: :ci_build
+
Ci::JobArtifact.file_types.each do |key, value|
has_one :"job_artifacts_#{key}", -> { where(file_type: value) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
end
@@ -90,9 +93,9 @@ module Ci
Ci::BuildMetadata.scoped_build.with_interruptible.select(:id))
end
- scope :unstarted, ->() { where(runner_id: nil) }
- scope :ignore_failures, ->() { where(allow_failure: false) }
- scope :with_downloadable_artifacts, ->() do
+ scope :unstarted, -> { where(runner_id: nil) }
+ scope :ignore_failures, -> { where(allow_failure: false) }
+ scope :with_downloadable_artifacts, -> do
where('EXISTS (?)',
Ci::JobArtifact.select(1)
.where('ci_builds.id = ci_job_artifacts.job_id')
@@ -104,11 +107,11 @@ module Ci
where('EXISTS (?)', ::Ci::JobArtifact.select(1).where('ci_builds.id = ci_job_artifacts.job_id').merge(query))
end
- scope :with_archived_trace, ->() do
+ scope :with_archived_trace, -> do
with_existing_job_artifacts(Ci::JobArtifact.trace)
end
- scope :without_archived_trace, ->() do
+ scope :without_archived_trace, -> do
where('NOT EXISTS (?)', Ci::JobArtifact.select(1).where('ci_builds.id = ci_job_artifacts.job_id').trace)
end
@@ -139,11 +142,11 @@ module Ci
.includes(:metadata, :job_artifacts_metadata)
end
- scope :with_artifacts_not_expired, ->() { with_downloadable_artifacts.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.current) }
- scope :with_expired_artifacts, ->() { with_downloadable_artifacts.where('artifacts_expire_at < ?', Time.current) }
- scope :last_month, ->() { where('created_at > ?', Date.today - 1.month) }
- scope :manual_actions, ->() { where(when: :manual, status: COMPLETED_STATUSES + %i[manual]) }
- scope :scheduled_actions, ->() { where(when: :delayed, status: COMPLETED_STATUSES + %i[scheduled]) }
+ scope :with_artifacts_not_expired, -> { with_downloadable_artifacts.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.current) }
+ scope :with_expired_artifacts, -> { with_downloadable_artifacts.where('artifacts_expire_at < ?', Time.current) }
+ scope :last_month, -> { where('created_at > ?', Date.today - 1.month) }
+ scope :manual_actions, -> { where(when: :manual, status: COMPLETED_STATUSES + %i[manual]) }
+ scope :scheduled_actions, -> { where(when: :delayed, status: COMPLETED_STATUSES + %i[scheduled]) }
scope :ref_protected, -> { where(protected: true) }
scope :with_live_trace, -> { where('EXISTS (?)', Ci::BuildTraceChunk.where('ci_builds.id = ci_build_trace_chunks.build_id').select(1)) }
scope :with_stale_live_trace, -> { with_live_trace.finished_before(12.hours.ago) }
@@ -175,7 +178,6 @@ module Ci
end
scope :queued_before, ->(time) { where(arel_table[:queued_at].lt(time)) }
- scope :order_id_desc, -> { order('ci_builds.id DESC') }
scope :preload_project_and_pipeline_project, -> do
preload(Ci::Pipeline::PROJECT_ROUTE_AND_NAMESPACE_ROUTE,
@@ -213,6 +215,10 @@ module Ci
.execute(build)
# rubocop: enable CodeReuse/ServiceClass
end
+
+ def with_preloads
+ preload(:job_artifacts_archive, :job_artifacts, project: [:namespace])
+ end
end
state_machine :status do
@@ -647,6 +653,10 @@ module Ci
!artifacts_expired? && artifacts_file&.exists?
end
+ def locked_artifacts?
+ pipeline.artifacts_locked? && artifacts_file&.exists?
+ end
+
# This method is similar to #artifacts? but it includes the artifacts
# locking mechanics. A new method was created to prevent breaking existing
# behavior and avoid introducing N+1s.
@@ -867,13 +877,17 @@ module Ci
options.dig(:release)&.any?
end
- def hide_secrets(trace)
+ def hide_secrets(data, metrics = ::Gitlab::Ci::Trace::Metrics.new)
return unless trace
- trace = trace.dup
- Gitlab::Ci::MaskSecret.mask!(trace, project.runners_token) if project
- Gitlab::Ci::MaskSecret.mask!(trace, token) if token
- trace
+ data.dup.tap do |trace|
+ Gitlab::Ci::MaskSecret.mask!(trace, project.runners_token) if project
+ Gitlab::Ci::MaskSecret.mask!(trace, token) if token
+
+ if trace != data
+ metrics.increment_trace_operation(operation: :mutated)
+ end
+ end
end
def serializable_hash(options = {})
@@ -945,6 +959,10 @@ module Ci
var[:value]&.to_i if var
end
+ def remove_pending_state!
+ pending_state.try(:delete)
+ end
+
private
def auto_retry
diff --git a/app/models/ci/build_pending_state.rb b/app/models/ci/build_pending_state.rb
new file mode 100644
index 00000000000..45f323adec2
--- /dev/null
+++ b/app/models/ci/build_pending_state.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+class Ci::BuildPendingState < ApplicationRecord
+ extend Gitlab::Ci::Model
+
+ belongs_to :build, class_name: 'Ci::Build', foreign_key: :build_id
+
+ enum state: Ci::Stage.statuses
+ enum failure_reason: CommitStatus.failure_reasons
+
+ validates :build, presence: true
+end
diff --git a/app/models/ci/build_trace_chunk.rb b/app/models/ci/build_trace_chunk.rb
index 407802baf09..444742062d9 100644
--- a/app/models/ci/build_trace_chunk.rb
+++ b/app/models/ci/build_trace_chunk.rb
@@ -2,14 +2,17 @@
module Ci
class BuildTraceChunk < ApplicationRecord
- include FastDestroyAll
+ extend ::Gitlab::Ci::Model
+ include ::FastDestroyAll
+ include ::Checksummable
include ::Gitlab::ExclusiveLeaseHelpers
- extend Gitlab::Ci::Model
belongs_to :build, class_name: "Ci::Build", foreign_key: :build_id
default_value_for :data_store, :redis
+ after_create { metrics.increment_trace_operation(operation: :chunked) }
+
CHUNK_SIZE = 128.kilobytes
WRITE_LOCK_RETRY = 10
WRITE_LOCK_SLEEP = 0.01.seconds
@@ -25,6 +28,8 @@ module Ci
fog: 3
}
+ scope :live, -> { redis }
+
class << self
def all_stores
@all_stores ||= self.data_stores.keys
@@ -60,8 +65,6 @@ module Ci
end
end
- ##
- # Data is memoized for optimizing #size and #end_offset
def data
@data ||= get_data.to_s
end
@@ -80,11 +83,11 @@ module Ci
in_lock(*lock_params) { unsafe_append_data!(new_data, offset) }
- schedule_to_persist if full?
+ schedule_to_persist! if full?
end
def size
- @size ||= current_store.size(self) || data&.bytesize
+ @size ||= @data&.bytesize || current_store.size(self) || data&.bytesize
end
def start_offset
@@ -100,35 +103,68 @@ module Ci
end
def persist_data!
- in_lock(*lock_params) do # Write operation is atomic
- unsafe_persist_to!(self.class.persistable_store)
- end
+ in_lock(*lock_params) { unsafe_persist_data! }
+ end
+
+ def schedule_to_persist!
+ return if persisted?
+
+ Ci::BuildTraceChunkFlushWorker.perform_async(id)
+ end
+
+ def persisted?
+ !redis?
+ end
+
+ def live?
+ redis?
+ end
+
+ ##
+ # Build trace chunk is final (the last one that we do not expect to ever
+ # become full) when a runner submitted a build pending state and there is
+ # no chunk with higher index in the database.
+ #
+ def final?
+ build.pending_state.present? &&
+ build.trace_chunks.maximum(:chunk_index).to_i == chunk_index
end
private
- def unsafe_persist_to!(new_store)
+ def get_data
+ # Redis and the database return UTF-8 encoded strings by default
+ current_store.data(self)&.force_encoding(Encoding::BINARY)
+ end
+
+ def unsafe_persist_data!(new_store = self.class.persistable_store)
return if data_store == new_store.to_s
- current_data = get_data
+ current_data = data
+ old_store_class = current_store
+ current_size = current_data&.bytesize.to_i
- unless current_data&.bytesize.to_i == CHUNK_SIZE
+ unless current_size == CHUNK_SIZE || final?
raise FailedToPersistDataError, 'Data is not fulfilled in a bucket'
end
- old_store_class = current_store
-
self.raw_data = nil
self.data_store = new_store
+ self.checksum = crc32(current_data)
+
+ ##
+ # We need to persist the data and then save the new store identifier
+ # before we remove the data from the previous store, to make this
+ # operation transaction-safe. `unsafe_set_data!` calls `save!` for this
+ # reason.
+ #
+ # TODO consider using callbacks and state machine to remove old data
+ #
unsafe_set_data!(current_data)
old_store_class.delete_data(self)
end
- def get_data
- current_store.data(self)&.force_encoding(Encoding::BINARY) # Redis/Database return UTF-8 string as default
- end
-
def unsafe_set_data!(value)
raise ArgumentError, 'New data size exceeds chunk size' if value.bytesize > CHUNK_SIZE
@@ -148,6 +184,8 @@ module Ci
end
current_store.append_data(self, value, offset).then do |stored|
+ metrics.increment_trace_operation(operation: :appended)
+
raise ArgumentError, 'Trace appended incorrectly' if stored != new_size
end
@@ -157,16 +195,6 @@ module Ci
save! if changed?
end
- def schedule_to_persist
- return if data_persisted?
-
- Ci::BuildTraceChunkFlushWorker.perform_async(id)
- end
-
- def data_persisted?
- !redis?
- end
-
def full?
size == CHUNK_SIZE
end
@@ -181,5 +209,9 @@ module Ci
retries: WRITE_LOCK_RETRY,
sleep_sec: WRITE_LOCK_SLEEP }]
end
+
+ def metrics
+ @metrics ||= ::Gitlab::Ci::Trace::Metrics.new
+ end
end
end
diff --git a/app/models/ci/build_trace_chunks/database.rb b/app/models/ci/build_trace_chunks/database.rb
index 3b8e23510d9..ea8072099c6 100644
--- a/app/models/ci/build_trace_chunks/database.rb
+++ b/app/models/ci/build_trace_chunks/database.rb
@@ -29,7 +29,7 @@ module Ci
new_data = truncated_data + new_data
end
- model.raw_data = new_data
+ set_data(model, new_data)
model.raw_data.to_s.bytesize
end
diff --git a/app/models/ci/build_trace_chunks/redis.rb b/app/models/ci/build_trace_chunks/redis.rb
index 0ae563f6ce8..58d50b39c11 100644
--- a/app/models/ci/build_trace_chunks/redis.rb
+++ b/app/models/ci/build_trace_chunks/redis.rb
@@ -41,9 +41,9 @@ module Ci
end
end
- def set_data(model, data)
+ def set_data(model, new_data)
Gitlab::Redis::SharedState.with do |redis|
- redis.set(key(model), data, ex: CHUNK_REDIS_TTL)
+ redis.set(key(model), new_data, ex: CHUNK_REDIS_TTL)
end
end
diff --git a/app/models/ci/daily_build_group_report_result.rb b/app/models/ci/daily_build_group_report_result.rb
index d6617b8c2eb..e6f02f2e4f3 100644
--- a/app/models/ci/daily_build_group_report_result.rb
+++ b/app/models/ci/daily_build_group_report_result.rb
@@ -11,6 +11,8 @@ module Ci
validates :data, json_schema: { filename: "daily_build_group_report_result_data" }
+ scope :with_included_projects, -> { includes(:project) }
+
def self.upsert_reports(data)
upsert_all(data, unique_by: :index_daily_build_group_report_results_unique_columns) if data.any?
end
diff --git a/app/models/ci/job_artifact.rb b/app/models/ci/job_artifact.rb
index 75c3ce98c95..8bbb92e319f 100644
--- a/app/models/ci/job_artifact.rb
+++ b/app/models/ci/job_artifact.rb
@@ -12,8 +12,6 @@ module Ci
include FileStoreMounter
extend Gitlab::Ci::Model
- NotSupportedAdapterError = Class.new(StandardError)
-
ignore_columns :locked, remove_after: '2020-07-22', remove_with: '13.4'
TEST_REPORT_FILE_TYPES = %w[junit].freeze
@@ -163,7 +161,6 @@ module Ci
where(file_type: types)
end
- scope :expired, -> (limit) { where('expire_at < ?', Time.current).limit(limit) }
scope :downloadable, -> { where(file_type: DOWNLOADABLE_TYPES) }
scope :unlocked, -> { joins(job: :pipeline).merge(::Ci::Pipeline.unlocked).order(expire_at: :desc) }
@@ -271,16 +268,6 @@ module Ci
end
end
- def each_blob(&blk)
- unless file_format_adapter_class
- raise NotSupportedAdapterError, 'This file format requires a dedicated adapter'
- end
-
- file.open do |stream|
- file_format_adapter_class.new(stream).each_blob(&blk)
- end
- end
-
def self.archived_trace_exists_for?(job_id)
where(job_id: job_id).trace.take&.file&.file&.exists?
end
@@ -298,10 +285,6 @@ module Ci
private
- def file_format_adapter_class
- FILE_FORMAT_ADAPTERS[file_format.to_sym]
- end
-
def set_size
self.size = file.size
end
diff --git a/app/models/ci/pipeline.rb b/app/models/ci/pipeline.rb
index 7762328d274..47eba685afe 100644
--- a/app/models/ci/pipeline.rb
+++ b/app/models/ci/pipeline.rb
@@ -19,6 +19,8 @@ module Ci
PROJECT_ROUTE_AND_NAMESPACE_ROUTE = {
project: [:project_feature, :route, { namespace: :route }]
}.freeze
+ CONFIG_EXTENSION = '.gitlab-ci.yml'
+ DEFAULT_CONFIG_PATH = CONFIG_EXTENSION
BridgeStatusError = Class.new(StandardError)
@@ -104,15 +106,15 @@ module Ci
after_create :keep_around_commits, unless: :importing?
- # We use `Ci::PipelineEnums.sources` here so that EE can more easily extend
+ # We use `Enums::Ci::Pipeline.sources` here so that EE can more easily extend
# this `Hash` with new values.
- enum_with_nil source: ::Ci::PipelineEnums.sources
+ enum_with_nil source: Enums::Ci::Pipeline.sources
- enum_with_nil config_source: ::Ci::PipelineEnums.config_sources
+ enum_with_nil config_source: Enums::Ci::Pipeline.config_sources
- # We use `Ci::PipelineEnums.failure_reasons` here so that EE can more easily
+ # We use `Enums::Ci::Pipeline.failure_reasons` here so that EE can more easily
# extend this `Hash` with new values.
- enum failure_reason: ::Ci::PipelineEnums.failure_reasons
+ enum failure_reason: Enums::Ci::Pipeline.failure_reasons
enum locked: { unlocked: 0, artifacts_locked: 1 }
@@ -229,7 +231,12 @@ module Ci
end
after_transition any => ::Ci::Pipeline.completed_statuses do |pipeline|
- next unless pipeline.bridge_triggered?
+ pipeline.run_after_commit do
+ ::Ci::Pipelines::CreateArtifactWorker.perform_async(pipeline.id)
+ end
+ end
+
+ after_transition any => ::Ci::Pipeline.completed_statuses do |pipeline|
next unless pipeline.bridge_waiting?
pipeline.run_after_commit do
@@ -254,7 +261,7 @@ module Ci
scope :internal, -> { where(source: internal_sources) }
scope :no_child, -> { where.not(source: :parent_pipeline) }
- scope :ci_sources, -> { where(config_source: ::Ci::PipelineEnums.ci_config_sources_values) }
+ scope :ci_sources, -> { where(source: Enums::Ci::Pipeline.ci_sources.values) }
scope :for_user, -> (user) { where(user: user) }
scope :for_sha, -> (sha) { where(sha: sha) }
scope :for_source_sha, -> (source_sha) { where(source_sha: source_sha) }
@@ -483,6 +490,12 @@ module Ci
end
end
+ def git_commit_timestamp
+ strong_memoize(:git_commit_timestamp) do
+ commit.try(:timestamp)
+ end
+ end
+
def before_sha
super || Gitlab::Git::BLANK_SHA
end
@@ -539,12 +552,6 @@ module Ci
end
# rubocop: enable CodeReuse/ServiceClass
- def mark_as_processable_after_stage(stage_idx)
- builds.skipped.after_stage(stage_idx).find_each do |build|
- Gitlab::OptimisticLocking.retry_lock(build, &:process)
- end
- end
-
def lazy_ref_commit
return unless ::Gitlab::Ci::Features.pipeline_latest?
@@ -647,7 +654,7 @@ module Ci
def config_path
return unless repository_source? || unknown_source?
- project.ci_config_path.presence || '.gitlab-ci.yml'
+ project.ci_config_path_or_default
end
def has_yaml_errors?
@@ -669,8 +676,10 @@ module Ci
messages.select(&:error?)
end
- def warning_messages
- messages.select(&:warning?)
+ def warning_messages(limit: nil)
+ messages.select(&:warning?).tap do |warnings|
+ break warnings.take(limit) if limit
+ end
end
# Manually set the notes for a Ci::Pipeline
@@ -766,6 +775,7 @@ module Ci
variables.append(key: 'CI_COMMIT_TITLE', value: git_commit_full_title.to_s)
variables.append(key: 'CI_COMMIT_DESCRIPTION', value: git_commit_description.to_s)
variables.append(key: 'CI_COMMIT_REF_PROTECTED', value: (!!protected_ref?).to_s)
+ variables.append(key: 'CI_COMMIT_TIMESTAMP', value: git_commit_timestamp.to_s)
# legacy variables
variables.append(key: 'CI_BUILD_REF', value: sha)
@@ -810,11 +820,17 @@ module Ci
all_merge_requests.order(id: :desc)
end
- # If pipeline is a child of another pipeline, include the parent
- # and the siblings, otherwise return only itself and children.
def same_family_pipeline_ids
- parent = parent_pipeline || self
- [parent.id] + parent.child_pipelines.pluck(:id)
+ if ::Gitlab::Ci::Features.child_of_child_pipeline_enabled?(project)
+ ::Gitlab::Ci::PipelineObjectHierarchy.new(
+ base_and_ancestors(same_project: true), options: { same_project: true }
+ ).base_and_descendants.select(:id)
+ else
+ # If pipeline is a child of another pipeline, include the parent
+ # and the siblings, otherwise return only itself and children.
+ parent = parent_pipeline || self
+ [parent.id] + parent.child_pipelines.pluck(:id)
+ end
end
def bridge_triggered?
@@ -858,12 +874,26 @@ module Ci
builds.latest.with_reports(reports_scope)
end
+ def builds_with_coverage
+ builds.with_coverage
+ end
+
def has_reports?(reports_scope)
complete? && latest_report_builds(reports_scope).exists?
end
+ def has_coverage_reports?
+ pipeline_artifacts&.has_code_coverage?
+ end
+
+ def can_generate_coverage_reports?
+ has_reports?(Ci::JobArtifact.coverage_reports)
+ end
+
def test_report_summary
- Gitlab::Ci::Reports::TestReportSummary.new(latest_builds_report_results)
+ strong_memoize(:test_report_summary) do
+ Gitlab::Ci::Reports::TestReportSummary.new(latest_builds_report_results)
+ end
end
def test_reports
@@ -1008,7 +1038,11 @@ module Ci
end
def cacheable?
- Ci::PipelineEnums.ci_config_sources.key?(config_source.to_sym)
+ !dangling?
+ end
+
+ def dangling?
+ Enums::Ci::Pipeline.dangling_sources.key?(source.to_sym)
end
def source_ref_path
@@ -1029,6 +1063,26 @@ module Ci
self.ci_ref = Ci::Ref.ensure_for(self)
end
+ def base_and_ancestors(same_project: false)
+ # Without using `unscoped`, caller scope is also included into the query.
+ # Using `unscoped` here will be redundant after Rails 6.1
+ ::Gitlab::Ci::PipelineObjectHierarchy
+ .new(self.class.unscoped.where(id: id), options: { same_project: same_project })
+ .base_and_ancestors
+ end
+
+ # We need `base_and_ancestors` in a specific order to "break" when needed.
+ # If we use `find_each`, then the order is broken.
+ # rubocop:disable Rails/FindEach
+ def reset_ancestor_bridges!
+ base_and_ancestors.includes(:source_bridge).each do |pipeline|
+ break unless pipeline.bridge_waiting?
+
+ pipeline.source_bridge.pending!
+ end
+ end
+ # rubocop:enable Rails/FindEach
+
private
def add_message(severity, content)
diff --git a/app/models/ci/pipeline_artifact.rb b/app/models/ci/pipeline_artifact.rb
index e7f51977ccd..b6db8cad667 100644
--- a/app/models/ci/pipeline_artifact.rb
+++ b/app/models/ci/pipeline_artifact.rb
@@ -5,33 +5,44 @@
module Ci
class PipelineArtifact < ApplicationRecord
extend Gitlab::Ci::Model
+ include UpdateProjectStatistics
include Artifactable
include FileStoreMounter
-
- FILE_STORE_SUPPORTED = [
- ObjectStorage::Store::LOCAL,
- ObjectStorage::Store::REMOTE
- ].freeze
+ include Presentable
FILE_SIZE_LIMIT = 10.megabytes.freeze
+ EXPIRATION_DATE = 1.week.freeze
+
+ DEFAULT_FILE_NAMES = {
+ code_coverage: 'code_coverage.json'
+ }.freeze
belongs_to :project, class_name: "Project", inverse_of: :pipeline_artifacts
belongs_to :pipeline, class_name: "Ci::Pipeline", inverse_of: :pipeline_artifacts
validates :pipeline, :project, :file_format, :file, presence: true
- validates :file_store, presence: true, inclusion: { in: FILE_STORE_SUPPORTED }
+ validates :file_store, presence: true, inclusion: { in: ObjectStorage::SUPPORTED_STORES }
validates :size, presence: true, numericality: { less_than_or_equal_to: FILE_SIZE_LIMIT }
validates :file_type, presence: true
mount_file_store_uploader Ci::PipelineArtifactUploader
- before_save :set_size, if: :file_changed?
+
+ update_project_statistics project_statistics_name: :pipeline_artifacts_size
enum file_type: {
code_coverage: 1
}
- def set_size
- self.size = file.size
+ def self.has_code_coverage?
+ where(file_type: :code_coverage).exists?
+ end
+
+ def self.find_with_code_coverage
+ find_by(file_type: :code_coverage)
+ end
+
+ def present
+ super(presenter_class: "Ci::PipelineArtifacts::#{self.file_type.camelize}Presenter".constantize)
end
end
end
diff --git a/app/models/ci/pipeline_enums.rb b/app/models/ci/pipeline_enums.rb
deleted file mode 100644
index 9d108ff0fa4..00000000000
--- a/app/models/ci/pipeline_enums.rb
+++ /dev/null
@@ -1,73 +0,0 @@
-# frozen_string_literal: true
-
-module Ci
- module PipelineEnums
- # Returns the `Hash` to use for creating the `failure_reason` enum for
- # `Ci::Pipeline`.
- def self.failure_reasons
- {
- unknown_failure: 0,
- config_error: 1,
- external_validation_failure: 2
- }
- end
-
- # Returns the `Hash` to use for creating the `sources` enum for
- # `Ci::Pipeline`.
- def self.sources
- {
- unknown: nil,
- push: 1,
- web: 2,
- trigger: 3,
- schedule: 4,
- api: 5,
- external: 6,
- # TODO: Rename `pipeline` to `cross_project_pipeline` in 13.0
- # https://gitlab.com/gitlab-org/gitlab/issues/195991
- pipeline: 7,
- chat: 8,
- webide: 9,
- merge_request_event: 10,
- external_pull_request_event: 11,
- parent_pipeline: 12,
- ondemand_dast_scan: 13
- }
- end
-
- # Returns the `Hash` to use for creating the `config_sources` enum for
- # `Ci::Pipeline`.
- def self.config_sources
- {
- unknown_source: nil,
- repository_source: 1,
- auto_devops_source: 2,
- webide_source: 3,
- remote_source: 4,
- external_project_source: 5,
- bridge_source: 6,
- parameter_source: 7
- }
- end
-
- def self.ci_config_sources
- config_sources.slice(
- :unknown_source,
- :repository_source,
- :auto_devops_source,
- :remote_source,
- :external_project_source
- )
- end
-
- def self.ci_config_sources_values
- ci_config_sources.values
- end
-
- def self.non_ci_config_source_values
- config_sources.values - ci_config_sources.values
- end
- end
-end
-
-Ci::PipelineEnums.prepend_if_ee('EE::Ci::PipelineEnums')
diff --git a/app/models/ci/ref.rb b/app/models/ci/ref.rb
index 3d8823728e7..6e9b8416c10 100644
--- a/app/models/ci/ref.rb
+++ b/app/models/ci/ref.rb
@@ -33,8 +33,6 @@ module Ci
state :still_failing, value: 5
after_transition any => [:fixed, :success] do |ci_ref|
- next unless ::Gitlab::Ci::Features.keep_latest_artifacts_for_ref_enabled?(ci_ref.project)
-
ci_ref.run_after_commit do
Ci::PipelineSuccessUnlockArtifactsWorker.perform_async(ci_ref.last_finished_pipeline_id)
end
diff --git a/app/models/ci/runner.rb b/app/models/ci/runner.rb
index 00ee45740bd..86879b9dc68 100644
--- a/app/models/ci/runner.rb
+++ b/app/models/ci/runner.rb
@@ -52,7 +52,7 @@ module Ci
has_many :runner_namespaces, inverse_of: :runner
has_many :groups, through: :runner_namespaces
- has_one :last_build, ->() { order('id DESC') }, class_name: 'Ci::Build'
+ has_one :last_build, -> { order('id DESC') }, class_name: 'Ci::Build'
before_save :ensure_token