author    GitLab Bot <gitlab-bot@gitlab.com>  2020-11-19 08:27:35 +0000
committer GitLab Bot <gitlab-bot@gitlab.com>  2020-11-19 08:27:35 +0000
commit    7e9c479f7de77702622631cff2628a9c8dcbc627 (patch)
tree      c8f718a08e110ad7e1894510980d2155a6549197 /app/models/ci
parent    e852b0ae16db4052c1c567d9efa4facc81146e88 (diff)
Add latest changes from gitlab-org/gitlab@13-6-stable-ee (tag: v13.6.0-rc42)
Diffstat (limited to 'app/models/ci')
-rw-r--r--  app/models/ci/bridge.rb                          |  6
-rw-r--r--  app/models/ci/build.rb                           | 24
-rw-r--r--  app/models/ci/build_trace_chunk.rb               | 55
-rw-r--r--  app/models/ci/build_trace_chunks/legacy_fog.rb   | 77
-rw-r--r--  app/models/ci/daily_build_group_report_result.rb | 28
-rw-r--r--  app/models/ci/job_artifact.rb                    |  1
-rw-r--r--  app/models/ci/pipeline.rb                        | 24
-rw-r--r--  app/models/ci/processable.rb                     | 20
-rw-r--r--  app/models/ci/test_case.rb                       | 35
-rw-r--r--  app/models/ci/test_case_failure.rb               | 29
10 files changed, 168 insertions, 131 deletions
diff --git a/app/models/ci/bridge.rb b/app/models/ci/bridge.rb
index 2e725e0baff..5b23cf46fdb 100644
--- a/app/models/ci/bridge.rb
+++ b/app/models/ci/bridge.rb
@@ -7,6 +7,7 @@ module Ci
include Importable
include AfterCommitQueue
include Ci::HasRef
+ extend ::Gitlab::Utils::Override
InvalidBridgeTypeError = Class.new(StandardError)
InvalidTransitionError = Class.new(StandardError)
@@ -203,8 +204,11 @@ module Ci
end
end
+ override :dependency_variables
def dependency_variables
- []
+ return [] unless ::Feature.enabled?(:ci_bridge_dependency_variables, project)
+
+ super
end
def target_revision_ref
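
Editor's note: the bridge change reuses the shared dependency_variables from Ci::Processable (added in processable.rb further down) and gates it behind the ci_bridge_dependency_variables feature flag. A minimal, self-contained sketch of the override-plus-flag pattern, with a hypothetical FeatureFlag standing in for ::Feature:

  # Standalone sketch; FeatureFlag is a hypothetical stand-in for ::Feature.
  module FeatureFlag
    def self.enabled?(_name)
      true # imagine a per-project flag lookup here
    end
  end

  class Processable
    def dependency_variables
      # in GitLab this collects dotenv variables from dependent jobs
      [{ key: 'BUILD_VERSION', value: '1.2.3' }]
    end
  end

  class Bridge < Processable
    def dependency_variables
      return [] unless FeatureFlag.enabled?(:ci_bridge_dependency_variables)

      super # fall through to the shared implementation
    end
  end

  p Bridge.new.dependency_variables
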
diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb
index 9ff70ece947..84abd01786d 100644
--- a/app/models/ci/build.rb
+++ b/app/models/ci/build.rb
@@ -103,6 +103,10 @@ module Ci
)
end
+ scope :in_pipelines, ->(pipelines) do
+ where(pipeline: pipelines)
+ end
+
scope :with_existing_job_artifacts, ->(query) do
where('EXISTS (?)', ::Ci::JobArtifact.select(1).where('ci_builds.id = ci_job_artifacts.job_id').merge(query))
end
@@ -571,14 +575,6 @@ module Ci
end
end
- def dependency_variables
- return [] if all_dependencies.empty?
-
- Gitlab::Ci::Variables::Collection.new.concat(
- Ci::JobVariable.where(job: all_dependencies).dotenv_source
- )
- end
-
def features
{ trace_sections: true }
end
@@ -828,10 +824,6 @@ module Ci
Gitlab::Ci::Build::Credentials::Factory.new(self).create!
end
- def all_dependencies
- dependencies.all
- end
-
def has_valid_build_dependencies?
dependencies.valid?
end
@@ -994,12 +986,6 @@ module Ci
end
end
- def dependencies
- strong_memoize(:dependencies) do
- Ci::BuildDependencies.new(self)
- end
- end
-
def build_data
@build_data ||= Gitlab::DataBuilder::Build.build(self)
end
@@ -1059,7 +1045,7 @@ module Ci
jwt = Gitlab::Ci::Jwt.for_build(self)
variables.append(key: 'CI_JOB_JWT', value: jwt, public: false, masked: true)
- rescue OpenSSL::PKey::RSAError => e
+ rescue OpenSSL::PKey::RSAError, Gitlab::Ci::Jwt::NoSigningKeyError => e
Gitlab::ErrorTracking.track_exception(e)
end
end
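
Editor's note: build.rb gains an in_pipelines scope, widens the JWT rescue to also catch Gitlab::Ci::Jwt::NoSigningKeyError, and sheds its dependency helpers, which move up to Ci::Processable. A hedged usage sketch for the new scope, assuming a GitLab Rails console with a `project` in scope:

  # Hedged usage sketch; assumes a GitLab Rails console and a `project`.
  pipelines = Ci::Pipeline.where(project_id: project.id).limit(10)

  # Passing the relation produces a single WHERE ... IN (subquery), so
  # builds for many pipelines are fetched in one query.
  Ci::Build.in_pipelines(pipelines)
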
diff --git a/app/models/ci/build_trace_chunk.rb b/app/models/ci/build_trace_chunk.rb
index cf6eb159f52..ceefb6a8b8a 100644
--- a/app/models/ci/build_trace_chunk.rb
+++ b/app/models/ci/build_trace_chunk.rb
@@ -22,20 +22,26 @@ module Ci
FailedToPersistDataError = Class.new(StandardError)
- # Note: The ordering of this enum is related to the precedence of persist store.
+ # Note: The ordering of this hash is related to the precedence of persist store.
# The bottom item takes the highest precedence, and the top item takes the lowest precedence.
- enum data_store: {
+ DATA_STORES = {
redis: 1,
database: 2,
fog: 3
- }
+ }.freeze
+
+ STORE_TYPES = DATA_STORES.keys.map do |store|
+ [store, "Ci::BuildTraceChunks::#{store.capitalize}".constantize]
+ end.to_h.freeze
+
+ enum data_store: DATA_STORES
scope :live, -> { redis }
scope :persisted, -> { not_redis.order(:chunk_index) }
class << self
def all_stores
- @all_stores ||= self.data_stores.keys
+ STORE_TYPES.keys
end
def persistable_store
@@ -44,12 +50,11 @@ module Ci
end
def get_store_class(store)
- @stores ||= {}
+ store = store.to_sym
- # Can't memoize this because the feature flag may alter this
- return fog_store_class.new if store.to_sym == :fog
+ raise "Unknown store type: #{store}" unless STORE_TYPES.key?(store)
- @stores[store] ||= "Ci::BuildTraceChunks::#{store.capitalize}".constantize.new
+ STORE_TYPES[store].new
end
##
@@ -78,14 +83,6 @@ module Ci
def metadata_attributes
attribute_names - %w[raw_data]
end
-
- def fog_store_class
- if Feature.enabled?(:ci_trace_new_fog_store, default_enabled: true)
- Ci::BuildTraceChunks::Fog
- else
- Ci::BuildTraceChunks::LegacyFog
- end
- end
end
def data
@@ -108,7 +105,7 @@ module Ci
raise ArgumentError, 'Offset is out of range' if offset < 0 || offset > size
raise ArgumentError, 'Chunk size overflow' if CHUNK_SIZE < (offset + new_data.bytesize)
- in_lock(*lock_params) { unsafe_append_data!(new_data, offset) }
+ in_lock(lock_key, **lock_params) { unsafe_append_data!(new_data, offset) }
schedule_to_persist! if full?
end
@@ -148,12 +145,13 @@ module Ci
# We are using optimistic locking combined with Redis locking to ensure
# that a chunk gets migrated properly.
#
- # We are catching an exception related to an exclusive lock not being
- # acquired because it is creating a lot of noise, and is a result of
- # duplicated workers running in parallel for the same build trace chunk.
+ # We are using the until_executed deduplication strategy for workers,
+ # which should prevent duplicate workers from running in parallel for the
+ # same build trace chunk and raising an exception about the exclusive
+ # lock not being acquired.
#
def persist_data!
- in_lock(*lock_params) do # exclusive Redis lock is acquired first
+ in_lock(lock_key, **lock_params) do # exclusive Redis lock is acquired first
raise FailedToPersistDataError, 'Modified build trace chunk detected' if has_changes_to_save?
self.reset.then do |chunk| # we ensure having latest lock_version
@@ -162,6 +160,8 @@ module Ci
end
rescue FailedToObtainLockError
metrics.increment_trace_operation(operation: :stalled)
+
+ raise FailedToPersistDataError, 'Data migration failed due to a worker duplication'
rescue ActiveRecord::StaleObjectError
raise FailedToPersistDataError, <<~MSG
Data migration race condition detected
@@ -289,11 +289,16 @@ module Ci
build.trace_chunks.maximum(:chunk_index).to_i
end
+ def lock_key
+ "trace_write:#{build_id}:chunks:#{chunk_index}"
+ end
+
def lock_params
- ["trace_write:#{build_id}:chunks:#{chunk_index}",
- { ttl: WRITE_LOCK_TTL,
- retries: WRITE_LOCK_RETRY,
- sleep_sec: WRITE_LOCK_SLEEP }]
+ {
+ ttl: WRITE_LOCK_TTL,
+ retries: WRITE_LOCK_RETRY,
+ sleep_sec: WRITE_LOCK_SLEEP
+ }
end
def metrics
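
Editor's note: two things happen in build_trace_chunk.rb. Store classes are now resolved through a frozen STORE_TYPES map (the ci_trace_new_fog_store flag is gone, so no per-call branching), and in_lock is called with an explicit key plus double-splatted keyword options. The latter matters on Ruby 2.7+, where a trailing hash inside a splatted array is no longer implicitly converted to keyword arguments. A minimal sketch of the calling convention (the lock body is hypothetical):

  # Minimal sketch of the kwargs fix; the lock body is hypothetical.
  def in_lock(key, ttl:, retries:, sleep_sec:)
    # ...acquire an exclusive lock for `key` using ttl/retries/sleep_sec...
    yield
  end

  lock_params = { ttl: 60, retries: 10, sleep_sec: 0.25 }

  # The old `in_lock(*[key, lock_params])` form is deprecated on Ruby 2.7+;
  # the explicit double-splat keeps the options as keyword arguments.
  in_lock("trace_write:1:chunks:0", **lock_params) { :persisted }
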
diff --git a/app/models/ci/build_trace_chunks/legacy_fog.rb b/app/models/ci/build_trace_chunks/legacy_fog.rb
deleted file mode 100644
index b710ed2890b..00000000000
--- a/app/models/ci/build_trace_chunks/legacy_fog.rb
+++ /dev/null
@@ -1,77 +0,0 @@
-# frozen_string_literal: true
-
-module Ci
- module BuildTraceChunks
- class LegacyFog
- def available?
- object_store.enabled
- end
-
- def data(model)
- connection.get_object(bucket_name, key(model))[:body]
- rescue Excon::Error::NotFound
- # If the object does not exist in the object storage, this method returns nil.
- end
-
- def set_data(model, new_data)
- connection.put_object(bucket_name, key(model), new_data)
- end
-
- def append_data(model, new_data, offset)
- if offset > 0
- truncated_data = data(model).to_s.byteslice(0, offset)
- new_data = truncated_data + new_data
- end
-
- set_data(model, new_data)
- new_data.bytesize
- end
-
- def size(model)
- data(model).to_s.bytesize
- end
-
- def delete_data(model)
- delete_keys([[model.build_id, model.chunk_index]])
- end
-
- def keys(relation)
- return [] unless available?
-
- relation.pluck(:build_id, :chunk_index)
- end
-
- def delete_keys(keys)
- keys.each do |key|
- connection.delete_object(bucket_name, key_raw(*key))
- end
- end
-
- private
-
- def key(model)
- key_raw(model.build_id, model.chunk_index)
- end
-
- def key_raw(build_id, chunk_index)
- "tmp/builds/#{build_id.to_i}/chunks/#{chunk_index.to_i}.log"
- end
-
- def bucket_name
- return unless available?
-
- object_store.remote_directory
- end
-
- def connection
- return unless available?
-
- @connection ||= ::Fog::Storage.new(object_store.connection.to_hash.deep_symbolize_keys)
- end
-
- def object_store
- Gitlab.config.artifacts.object_store
- end
- end
- end
-end
diff --git a/app/models/ci/daily_build_group_report_result.rb b/app/models/ci/daily_build_group_report_result.rb
index e6f02f2e4f3..e9f3366b939 100644
--- a/app/models/ci/daily_build_group_report_result.rb
+++ b/app/models/ci/daily_build_group_report_result.rb
@@ -4,6 +4,7 @@ module Ci
class DailyBuildGroupReportResult < ApplicationRecord
extend Gitlab::Ci::Model
+ REPORT_WINDOW = 90.days
PARAM_TYPES = %w[coverage].freeze
belongs_to :last_pipeline, class_name: 'Ci::Pipeline', foreign_key: :last_pipeline_id
@@ -12,13 +13,30 @@ module Ci
validates :data, json_schema: { filename: "daily_build_group_report_result_data" }
scope :with_included_projects, -> { includes(:project) }
+ scope :by_projects, -> (ids) { where(project_id: ids) }
+ scope :with_coverage, -> { where("(data->'coverage') IS NOT NULL") }
+ scope :with_default_branch, -> { where(default_branch: true) }
+ scope :by_date, -> (start_date) { where(date: report_window(start_date)..Date.current) }
- def self.upsert_reports(data)
- upsert_all(data, unique_by: :index_daily_build_group_report_results_unique_columns) if data.any?
- end
+ store_accessor :data, :coverage
+
+ class << self
+ def upsert_reports(data)
+ upsert_all(data, unique_by: :index_daily_build_group_report_results_unique_columns) if data.any?
+ end
+
+ def recent_results(attrs, limit: nil)
+ where(attrs).order(date: :desc, group_name: :asc).limit(limit)
+ end
- def self.recent_results(attrs, limit: nil)
- where(attrs).order(date: :desc, group_name: :asc).limit(limit)
+ def report_window(start_date)
+ default_date = REPORT_WINDOW.ago.to_date
+ date = Date.parse(start_date) rescue default_date
+
+ [date, default_date].max
+ end
end
end
end
+
+Ci::DailyBuildGroupReportResult.prepend_if_ee('EE::Ci::DailyBuildGroupReportResult')
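
Editor's note: the new report_window clamps a caller-supplied start date so queries never reach back more than REPORT_WINDOW (90 days), and unparseable input falls back to the window's edge. A standalone sketch of the clamp, mirroring the method in the diff (plain Ruby plus ActiveSupport):

  require 'active_support/all' # for 90.days and date helpers

  REPORT_WINDOW = 90.days

  def report_window(start_date)
    default_date = REPORT_WINDOW.ago.to_date
    date = Date.parse(start_date) rescue default_date

    [date, default_date].max # never older than the 90-day window
  end

  report_window('2019-01-01')    # => clamped to ~90 days ago
  report_window('not a date')    # => falls back to ~90 days ago
  report_window(Date.today.to_s) # => today, inside the window
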
diff --git a/app/models/ci/job_artifact.rb b/app/models/ci/job_artifact.rb
index 02e17afdab0..7cedd13b407 100644
--- a/app/models/ci/job_artifact.rb
+++ b/app/models/ci/job_artifact.rb
@@ -169,6 +169,7 @@ module Ci
scope :downloadable, -> { where(file_type: DOWNLOADABLE_TYPES) }
scope :unlocked, -> { joins(job: :pipeline).merge(::Ci::Pipeline.unlocked).order(expire_at: :desc) }
+ scope :with_destroy_preloads, -> { includes(project: [:route, :statistics]) }
scope :scoped_project, -> { where('ci_job_artifacts.project_id = projects.id') }
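
Editor's note: with_destroy_preloads eagerly loads each artifact's project together with its route and statistics, which destroy callbacks touch; without it, destroying N artifacts triggers N extra project lookups. A hedged usage sketch, assuming a GitLab Rails console:

  # Hedged usage sketch; assumes a GitLab Rails console.
  Ci::JobArtifact.unlocked.with_destroy_preloads.limit(100).each(&:destroy)
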
diff --git a/app/models/ci/pipeline.rb b/app/models/ci/pipeline.rb
index 684b6387ab1..8707d635e03 100644
--- a/app/models/ci/pipeline.rb
+++ b/app/models/ci/pipeline.rb
@@ -42,9 +42,16 @@ module Ci
belongs_to :external_pull_request
belongs_to :ci_ref, class_name: 'Ci::Ref', foreign_key: :ci_ref_id, inverse_of: :pipelines
- has_internal_id :iid, scope: :project, presence: false, track_if: -> { !importing? }, ensure_if: -> { !importing? }, init: ->(s) do
- s&.project&.all_pipelines&.maximum(:iid) || s&.project&.all_pipelines&.count
- end
+ has_internal_id :iid, scope: :project, presence: false,
+ track_if: -> { !importing? },
+ ensure_if: -> { !importing? },
+ init: ->(pipeline, scope) do
+ if pipeline
+ pipeline.project&.all_pipelines&.maximum(:iid) || pipeline.project&.all_pipelines&.count
+ elsif scope
+ ::Ci::Pipeline.where(**scope).maximum(:iid)
+ end
+ end
has_many :stages, -> { order(position: :asc) }, inverse_of: :pipeline
has_many :statuses, class_name: 'CommitStatus', foreign_key: :commit_id, inverse_of: :pipeline
@@ -270,6 +277,7 @@ module Ci
scope :internal, -> { where(source: internal_sources) }
scope :no_child, -> { where.not(source: :parent_pipeline) }
scope :ci_sources, -> { where(source: Enums::Ci::Pipeline.ci_sources.values) }
+ scope :ci_and_parent_sources, -> { where(source: Enums::Ci::Pipeline.ci_and_parent_sources.values) }
scope :for_user, -> (user) { where(user: user) }
scope :for_sha, -> (sha) { where(sha: sha) }
scope :for_source_sha, -> (source_sha) { where(source_sha: source_sha) }
@@ -347,6 +355,14 @@ module Ci
end
end
+ def self.latest_running_for_ref(ref)
+ newest_first(ref: ref).running.take
+ end
+
+ def self.latest_failed_for_ref(ref)
+ newest_first(ref: ref).failed.take
+ end
+
# Returns a Hash containing the latest pipeline for every given
# commit.
#
@@ -926,7 +942,7 @@ module Ci
def accessibility_reports
Gitlab::Ci::Reports::AccessibilityReports.new.tap do |accessibility_reports|
- builds.latest.with_reports(Ci::JobArtifact.accessibility_reports).each do |build|
+ latest_report_builds(Ci::JobArtifact.accessibility_reports).each do |build|
build.collect_accessibility_reports!(accessibility_reports)
end
end
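
Editor's note: has_internal_id now receives a two-argument init lambda: with a pipeline instance the IID is derived from the project's pipelines, and with only a scope hash it falls back to a direct maximum(:iid) query. The new latest_running_for_ref/latest_failed_for_ref helpers build on the existing newest_first scope. A hedged usage sketch, assuming a GitLab Rails console:

  # Hedged usage sketch; assumes a GitLab Rails console.
  Ci::Pipeline.latest_running_for_ref('master') # newest still-running pipeline on the ref
  Ci::Pipeline.latest_failed_for_ref('master')  # newest failed pipeline on the ref
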
diff --git a/app/models/ci/processable.rb b/app/models/ci/processable.rb
index ac5785d9c91..6aaf6ac530b 100644
--- a/app/models/ci/processable.rb
+++ b/app/models/ci/processable.rb
@@ -103,5 +103,25 @@ module Ci
pipeline.ensure_scheduling_type!
reset
end
+
+ def dependency_variables
+ return [] if all_dependencies.empty?
+
+ Gitlab::Ci::Variables::Collection.new.concat(
+ Ci::JobVariable.where(job: all_dependencies).dotenv_source
+ )
+ end
+
+ def all_dependencies
+ dependencies.all
+ end
+
+ private
+
+ def dependencies
+ strong_memoize(:dependencies) do
+ Ci::BuildDependencies.new(self)
+ end
+ end
end
end
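
Editor's note: moving dependency_variables, all_dependencies, and the memoized Ci::BuildDependencies lookup from Ci::Build into Ci::Processable lets builds and bridges share one implementation, which the bridge override above relies on. A hedged usage sketch, assuming a GitLab Rails console with a `build_id` in scope:

  # Hedged usage sketch; assumes a GitLab Rails console and a `build_id`.
  job = Ci::Build.find(build_id) # a Ci::Bridge now responds the same way
  job.dependency_variables       # => Gitlab::Ci::Variables::Collection of dotenv vars
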
diff --git a/app/models/ci/test_case.rb b/app/models/ci/test_case.rb
new file mode 100644
index 00000000000..19ecc177436
--- /dev/null
+++ b/app/models/ci/test_case.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+module Ci
+ class TestCase < ApplicationRecord
+ extend Gitlab::Ci::Model
+
+ validates :project, :key_hash, presence: true
+
+ has_many :test_case_failures, class_name: 'Ci::TestCaseFailure'
+
+ belongs_to :project
+
+ scope :by_project_and_keys, -> (project, keys) { where(project_id: project.id, key_hash: keys) }
+
+ class << self
+ def find_or_create_by_batch(project, test_case_keys)
+ # Insert records first. Existing ones will be skipped.
+ insert_all(test_case_attrs(project, test_case_keys))
+
+ # Find all matching records now that we are sure they all are persisted.
+ by_project_and_keys(project, test_case_keys)
+ end
+
+ private
+
+ def test_case_attrs(project, test_case_keys)
+ # NOTE: Rails 6.1 will add support for insert_all on relation so that
+ # we will be able to do project.test_cases.insert_all.
+ test_case_keys.map do |hashed_key|
+ { project_id: project.id, key_hash: hashed_key }
+ end
+ end
+ end
+ end
+end
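
Editor's note: find_or_create_by_batch is an idempotent batch upsert: insert_all skips rows that collide with the unique index, and the follow-up select returns every matching record whether it was just inserted or already existed. A hedged usage sketch, assuming a GitLab Rails console; the digest scheme for key_hash is an assumption:

  # Hedged usage sketch; assumes a GitLab Rails console and a `project`.
  require 'digest'

  # Assumption: key_hash is a digest of the suite/case identity.
  keys = ['rspec:spec/models/user_spec.rb:is valid']
           .map { |k| Digest::SHA256.hexdigest(k) }

  Ci::TestCase.find_or_create_by_batch(project, keys)
  # => relation with one Ci::TestCase per key, safe to re-run
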
diff --git a/app/models/ci/test_case_failure.rb b/app/models/ci/test_case_failure.rb
new file mode 100644
index 00000000000..8867b954240
--- /dev/null
+++ b/app/models/ci/test_case_failure.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+module Ci
+ class TestCaseFailure < ApplicationRecord
+ extend Gitlab::Ci::Model
+
+ REPORT_WINDOW = 14.days
+
+ validates :test_case, :build, :failed_at, presence: true
+
+ belongs_to :test_case, class_name: "Ci::TestCase", foreign_key: :test_case_id
+ belongs_to :build, class_name: "Ci::Build", foreign_key: :build_id
+
+ def self.recent_failures_count(project:, test_case_keys:, date_range: REPORT_WINDOW.ago..Time.current)
+ joins(:test_case)
+ .where(
+ ci_test_cases: {
+ project_id: project.id,
+ key_hash: test_case_keys
+ },
+ ci_test_case_failures: {
+ failed_at: date_range
+ }
+ )
+ .group(:key_hash)
+ .count('ci_test_case_failures.id')
+ end
+ end
+end
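
Editor's note: recent_failures_count joins failures to their test cases and returns a per-key_hash tally within the 14-day REPORT_WINDOW, giving callers a quick flakiness signal. A hedged usage sketch, assuming a GitLab Rails console with `project` and `keys` in scope:

  # Hedged usage sketch; assumes a GitLab Rails console.
  Ci::TestCaseFailure.recent_failures_count(
    project: project,
    test_case_keys: keys
  )
  # => { "ab12..." => 3, "cd34..." => 1 } # failures per key_hash, last 14 days
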