author     GitLab Bot <gitlab-bot@gitlab.com>  2022-01-18 00:24:03 +0000
committer  GitLab Bot <gitlab-bot@gitlab.com>  2022-01-18 00:24:03 +0000
commit     cfa5faaae0d9ac9a3c1fafb1a41e07b298029f48 (patch)
tree       b7ce1807d00c85cb94cbdc4a031562623e5343d6
parent     ab5b519cca2356dd2b84a7fffc6ddbd6dc43a0ab (diff)
download   gitlab-ce-cfa5faaae0d9ac9a3c1fafb1a41e07b298029f48.tar.gz
Add latest changes from gitlab-org/gitlab@14-6-stable-ee
-rw-r--r--  .gitlab/issue_templates/Geo Replicate a new Git repository type.md                          2
-rw-r--r--  .gitlab/issue_templates/Geo Replicate a new blob type.md                                    2
-rw-r--r--  app/services/ci/after_requeue_job_service.rb                                               45
-rw-r--r--  config/feature_flags/development/ci_order_subsequent_jobs_by_stage.yml                      8
-rw-r--r--  db/migrate/20211126115449_encrypt_static_objects_external_storage_auth_token.rb             2
-rw-r--r--  lib/gitlab/git/rugged_impl/commit.rb                                                         3
-rw-r--r--  spec/factories/ci/builds.rb                                                                  3
-rw-r--r--  spec/lib/gitlab/git/commit_spec.rb                                                          10
-rw-r--r--  spec/migrations/20211126115449_encrypt_static_objects_external_storage_auth_token_spec.rb  22
-rw-r--r--  spec/services/ci/after_requeue_job_service_spec.rb                                          60
-rw-r--r--  spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb                      57
11 files changed, 172 insertions(+), 42 deletions(-)
diff --git a/.gitlab/issue_templates/Geo Replicate a new Git repository type.md b/.gitlab/issue_templates/Geo Replicate a new Git repository type.md
index f4f70a38a27..3c482105a22 100644
--- a/.gitlab/issue_templates/Geo Replicate a new Git repository type.md
+++ b/.gitlab/issue_templates/Geo Replicate a new Git repository type.md
@@ -488,6 +488,8 @@ That's all of the required database changes.
module Geo
class CoolWidgetState < ApplicationRecord
+ include EachBatch
+
self.primary_key = :cool_widget_id
belongs_to :cool_widget, inverse_of: :cool_widget_state
diff --git a/.gitlab/issue_templates/Geo Replicate a new blob type.md b/.gitlab/issue_templates/Geo Replicate a new blob type.md
index 57652bd903a..d2fc7307c88 100644
--- a/.gitlab/issue_templates/Geo Replicate a new blob type.md
+++ b/.gitlab/issue_templates/Geo Replicate a new blob type.md
@@ -452,6 +452,8 @@ That's all of the required database changes.
``` ruby
module Geo
class CoolWidgetState < ApplicationRecord
+ include EachBatch
+
self.primary_key = :cool_widget_id
belongs_to :cool_widget, inverse_of: :cool_widget_state
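Both Geo issue templates gain the same change: the `CoolWidgetState` model now includes `EachBatch`, so verification-related backfills can walk the state table in primary-key-ordered chunks. A minimal usage sketch, assuming the standard GitLab `EachBatch` concern and a hypothetical `verification_state` column on the model:

```ruby
# Hypothetical sketch: EachBatch provides #each_batch, which yields the
# relation in fixed-size, primary-key-ordered batches instead of loading
# every row at once.
Geo::CoolWidgetState.each_batch(of: 1_000) do |batch|
  batch.update_all(verification_state: 0) # e.g. reset verification in bulk
end
```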
diff --git a/app/services/ci/after_requeue_job_service.rb b/app/services/ci/after_requeue_job_service.rb
index 9101ae91967..ee0ae6651ca 100644
--- a/app/services/ci/after_requeue_job_service.rb
+++ b/app/services/ci/after_requeue_job_service.rb
@@ -3,39 +3,50 @@
module Ci
class AfterRequeueJobService < ::BaseService
def execute(processable)
- process_subsequent_jobs(processable)
- reset_source_bridge(processable)
+ @processable = processable
+
+ process_subsequent_jobs
+ reset_source_bridge
end
private
- def process_subsequent_jobs(processable)
- (stage_dependent_jobs(processable) | needs_dependent_jobs(processable))
- .each do |processable|
- process(processable)
+ def process_subsequent_jobs
+ dependent_jobs.each do |job|
+ process(job)
end
end
- def reset_source_bridge(processable)
- processable.pipeline.reset_source_bridge!(current_user)
+ def reset_source_bridge
+ @processable.pipeline.reset_source_bridge!(current_user)
+ end
+
+ def dependent_jobs
+ if ::Feature.enabled?(:ci_order_subsequent_jobs_by_stage, @processable.pipeline.project, default_enabled: :yaml)
+ stage_dependent_jobs
+ .or(needs_dependent_jobs.except(:preload))
+ .ordered_by_stage
+ else
+ stage_dependent_jobs | needs_dependent_jobs
+ end
end
- def process(processable)
- Gitlab::OptimisticLocking.retry_lock(processable, name: 'ci_requeue_job') do |processable|
- processable.process(current_user)
+ def process(job)
+ Gitlab::OptimisticLocking.retry_lock(job, name: 'ci_requeue_job') do |job|
+ job.process(current_user)
end
end
- def skipped_jobs(processable)
- processable.pipeline.processables.skipped
+ def stage_dependent_jobs
+ skipped_jobs.after_stage(@processable.stage_idx)
end
- def stage_dependent_jobs(processable)
- skipped_jobs(processable).after_stage(processable.stage_idx)
+ def needs_dependent_jobs
+ skipped_jobs.scheduling_type_dag.with_needs([@processable.name])
end
- def needs_dependent_jobs(processable)
- skipped_jobs(processable).scheduling_type_dag.with_needs([processable.name])
+ def skipped_jobs
+ @skipped_jobs ||= @processable.pipeline.processables.skipped
end
end
end
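The service now keeps the requeued job in `@processable` and, behind the `ci_order_subsequent_jobs_by_stage` flag, merges the stage-dependent and needs-dependent relations with `.or` and orders the result by stage, so jobs from earlier stages are processed before the jobs that can only run after them. A standalone sketch of that ordering with plain Ruby objects (not the ActiveRecord relations the service actually uses), mirroring the spec scenario further down:

```ruby
# Standalone sketch (plain objects, not ActiveRecord): why dependent jobs are
# ordered by stage. A needs-dependent job in stage 0 must be processed before
# the stage-1 and stage-2 jobs that follow the requeued job.
Job = Struct.new(:name, :stage_idx, :needs, keyword_init: true)

skipped = [
  Job.new(name: 'deploy', stage_idx: 2, needs: ['test3']),
  Job.new(name: 'test3',  stage_idx: 1, needs: ['build1']),
  Job.new(name: 'build2', stage_idx: 0, needs: ['build1'])
]

requeued_name  = 'build1'
requeued_stage = 0

stage_dependent = skipped.select { |job| job.stage_idx > requeued_stage }
needs_dependent = skipped.select { |job| job.needs.include?(requeued_name) }

dependent_jobs = (stage_dependent | needs_dependent).sort_by(&:stage_idx)
puts dependent_jobs.map(&:name).inspect # => ["build2", "test3", "deploy"]
```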
diff --git a/config/feature_flags/development/ci_order_subsequent_jobs_by_stage.yml b/config/feature_flags/development/ci_order_subsequent_jobs_by_stage.yml
new file mode 100644
index 00000000000..dfc4ab3bad3
--- /dev/null
+++ b/config/feature_flags/development/ci_order_subsequent_jobs_by_stage.yml
@@ -0,0 +1,8 @@
+---
+name: ci_order_subsequent_jobs_by_stage
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/77528
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/349977
+milestone: '14.7'
+type: development
+group: group::pipeline authoring
+default_enabled: false
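The flag ships disabled by default; the service checks it per project with `Feature.enabled?(:ci_order_subsequent_jobs_by_stage, @processable.pipeline.project, default_enabled: :yaml)`. A sketch of toggling it from a GitLab Rails console, assuming the standard `Feature` helpers:

```ruby
# Rails console sketch (assumes a GitLab console session).
Feature.enable(:ci_order_subsequent_jobs_by_stage)           # enable for everyone
Feature.enable(:ci_order_subsequent_jobs_by_stage, project)  # or for a single project
Feature.disable(:ci_order_subsequent_jobs_by_stage)
```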
diff --git a/db/migrate/20211126115449_encrypt_static_objects_external_storage_auth_token.rb b/db/migrate/20211126115449_encrypt_static_objects_external_storage_auth_token.rb
index 9ce034b0065..92ba54d4c89 100644
--- a/db/migrate/20211126115449_encrypt_static_objects_external_storage_auth_token.rb
+++ b/db/migrate/20211126115449_encrypt_static_objects_external_storage_auth_token.rb
@@ -13,6 +13,8 @@ class EncryptStaticObjectsExternalStorageAuthToken < Gitlab::Database::Migration
ApplicationSetting.reset_column_information
ApplicationSetting.encrypted_token_is_null.plaintext_token_is_not_null.find_each do |application_setting|
+ next if application_setting.static_objects_external_storage_auth_token.empty?
+
token_encrypted = Gitlab::CryptoHelper.aes256_gcm_encrypt(application_setting.static_objects_external_storage_auth_token)
application_setting.update!(static_objects_external_storage_auth_token_encrypted: token_encrypted)
end
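The migration's existing scopes only exclude rows whose plaintext token is NULL, so a row holding an empty string would still reach the encryption step; the new guard skips it. A standalone sketch of the distinction, with hypothetical values only:

```ruby
# Standalone sketch: NULL tokens are already filtered by the scopes, but an
# empty string is not NULL and would otherwise be encrypted and written back.
tokens = [nil, '', 'secret']

candidates = tokens.reject(&:nil?) # roughly what plaintext_token_is_not_null keeps
candidates.each do |token|
  next if token.empty?             # the guard added above
  puts "would encrypt #{token.inspect}"
end
# => would encrypt "secret"
```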
diff --git a/lib/gitlab/git/rugged_impl/commit.rb b/lib/gitlab/git/rugged_impl/commit.rb
index 0607b151de2..cf547414b0d 100644
--- a/lib/gitlab/git/rugged_impl/commit.rb
+++ b/lib/gitlab/git/rugged_impl/commit.rb
@@ -31,6 +31,9 @@ module Gitlab
oids.map { |oid| rugged_find(repo, oid) }
.compact
.map { |commit| decorate(repo, commit) }
+ # Match Gitaly's list_commits_by_oid behavior
+ rescue ::Gitlab::Git::Repository::NoRepository
+ []
end
override :find_commit
diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb
index 98023334894..e6eaebc9b6b 100644
--- a/spec/factories/ci/builds.rb
+++ b/spec/factories/ci/builds.rb
@@ -5,6 +5,7 @@ FactoryBot.define do
name { 'test' }
add_attribute(:protected) { false }
created_at { 'Di 29. Okt 09:50:00 CET 2013' }
+ scheduling_type { 'stage' }
pending
options do
@@ -33,6 +34,8 @@ FactoryBot.define do
end
trait :dependent do
+ scheduling_type { 'dag' }
+
transient do
sequence(:needed_name) { |n| "dependency #{n}" }
needed { association(:ci_build, name: needed_name, pipeline: pipeline) }
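With these factory changes, plain builds default to stage scheduling and the `:dependent` trait produces a DAG-scheduled build with a `needs` entry. A usage sketch, assuming a spec that already defines `pipeline`:

```ruby
# FactoryBot usage sketch inside a spec with `pipeline` available.
build = create(:ci_build, pipeline: pipeline)                            # scheduling_type: 'stage'
dag   = create(:ci_build, :dependent, needed: build, pipeline: pipeline) # scheduling_type: 'dag'
dag.scheduling_type # => "dag", with a needs entry pointing at `build`
```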
diff --git a/spec/lib/gitlab/git/commit_spec.rb b/spec/lib/gitlab/git/commit_spec.rb
index 11510daf9c0..de342444c15 100644
--- a/spec/lib/gitlab/git/commit_spec.rb
+++ b/spec/lib/gitlab/git/commit_spec.rb
@@ -486,6 +486,16 @@ RSpec.describe Gitlab::Git::Commit, :seed_helper do
expect(commits.first.sha).to eq(SeedRepo::Commit::ID)
expect(commits.second.sha).to eq(SeedRepo::FirstCommit::ID)
end
+
+ context 'when repo does not exist' do
+ let(:no_repository) { Gitlab::Git::Repository.new('default', '@does-not-exist/project', '', 'bogus/project') }
+
+ it 'returns empty commits' do
+ commits = described_class.batch_by_oid(no_repository, oids)
+
+ expect(commits.count).to eq(0)
+ end
+ end
end
context 'when oids is empty' do
diff --git a/spec/migrations/20211126115449_encrypt_static_objects_external_storage_auth_token_spec.rb b/spec/migrations/20211126115449_encrypt_static_objects_external_storage_auth_token_spec.rb
index bc8b7c56676..bf4094eaa49 100644
--- a/spec/migrations/20211126115449_encrypt_static_objects_external_storage_auth_token_spec.rb
+++ b/spec/migrations/20211126115449_encrypt_static_objects_external_storage_auth_token_spec.rb
@@ -53,4 +53,26 @@ RSpec.describe EncryptStaticObjectsExternalStorageAuthToken, :migration do
end
end
end
+
+ context 'when static_objects_external_storage_auth_token is empty string' do
+ it 'does not break' do
+ settings = application_settings.create!
+ settings.update_column(:static_objects_external_storage_auth_token, '')
+
+ reversible_migration do |migration|
+ migration.before -> {
+ settings = application_settings.first
+
+ expect(settings.static_objects_external_storage_auth_token).to eq('')
+ expect(settings.static_objects_external_storage_auth_token_encrypted).to be_nil
+ }
+ migration.after -> {
+ settings = application_settings.first
+
+ expect(settings.static_objects_external_storage_auth_token).to eq('')
+ expect(settings.static_objects_external_storage_auth_token_encrypted).to be_nil
+ }
+ end
+ end
+ end
end
diff --git a/spec/services/ci/after_requeue_job_service_spec.rb b/spec/services/ci/after_requeue_job_service_spec.rb
index 2a5a971fdac..2465bac7d10 100644
--- a/spec/services/ci/after_requeue_job_service_spec.rb
+++ b/spec/services/ci/after_requeue_job_service_spec.rb
@@ -8,46 +8,56 @@ RSpec.describe Ci::AfterRequeueJobService do
let(:pipeline) { create(:ci_pipeline, project: project) }
- let!(:build) { create(:ci_build, pipeline: pipeline, stage_idx: 0, name: 'build') }
- let!(:test1) { create(:ci_build, :success, pipeline: pipeline, stage_idx: 1) }
- let!(:test2) { create(:ci_build, :skipped, pipeline: pipeline, stage_idx: 1) }
- let!(:test3) { create(:ci_build, :skipped, :dependent, pipeline: pipeline, stage_idx: 1, needed: build) }
- let!(:deploy) { create(:ci_build, :skipped, :dependent, pipeline: pipeline, stage_idx: 2, needed: test3) }
-
- subject(:execute_service) { described_class.new(project, user).execute(build) }
-
- it 'marks subsequent skipped jobs as processable' do
- expect(test1.reload).to be_success
- expect(test2.reload).to be_skipped
- expect(test3.reload).to be_skipped
- expect(deploy.reload).to be_skipped
-
- execute_service
-
- expect(test1.reload).to be_success
- expect(test2.reload).to be_created
- expect(test3.reload).to be_created
- expect(deploy.reload).to be_created
+ let!(:build1) { create(:ci_build, name: 'build1', pipeline: pipeline, stage_idx: 0) }
+ let!(:test1) { create(:ci_build, :success, name: 'test1', pipeline: pipeline, stage_idx: 1) }
+ let!(:test2) { create(:ci_build, :skipped, name: 'test2', pipeline: pipeline, stage_idx: 1) }
+ let!(:test3) { create(:ci_build, :skipped, :dependent, name: 'test3', pipeline: pipeline, stage_idx: 1, needed: build1) }
+ let!(:deploy) { create(:ci_build, :skipped, :dependent, name: 'deploy', pipeline: pipeline, stage_idx: 2, needed: test3) }
+
+ subject(:execute_service) { described_class.new(project, user).execute(build1) }
+
+ shared_examples 'processing subsequent skipped jobs' do
+ it 'marks subsequent skipped jobs as processable' do
+ expect(test1.reload).to be_success
+ expect(test2.reload).to be_skipped
+ expect(test3.reload).to be_skipped
+ expect(deploy.reload).to be_skipped
+
+ execute_service
+
+ expect(test1.reload).to be_success
+ expect(test2.reload).to be_created
+ expect(test3.reload).to be_created
+ expect(deploy.reload).to be_created
+ end
end
+ it_behaves_like 'processing subsequent skipped jobs'
+
context 'when there is a job need from the same stage' do
- let!(:test4) do
+ let!(:build2) do
create(:ci_build,
:skipped,
:dependent,
+ name: 'build2',
pipeline: pipeline,
stage_idx: 0,
scheduling_type: :dag,
- needed: build)
+ needed: build1)
end
- it 'marks subsequent skipped jobs as processable' do
- expect { execute_service }.to change { test4.reload.status }.from('skipped').to('created')
+ shared_examples 'processing the same stage job' do
+ it 'marks subsequent skipped jobs as processable' do
+ expect { execute_service }.to change { build2.reload.status }.from('skipped').to('created')
+ end
end
+
+ it_behaves_like 'processing subsequent skipped jobs'
+ it_behaves_like 'processing the same stage job'
end
context 'when the pipeline is a downstream pipeline and the bridge is depended' do
- let!(:trigger_job) { create(:ci_bridge, :strategy_depend, status: 'success') }
+ let!(:trigger_job) { create(:ci_bridge, :strategy_depend, name: 'trigger_job', status: 'success') }
before do
create(:ci_sources_pipeline, pipeline: pipeline, source_job: trigger_job)
diff --git a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
index 02f8f2dd99f..31e1b0a896d 100644
--- a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
+++ b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
@@ -1004,6 +1004,63 @@ RSpec.describe Ci::PipelineProcessing::AtomicProcessingService do
end
end
+ context 'when the dependency is stage-independent', :sidekiq_inline do
+ let(:config) do
+ <<-EOY
+ stages: [A, B]
+
+ A1:
+ stage: A
+ script: exit 0
+ when: manual
+
+ A2:
+ stage: A
+ script: exit 0
+ needs: [A1]
+
+ B:
+ stage: B
+ needs: [A2]
+ script: exit 0
+ EOY
+ end
+
+ let(:pipeline) do
+ Ci::CreatePipelineService.new(project, user, { ref: 'master' }).execute(:push).payload
+ end
+
+ before do
+ stub_ci_pipeline_yaml_file(config)
+ end
+
+ it 'processes subsequent jobs in the correct order when playing first job' do
+ expect(all_builds_names).to eq(%w[A1 A2 B])
+ expect(all_builds_statuses).to eq(%w[manual skipped skipped])
+
+ play_manual_action('A1')
+
+ expect(all_builds_names).to eq(%w[A1 A2 B])
+ expect(all_builds_statuses).to eq(%w[pending created created])
+ end
+
+ context 'when the FF ci_order_subsequent_jobs_by_stage is disabled' do
+ before do
+ stub_feature_flags(ci_order_subsequent_jobs_by_stage: false)
+ end
+
+ it 'processes subsequent jobs in an incorrect order when playing first job' do
+ expect(all_builds_names).to eq(%w[A1 A2 B])
+ expect(all_builds_statuses).to eq(%w[manual skipped skipped])
+
+ play_manual_action('A1')
+
+ expect(all_builds_names).to eq(%w[A1 A2 B])
+ expect(all_builds_statuses).to eq(%w[pending created skipped])
+ end
+ end
+ end
+
private
def all_builds