author     GitLab Bot <gitlab-bot@gitlab.com>  2020-03-05 06:07:51 +0000
committer  GitLab Bot <gitlab-bot@gitlab.com>  2020-03-05 06:07:51 +0000
commit     00bd11b166a886742f04d38c0d2551e52ff51472 (patch)
tree       af0b118c5748fdaabda2f9c2a007969c1673d581
parent     a0c1ba61c8e8c9195e3ad4deefc5c4cb5c6a1501 (diff)
download   gitlab-ce-00bd11b166a886742f04d38c0d2551e52ff51472.tar.gz
Add latest changes from gitlab-org/gitlab@master
-rw-r--r--  app/controllers/application_controller.rb                        3
-rw-r--r--  app/controllers/concerns/uploads_actions.rb                       5
-rw-r--r--  changelogs/unreleased/55487-backfill-lfs-objects-v2.yml           5
-rw-r--r--  db/post_migrate/20200217091401_reschedule_link_lfs_objects.rb    29
-rw-r--r--  lib/gitlab/background_migration/link_lfs_objects.rb              58
-rw-r--r--  spec/controllers/application_controller_spec.rb                   2
-rw-r--r--  spec/lib/gitlab/background_migration/link_lfs_objects_spec.rb    76
-rw-r--r--  spec/migrations/reschedule_link_lfs_objects_spec.rb               86
8 files changed, 259 insertions, 5 deletions
diff --git a/app/controllers/application_controller.rb b/app/controllers/application_controller.rb
index 5a2eb2337aa..484e86964f6 100644
--- a/app/controllers/application_controller.rb
+++ b/app/controllers/application_controller.rb
@@ -26,6 +26,7 @@ class ApplicationController < ActionController::Base
before_action :ldap_security_check
around_action :sentry_context
before_action :default_headers
+ before_action :default_cache_headers
before_action :add_gon_variables, if: :html_request?
before_action :configure_permitted_parameters, if: :devise_controller?
before_action :require_email, unless: :devise_controller?
@@ -259,7 +260,9 @@ class ApplicationController < ActionController::Base
headers['X-XSS-Protection'] = '1; mode=block'
headers['X-UA-Compatible'] = 'IE=edge'
headers['X-Content-Type-Options'] = 'nosniff'
+ end

+ def default_cache_headers
if current_user
headers['Cache-Control'] = default_cache_control
headers['Pragma'] = 'no-cache' # HTTP 1.0 compatibility
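Splitting the cache directives out of default_headers into their own default_cache_headers callback means a controller can now opt out of them with skip_before_action instead of blanking the headers afterwards. A minimal sketch of that opt-out, assuming a hypothetical controller name (UploadsActions below is the real use):

  # Hypothetical controller skipping the default cache headers for one action,
  # mirroring the UploadsActions change further down in this commit.
  class ExampleUploadsController < ApplicationController
    skip_before_action :default_cache_headers, only: :show
  end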
diff --git a/app/controllers/concerns/uploads_actions.rb b/app/controllers/concerns/uploads_actions.rb
index 549a443b1a8..f489de42864 100644
--- a/app/controllers/concerns/uploads_actions.rb
+++ b/app/controllers/concerns/uploads_actions.rb
@@ -9,6 +9,7 @@ module UploadsActions
included do
prepend_before_action :set_request_format_from_path_extension
+ skip_before_action :default_cache_headers, only: :show
rescue_from FileUploader::InvalidSecret, with: :render_404
end
@@ -35,10 +36,6 @@ module UploadsActions
def show
return render_404 unless uploader&.exists?

- # We need to reset caching from the applications controller to get rid of the no-store value
- headers['Cache-Control'] = ''
- headers['Pragma'] = ''
-
ttl, directives = *cache_settings
ttl ||= 0
directives ||= { private: true, must_revalidate: true }
diff --git a/changelogs/unreleased/55487-backfill-lfs-objects-v2.yml b/changelogs/unreleased/55487-backfill-lfs-objects-v2.yml
new file mode 100644
index 00000000000..b751955913e
--- /dev/null
+++ b/changelogs/unreleased/55487-backfill-lfs-objects-v2.yml
@@ -0,0 +1,5 @@
+---
+title: Backfill LfsObjectsProject records of forks
+merge_request: 25343
+author:
+type: other
diff --git a/db/post_migrate/20200217091401_reschedule_link_lfs_objects.rb b/db/post_migrate/20200217091401_reschedule_link_lfs_objects.rb
new file mode 100644
index 00000000000..a7d1b42ef70
--- /dev/null
+++ b/db/post_migrate/20200217091401_reschedule_link_lfs_objects.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+class RescheduleLinkLfsObjects < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+ MIGRATION = 'LinkLfsObjects'
+ BATCH_SIZE = 1_000
+
+ disable_ddl_transaction!
+
+ def up
+ forks = Gitlab::BackgroundMigration::LinkLfsObjects::Project.with_non_existing_lfs_objects
+
+ queue_background_migration_jobs_by_range_at_intervals(
+ forks,
+ MIGRATION,
+ BackgroundMigrationWorker.minimum_interval,
+ batch_size: BATCH_SIZE
+ )
+ end
+
+ def down
+ # No-op. No need to make this reversible. If the enqueued jobs run and
+ # fail at some point, some records will be created. When rescheduled, those
+ # records won't be re-created. It's also hard to track which records to
+ # clean up, if ever.
+ end
+end
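queue_background_migration_jobs_by_range_at_intervals comes from Gitlab::Database::MigrationHelpers, included above. As a rough sketch of its behavior (a simplified stand-in, not the actual helper), it walks the given relation in id batches and schedules one BackgroundMigrationWorker job per batch at an increasing delay:

  # Simplified sketch of the scheduling helper used in `up`; the real
  # implementation lives in Gitlab::Database::MigrationHelpers.
  def queue_background_migration_jobs_by_range_at_intervals(relation, job_class_name, delay_interval, batch_size:)
    relation.each_batch(of: batch_size) do |batch, index|
      # The bounds of each batch become the [start_id, end_id] arguments of perform.
      start_id = batch.minimum(:id)
      end_id   = batch.maximum(:id)

      # Here: 'LinkLfsObjects' jobs spaced BackgroundMigrationWorker.minimum_interval apart.
      BackgroundMigrationWorker.perform_in(index * delay_interval, job_class_name, [start_id, end_id])
    end
  end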
diff --git a/lib/gitlab/background_migration/link_lfs_objects.rb b/lib/gitlab/background_migration/link_lfs_objects.rb
index 69c03f617bf..3131b5d5125 100644
--- a/lib/gitlab/background_migration/link_lfs_objects.rb
+++ b/lib/gitlab/background_migration/link_lfs_objects.rb
@@ -6,6 +6,8 @@ module Gitlab
class LinkLfsObjects
# Model definition used for migration
class ForkNetworkMember < ActiveRecord::Base
+ include EachBatch
+
self.table_name = 'fork_network_members'

def self.with_non_existing_lfs_objects
@@ -23,8 +25,62 @@ module Gitlab
end
end
+ # Model definition used for migration
+ class Project < ActiveRecord::Base
+ include EachBatch
+
+ self.table_name = 'projects'
+
+ has_one :fork_network_member, class_name: 'LinkLfsObjects::ForkNetworkMember'
+
+ def self.with_non_existing_lfs_objects
+ fork_network_members =
+ ForkNetworkMember.with_non_existing_lfs_objects
+ .select(1)
+ .where('fork_network_members.project_id = projects.id')
+
+ where('EXISTS (?)', fork_network_members)
+ end
+ end
+
+ # Model definition used for migration
+ class LfsObjectsProject < ActiveRecord::Base
+ include EachBatch
+
+ self.table_name = 'lfs_objects_projects'
+ end
+
+ BATCH_SIZE = 1000
+
def perform(start_id, end_id)
- # no-op as some queries times out
+ forks =
+ Project
+ .with_non_existing_lfs_objects
+ .where(id: start_id..end_id)
+
+ forks.includes(:fork_network_member).find_each do |project|
+ LfsObjectsProject
+ .select("lfs_objects_projects.lfs_object_id, #{project.id}, NOW(), NOW()")
+ .where(project_id: project.fork_network_member.forked_from_project_id)
+ .each_batch(of: BATCH_SIZE) do |batch|
+ execute <<~SQL
+ INSERT INTO lfs_objects_projects (lfs_object_id, project_id, created_at, updated_at)
+ #{batch.to_sql}
+ SQL
+ end
+ end
+
+ logger.info(message: "LinkLfsObjects: created missing LfsObjectsProject for Projects #{forks.map(&:id).join(', ')}")
+ end
+
+ private
+
+ def execute(sql)
+ ::ActiveRecord::Base.connection.execute(sql)
+ end
+
+ def logger
+ @logger ||= Gitlab::BackgroundMigration::Logger.build
end
end
end
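Each batch iteration in `perform` interpolates `batch.to_sql` into the INSERT, so for a hypothetical fork (project id 42) of a source project (id 7) the executed statement looks roughly as follows; the last line shows how a single job could be run by hand from a Rails console (all ids are illustrative):

  # Approximate SQL produced for one batch (ids and batch bounds are hypothetical):
  #
  #   INSERT INTO lfs_objects_projects (lfs_object_id, project_id, created_at, updated_at)
  #   SELECT lfs_objects_projects.lfs_object_id, 42, NOW(), NOW()
  #   FROM lfs_objects_projects
  #   WHERE lfs_objects_projects.project_id = 7
  #     AND lfs_objects_projects.id >= 1 AND lfs_objects_projects.id < 1001
  #
  # Running a single job manually for one project id range:
  Gitlab::BackgroundMigration::LinkLfsObjects.new.perform(42, 42)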
diff --git a/spec/controllers/application_controller_spec.rb b/spec/controllers/application_controller_spec.rb
index bdac7369780..90f6697a8c0 100644
--- a/spec/controllers/application_controller_spec.rb
+++ b/spec/controllers/application_controller_spec.rb
@@ -725,6 +725,7 @@ describe ApplicationController do
get :index
expect(response.headers['Cache-Control']).to be_nil
+ expect(response.headers['Pragma']).to be_nil
end
end
@@ -735,6 +736,7 @@ describe ApplicationController do
get :index
expect(response.headers['Cache-Control']).to eq 'max-age=0, private, must-revalidate, no-store'
+ expect(response.headers['Pragma']).to eq 'no-cache'
end
it 'does not set the "no-store" header for XHR requests' do
diff --git a/spec/lib/gitlab/background_migration/link_lfs_objects_spec.rb b/spec/lib/gitlab/background_migration/link_lfs_objects_spec.rb
new file mode 100644
index 00000000000..aba903e2f26
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/link_lfs_objects_spec.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::LinkLfsObjects, :migration, schema: 2020_02_10_062432 do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:fork_networks) { table(:fork_networks) }
+ let(:fork_network_members) { table(:fork_network_members) }
+ let(:lfs_objects) { table(:lfs_objects) }
+ let(:lfs_objects_projects) { table(:lfs_objects_projects) }
+
+ let(:namespace) { namespaces.create(name: 'GitLab', path: 'gitlab') }
+
+ let!(:source_project) { projects.create(namespace_id: namespace.id) }
+ let!(:another_source_project) { projects.create(namespace_id: namespace.id) }
+ let!(:project) { projects.create(namespace_id: namespace.id) }
+ let!(:another_project) { projects.create(namespace_id: namespace.id) }
+ let!(:other_project) { projects.create(namespace_id: namespace.id) }
+ let!(:linked_project) { projects.create(namespace_id: namespace.id) }
+
+ let(:fork_network) { fork_networks.create(root_project_id: source_project.id) }
+ let(:another_fork_network) { fork_networks.create(root_project_id: another_source_project.id) }
+
+ let(:lfs_object) { lfs_objects.create(oid: 'abc123', size: 100) }
+ let(:another_lfs_object) { lfs_objects.create(oid: 'def456', size: 200) }
+
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 2)
+
+ # Create links between projects
+ fork_network_members.create(fork_network_id: fork_network.id, project_id: source_project.id, forked_from_project_id: nil)
+
+ [project, another_project, linked_project].each do |p|
+ fork_network_members.create(
+ fork_network_id: fork_network.id,
+ project_id: p.id,
+ forked_from_project_id: fork_network.root_project_id
+ )
+ end
+
+ fork_network_members.create(fork_network_id: another_fork_network.id, project_id: another_source_project.id, forked_from_project_id: nil)
+ fork_network_members.create(fork_network_id: another_fork_network.id, project_id: other_project.id, forked_from_project_id: another_fork_network.root_project_id)
+
+ # Links LFS objects to some projects
+ [source_project, another_source_project, linked_project].each do |p|
+ lfs_objects_projects.create(lfs_object_id: lfs_object.id, project_id: p.id)
+ lfs_objects_projects.create(lfs_object_id: another_lfs_object.id, project_id: p.id)
+ end
+ end
+
+ it 'creates LfsObjectsProject records for forks within the specified range of project IDs' do
+ expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |logger|
+ expect(logger).to receive(:info).twice
+ end
+
+ expect { subject.perform(project.id, other_project.id) }.to change { lfs_objects_projects.count }.by(6)
+
+ expect(lfs_object_ids_for(project)).to match_array(lfs_object_ids_for(source_project))
+ expect(lfs_object_ids_for(another_project)).to match_array(lfs_object_ids_for(source_project))
+ expect(lfs_object_ids_for(other_project)).to match_array(lfs_object_ids_for(another_source_project))
+
+ expect { subject.perform(project.id, other_project.id) }.not_to change { lfs_objects_projects.count }
+ end
+
+ context 'when it is not necessary to create LfsObjectsProject records' do
+ it 'does not create LfsObjectsProject records' do
+ expect { subject.perform(linked_project.id, linked_project.id) }
+ .not_to change { lfs_objects_projects.count }
+ end
+ end
+
+ def lfs_object_ids_for(project)
+ lfs_objects_projects.where(project_id: project.id).pluck(:lfs_object_id)
+ end
+end
diff --git a/spec/migrations/reschedule_link_lfs_objects_spec.rb b/spec/migrations/reschedule_link_lfs_objects_spec.rb
new file mode 100644
index 00000000000..6ce6e77f514
--- /dev/null
+++ b/spec/migrations/reschedule_link_lfs_objects_spec.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200217091401_reschedule_link_lfs_objects.rb')
+
+describe RescheduleLinkLfsObjects, :migration, :sidekiq do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:fork_networks) { table(:fork_networks) }
+ let(:fork_network_members) { table(:fork_network_members) }
+ let(:lfs_objects) { table(:lfs_objects) }
+ let(:lfs_objects_projects) { table(:lfs_objects_projects) }
+
+ let(:namespace) { namespaces.create(name: 'GitLab', path: 'gitlab') }
+
+ let(:fork_network) { fork_networks.create(root_project_id: source_project.id) }
+ let(:another_fork_network) { fork_networks.create(root_project_id: another_source_project.id) }
+
+ let!(:source_project) { projects.create(namespace_id: namespace.id) }
+ let!(:another_source_project) { projects.create(namespace_id: namespace.id) }
+ let!(:project) { projects.create(namespace_id: namespace.id) }
+ let!(:another_project) { projects.create(namespace_id: namespace.id) }
+ let!(:other_project) { projects.create(namespace_id: namespace.id) }
+ let!(:linked_project) { projects.create(namespace_id: namespace.id) }
+
+ let(:lfs_object) { lfs_objects.create(oid: 'abc123', size: 100) }
+ let(:another_lfs_object) { lfs_objects.create(oid: 'def456', size: 200) }
+
+ before do
+ # Create links between projects
+ fork_network_members.create(fork_network_id: fork_network.id, project_id: source_project.id, forked_from_project_id: nil)
+
+ [project, another_project, linked_project].each do |p|
+ fork_network_members.create(
+ fork_network_id: fork_network.id,
+ project_id: p.id,
+ forked_from_project_id: fork_network.root_project_id
+ )
+ end
+
+ fork_network_members.create(fork_network_id: another_fork_network.id, project_id: another_source_project.id, forked_from_project_id: nil)
+ fork_network_members.create(fork_network_id: another_fork_network.id, project_id: other_project.id, forked_from_project_id: another_fork_network.root_project_id)
+ end
+
+ context 'when there are forks to be backfilled' do
+ before do
+ stub_const("#{described_class.name}::BATCH_SIZE", 2)
+
+ # Links LFS objects to some projects
+ [source_project, another_source_project, linked_project].each do |p|
+ lfs_objects_projects.create(lfs_object_id: lfs_object.id, project_id: p.id)
+ lfs_objects_projects.create(lfs_object_id: another_lfs_object.id, project_id: p.id)
+ end
+ end
+
+ it 'schedules background migration to link LFS objects' do
+ Sidekiq::Testing.fake! do
+ migrate!
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(2)
+ expect(described_class::MIGRATION)
+ .to be_scheduled_delayed_migration(2.minutes, project.id, another_project.id)
+ expect(described_class::MIGRATION)
+ .to be_scheduled_delayed_migration(4.minutes, other_project.id, other_project.id)
+ end
+ end
+ end
+
+ context 'when there are no forks to be backfilled' do
+ before do
+ # Links LFS objects to all projects
+ projects.all.each do |p|
+ lfs_objects_projects.create(lfs_object_id: lfs_object.id, project_id: p.id)
+ lfs_objects_projects.create(lfs_object_id: another_lfs_object.id, project_id: p.id)
+ end
+ end
+
+ it 'does not schedule any job' do
+ Sidekiq::Testing.fake! do
+ migrate!
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(0)
+ end
+ end
+ end
+end