author    Marin Jankovski <maxlazio@gmail.com>  2019-07-03 11:55:56 +0200
committer Marin Jankovski <maxlazio@gmail.com>  2019-07-03 11:55:56 +0200
commit    c20c9e2940b0f94547246d05b7b526f0b1571027 (patch)
tree      c548960a37ab7447ff542e0844e838f973c118fb /lib
parent    49d689fb3c7781c861f995aaafef4b224581020b (diff)
parent    2ca9bda400c0ed647c3ef342dcc0aa56c558cebe (diff)
download  gitlab-ce-c20c9e2940b0f94547246d05b7b526f0b1571027.tar.gz
Merge branch 'master' of gitlab.com:gitlab-org/gitlab-ce
Diffstat (limited to 'lib')
-rw-r--r--  lib/api/boards.rb | 2
-rw-r--r--  lib/api/boards_responses.rb | 2
-rw-r--r--  lib/api/entities.rb | 4
-rw-r--r--  lib/api/group_boards.rb | 2
-rw-r--r--  lib/api/helpers/services_helpers.rb | 10
-rw-r--r--  lib/api/releases.rb | 2
-rw-r--r--  lib/api/users.rb | 1
-rw-r--r--  lib/banzai/filter/relative_link_filter.rb | 6
-rw-r--r--  lib/feature.rb | 4
-rw-r--r--  lib/gitlab/auth/ip_rate_limiter.rb | 17
-rw-r--r--  lib/gitlab/background_migration/create_fork_network_memberships_range.rb | 85
-rw-r--r--  lib/gitlab/background_migration/delete_conflicting_redirect_routes_range.rb | 13
-rw-r--r--  lib/gitlab/background_migration/migrate_events_to_push_event_payloads.rb | 179
-rw-r--r--  lib/gitlab/background_migration/migrate_system_uploads_to_new_folder.rb | 29
-rw-r--r--  lib/gitlab/background_migration/move_personal_snippet_files.rb | 82
-rw-r--r--  lib/gitlab/background_migration/normalize_ldap_extern_uids_range.rb | 319
-rw-r--r--  lib/gitlab/background_migration/populate_fork_networks_range.rb | 128
-rw-r--r--  lib/gitlab/background_migration/populate_merge_requests_latest_merge_request_diff_id.rb | 33
-rw-r--r--  lib/gitlab/ci/build/prerequisite/kubernetes_namespace.rb | 2
-rw-r--r--  lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml | 20
-rw-r--r--  lib/gitlab/ci/templates/Pages/Jekyll.gitlab-ci.yml | 1
-rw-r--r--  lib/gitlab/ci/trace.rb | 2
-rw-r--r--  lib/gitlab/cleanup/orphan_job_artifact_files.rb | 132
-rw-r--r--  lib/gitlab/cleanup/orphan_job_artifact_files_batch.rb | 80
-rw-r--r--  lib/gitlab/cluster/lifecycle_events.rb | 3
-rw-r--r--  lib/gitlab/database.rb | 38
-rw-r--r--  lib/gitlab/database/migration_helpers.rb | 3
-rw-r--r--  lib/gitlab/diff/lines_unfolder.rb | 2
-rw-r--r--  lib/gitlab/git/raw_diff_change.rb | 4
-rw-r--r--  lib/gitlab/graphql/copy_field_description.rb | 21
-rw-r--r--  lib/gitlab/graphql/errors.rb | 1
-rw-r--r--  lib/gitlab/graphql/find_argument_in_parent.rb | 32
-rw-r--r--  lib/gitlab/graphql/loaders/pipeline_for_sha_loader.rb | 25
-rw-r--r--  lib/gitlab/legacy_github_import/release_formatter.rb | 1
-rw-r--r--  lib/gitlab/metrics/dashboard/base_service.rb | 8
-rw-r--r--  lib/gitlab/metrics/system.rb | 14
-rw-r--r--  lib/gitlab/optimistic_locking.rb | 1
-rw-r--r--  lib/gitlab/search/found_blob.rb | 2
-rw-r--r--  lib/gitlab/thread_memory_cache.rb | 15
-rw-r--r--  lib/peek/views/redis.rb | 82
-rw-r--r--  lib/tasks/gitlab/cleanup.rake | 25
-rw-r--r--  lib/tasks/migrate/schema_check.rake | 20
-rw-r--r--  lib/tasks/migrate/setup_postgresql.rake | 14
-rw-r--r--  lib/tasks/yarn.rake | 2
44 files changed, 543 insertions, 925 deletions
diff --git a/lib/api/boards.rb b/lib/api/boards.rb
index b7c77730afb..4e31f74f18a 100644
--- a/lib/api/boards.rb
+++ b/lib/api/boards.rb
@@ -27,7 +27,7 @@ module API
end
get '/' do
authorize!(:read_board, user_project)
- present paginate(board_parent.boards), with: Entities::Board
+ present paginate(board_parent.boards.with_associations), with: Entities::Board
end
desc 'Find a project board' do
diff --git a/lib/api/boards_responses.rb b/lib/api/boards_responses.rb
index 86d9b24802f..68497a08fb8 100644
--- a/lib/api/boards_responses.rb
+++ b/lib/api/boards_responses.rb
@@ -11,7 +11,7 @@ module API
end
def board_lists
- board.lists.destroyable
+ board.destroyable_lists
end
def create_list
diff --git a/lib/api/entities.rb b/lib/api/entities.rb
index ac1dfb64a73..b9aa387ba61 100644
--- a/lib/api/entities.rb
+++ b/lib/api/entities.rb
@@ -1103,7 +1103,7 @@ module API
expose :project, using: Entities::BasicProjectDetails
expose :lists, using: Entities::List do |board|
- board.lists.destroyable
+ board.destroyable_lists
end
end
@@ -1188,8 +1188,10 @@ module API
MarkupHelper.markdown_field(entity, :description)
end
expose :created_at
+ expose :released_at
expose :author, using: Entities::UserBasic, if: -> (release, _) { release.author.present? }
expose :commit, using: Entities::Commit, if: lambda { |_, _| can_download_code? }
+ expose :upcoming_release?, as: :upcoming_release
expose :assets do
expose :assets_count, as: :count do |release, _|
diff --git a/lib/api/group_boards.rb b/lib/api/group_boards.rb
index 9a20ee8c8b9..feb2254963e 100644
--- a/lib/api/group_boards.rb
+++ b/lib/api/group_boards.rb
@@ -37,7 +37,7 @@ module API
use :pagination
end
get '/' do
- present paginate(board_parent.boards), with: Entities::Board
+ present paginate(board_parent.boards.with_associations), with: Entities::Board
end
end
diff --git a/lib/api/helpers/services_helpers.rb b/lib/api/helpers/services_helpers.rb
index cf2e9d01356..c4ecf55969c 100644
--- a/lib/api/helpers/services_helpers.rb
+++ b/lib/api/helpers/services_helpers.rb
@@ -462,31 +462,31 @@ module API
required: true,
name: :url,
type: String,
- desc: 'The base URL to the JIRA instance web interface which is being linked to this GitLab project. E.g., https://jira.example.com'
+ desc: 'The base URL to the Jira instance web interface which is being linked to this GitLab project. E.g., https://jira.example.com'
},
{
required: false,
name: :api_url,
type: String,
- desc: 'The base URL to the JIRA instance API. Web URL value will be used if not set. E.g., https://jira-api.example.com'
+ desc: 'The base URL to the Jira instance API. Web URL value will be used if not set. E.g., https://jira-api.example.com'
},
{
required: true,
name: :username,
type: String,
- desc: 'The username of the user created to be used with GitLab/JIRA'
+ desc: 'The username of the user created to be used with GitLab/Jira'
},
{
required: true,
name: :password,
type: String,
- desc: 'The password of the user created to be used with GitLab/JIRA'
+ desc: 'The password of the user created to be used with GitLab/Jira'
},
{
required: false,
name: :jira_issue_transition_id,
type: String,
- desc: 'The ID of a transition that moves issues to a closed state. You can find this number under the JIRA workflow administration (**Administration > Issues > Workflows**) by selecting **View** under **Operations** of the desired workflow of your project. The ID of each state can be found inside the parenthesis of each transition name under the **Transitions (id)** column ([see screenshot][trans]). By default, this ID is set to `2`'
+ desc: 'The ID of a transition that moves issues to a closed state. You can find this number under the Jira workflow administration (**Administration > Issues > Workflows**) by selecting **View** under **Operations** of the desired workflow of your project. The ID of each state can be found inside the parenthesis of each transition name under the **Transitions (id)** column ([see screenshot][trans]). By default, this ID is set to `2`'
}
],
'kubernetes' => [
diff --git a/lib/api/releases.rb b/lib/api/releases.rb
index 6b17f4317db..fdd8406388e 100644
--- a/lib/api/releases.rb
+++ b/lib/api/releases.rb
@@ -54,6 +54,7 @@ module API
requires :url, type: String
end
end
+ optional :released_at, type: DateTime, desc: 'The date when the release will be/was ready. Defaults to the current time.'
end
post ':id/releases' do
authorize_create_release!
@@ -77,6 +78,7 @@ module API
requires :tag_name, type: String, desc: 'The name of the tag', as: :tag
optional :name, type: String, desc: 'The name of the release'
optional :description, type: String, desc: 'Release notes with markdown support'
+ optional :released_at, type: DateTime, desc: 'The date when the release will be/was ready. Defaults to the current time.'
end
put ':id/releases/:tag_name', requirements: RELEASE_ENDPOINT_REQUIREMETS do
authorize_update_release!
diff --git a/lib/api/users.rb b/lib/api/users.rb
index 9ab5fa8d0bd..41418aa216c 100644
--- a/lib/api/users.rb
+++ b/lib/api/users.rb
@@ -158,6 +158,7 @@ module API
at_least_one_of :password, :reset_password
requires :name, type: String, desc: 'The name of the user'
requires :username, type: String, desc: 'The username of the user'
+ optional :force_random_password, type: Boolean, desc: 'Flag indicating a random password will be set'
use :optional_attributes
end
post do
diff --git a/lib/banzai/filter/relative_link_filter.rb b/lib/banzai/filter/relative_link_filter.rb
index bf84d7cddae..86f18679496 100644
--- a/lib/banzai/filter/relative_link_filter.rb
+++ b/lib/banzai/filter/relative_link_filter.rb
@@ -56,10 +56,10 @@ module Banzai
def process_link_to_upload_attr(html_attr)
path_parts = [Addressable::URI.unescape(html_attr.value)]
- if group
- path_parts.unshift(relative_url_root, 'groups', group.full_path, '-')
- elsif project
+ if project
path_parts.unshift(relative_url_root, project.full_path)
+ elsif group
+ path_parts.unshift(relative_url_root, 'groups', group.full_path, '-')
else
path_parts.unshift(relative_url_root)
end
diff --git a/lib/feature.rb b/lib/feature.rb
index cc9c9d44005..22420e95ea2 100644
--- a/lib/feature.rb
+++ b/lib/feature.rb
@@ -34,7 +34,9 @@ class Feature
begin
# We saw on GitLab.com, this database request was called 2300
# times/s. Let's cache it for a minute to avoid that load.
- Rails.cache.fetch('flipper:persisted_names', expires_in: 1.minute) { FlipperFeature.feature_names }
+ Gitlab::ThreadMemoryCache.cache_backend.fetch('flipper:persisted_names', expires_in: 1.minute) do
+ FlipperFeature.feature_names
+ end
end
end
diff --git a/lib/gitlab/auth/ip_rate_limiter.rb b/lib/gitlab/auth/ip_rate_limiter.rb
index 81e616fa20a..0b7055b3256 100644
--- a/lib/gitlab/auth/ip_rate_limiter.rb
+++ b/lib/gitlab/auth/ip_rate_limiter.rb
@@ -3,6 +3,8 @@
module Gitlab
module Auth
class IpRateLimiter
+ include ::Gitlab::Utils::StrongMemoize
+
attr_reader :ip
def initialize(ip)
@@ -37,7 +39,20 @@ module Gitlab
end
def ip_can_be_banned?
- config.ip_whitelist.exclude?(ip)
+ !trusted_ip?
+ end
+
+ def trusted_ip?
+ trusted_ips.any? { |netmask| netmask.include?(ip) }
+ end
+
+ def trusted_ips
+ strong_memoize(:trusted_ips) do
+ config.ip_whitelist.map do |proxy|
+ IPAddr.new(proxy)
+ rescue IPAddr::InvalidAddressError
+ end.compact
+ end
end
end
end
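The rate limiter now parses each whitelist entry into an IPAddr, so CIDR ranges match as well as single addresses, and malformed entries are silently dropped. A minimal sketch of that matching logic using only the Ruby standard library (the whitelist values below are illustrative):

require 'ipaddr'

whitelist = ['127.0.0.1', '10.0.0.0/8', 'not-an-ip']

trusted_ips = whitelist.map do |entry|
  IPAddr.new(entry)
rescue IPAddr::InvalidAddressError
  nil  # malformed entries are ignored, mirroring the rescue in the hunk above
end.compact

trusted_ips.any? { |net| net.include?(IPAddr.new('10.1.2.3')) }  # => true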
diff --git a/lib/gitlab/background_migration/create_fork_network_memberships_range.rb b/lib/gitlab/background_migration/create_fork_network_memberships_range.rb
deleted file mode 100644
index ccd1f9b4dba..00000000000
--- a/lib/gitlab/background_migration/create_fork_network_memberships_range.rb
+++ /dev/null
@@ -1,85 +0,0 @@
-# frozen_string_literal: true
-# rubocop:disable Style/Documentation
-
-module Gitlab
- module BackgroundMigration
- class CreateForkNetworkMembershipsRange
- RESCHEDULE_DELAY = 15
-
- class ForkedProjectLink < ActiveRecord::Base
- self.table_name = 'forked_project_links'
- end
-
- def perform(start_id, end_id)
- log("Creating memberships for forks: #{start_id} - #{end_id}")
-
- insert_members(start_id, end_id)
-
- if missing_members?(start_id, end_id)
- BackgroundMigrationWorker.perform_in(RESCHEDULE_DELAY, "CreateForkNetworkMembershipsRange", [start_id, end_id])
- end
- end
-
- def insert_members(start_id, end_id)
- ActiveRecord::Base.connection.execute <<~INSERT_MEMBERS
- INSERT INTO fork_network_members (fork_network_id, project_id, forked_from_project_id)
-
- SELECT fork_network_members.fork_network_id,
- forked_project_links.forked_to_project_id,
- forked_project_links.forked_from_project_id
-
- FROM forked_project_links
-
- INNER JOIN fork_network_members
- ON forked_project_links.forked_from_project_id = fork_network_members.project_id
-
- WHERE forked_project_links.id BETWEEN #{start_id} AND #{end_id}
- AND NOT EXISTS (
- SELECT true
- FROM fork_network_members existing_members
- WHERE existing_members.project_id = forked_project_links.forked_to_project_id
- )
- INSERT_MEMBERS
- rescue ActiveRecord::RecordNotUnique => e
- # `fork_network_member` was created concurrently in another migration
- log(e.message)
- end
-
- def missing_members?(start_id, end_id)
- count_sql = <<~MISSING_MEMBERS
- SELECT COUNT(*)
-
- FROM forked_project_links
-
- WHERE NOT EXISTS (
- SELECT true
- FROM fork_network_members
- WHERE fork_network_members.project_id = forked_project_links.forked_to_project_id
- )
- AND EXISTS (
- SELECT true
- FROM projects
- WHERE forked_project_links.forked_from_project_id = projects.id
- )
- AND NOT EXISTS (
- SELECT true
- FROM forked_project_links AS parent_links
- WHERE parent_links.forked_to_project_id = forked_project_links.forked_from_project_id
- AND NOT EXISTS (
- SELECT true
- FROM projects
- WHERE parent_links.forked_from_project_id = projects.id
- )
- )
- AND forked_project_links.id BETWEEN #{start_id} AND #{end_id}
- MISSING_MEMBERS
-
- ForkedProjectLink.count_by_sql(count_sql) > 0
- end
-
- def log(message)
- Rails.logger.info("#{self.class.name} - #{message}")
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/delete_conflicting_redirect_routes_range.rb b/lib/gitlab/background_migration/delete_conflicting_redirect_routes_range.rb
deleted file mode 100644
index 21b626dde56..00000000000
--- a/lib/gitlab/background_migration/delete_conflicting_redirect_routes_range.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-# frozen_string_literal: true
-# rubocop:disable Style/Documentation
-
-module Gitlab
- module BackgroundMigration
- class DeleteConflictingRedirectRoutesRange
- def perform(start_id, end_id)
- # No-op.
- # See https://gitlab.com/gitlab-com/infrastructure/issues/3460#note_53223252
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/migrate_events_to_push_event_payloads.rb b/lib/gitlab/background_migration/migrate_events_to_push_event_payloads.rb
deleted file mode 100644
index 42fcaa87e66..00000000000
--- a/lib/gitlab/background_migration/migrate_events_to_push_event_payloads.rb
+++ /dev/null
@@ -1,179 +0,0 @@
-# frozen_string_literal: true
-# rubocop:disable Style/Documentation
-
-module Gitlab
- module BackgroundMigration
- # Class that migrates events for the new push event payloads setup. All
- # events are copied to a shadow table, and push events will also have a row
- # created in the push_event_payloads table.
- class MigrateEventsToPushEventPayloads
- class Event < ActiveRecord::Base
- self.table_name = 'events'
-
- serialize :data
-
- BLANK_REF = ('0' * 40).freeze
- TAG_REF_PREFIX = 'refs/tags/'.freeze
- MAX_INDEX = 69
- PUSHED = 5
-
- def push_event?
- action == PUSHED && data.present?
- end
-
- def commit_title
- commit = commits.last
-
- return unless commit && commit[:message]
-
- index = commit[:message].index("\n")
- message = index ? commit[:message][0..index] : commit[:message]
-
- message.strip.truncate(70)
- end
-
- def commit_from_sha
- if create?
- nil
- else
- data[:before]
- end
- end
-
- def commit_to_sha
- if remove?
- nil
- else
- data[:after]
- end
- end
-
- def data
- super || {}
- end
-
- def commits
- data[:commits] || []
- end
-
- def commit_count
- data[:total_commits_count] || 0
- end
-
- def ref
- data[:ref]
- end
-
- def trimmed_ref_name
- if ref_type == :tag
- ref[10..-1]
- else
- ref[11..-1]
- end
- end
-
- def create?
- data[:before] == BLANK_REF
- end
-
- def remove?
- data[:after] == BLANK_REF
- end
-
- def push_action
- if create?
- :created
- elsif remove?
- :removed
- else
- :pushed
- end
- end
-
- def ref_type
- if ref.start_with?(TAG_REF_PREFIX)
- :tag
- else
- :branch
- end
- end
- end
-
- class EventForMigration < ActiveRecord::Base
- self.table_name = 'events_for_migration'
- end
-
- class PushEventPayload < ActiveRecord::Base
- self.table_name = 'push_event_payloads'
-
- enum action: {
- created: 0,
- removed: 1,
- pushed: 2
- }
-
- enum ref_type: {
- branch: 0,
- tag: 1
- }
- end
-
- # start_id - The start ID of the range of events to process
- # end_id - The end ID of the range to process.
- def perform(start_id, end_id)
- return unless migrate?
-
- find_events(start_id, end_id).each { |event| process_event(event) }
- end
-
- def process_event(event)
- ActiveRecord::Base.transaction do
- replicate_event(event)
- create_push_event_payload(event) if event.push_event?
- end
- rescue ActiveRecord::InvalidForeignKey => e
- # A foreign key error means the associated event was removed. In this
- # case we'll just skip migrating the event.
- Rails.logger.error("Unable to migrate event #{event.id}: #{e}")
- end
-
- def replicate_event(event)
- new_attributes = event.attributes
- .with_indifferent_access.except(:title, :data)
-
- EventForMigration.create!(new_attributes)
- end
-
- def create_push_event_payload(event)
- commit_from = pack(event.commit_from_sha)
- commit_to = pack(event.commit_to_sha)
-
- PushEventPayload.create!(
- event_id: event.id,
- commit_count: event.commit_count,
- ref_type: event.ref_type,
- action: event.push_action,
- commit_from: commit_from,
- commit_to: commit_to,
- ref: event.trimmed_ref_name,
- commit_title: event.commit_title
- )
- end
-
- def find_events(start_id, end_id)
- Event
- .where('NOT EXISTS (SELECT true FROM events_for_migration WHERE events_for_migration.id = events.id)')
- .where(id: start_id..end_id)
- end
-
- def migrate?
- Event.table_exists? && PushEventPayload.table_exists? &&
- EventForMigration.table_exists?
- end
-
- def pack(value)
- value ? [value].pack('H*') : nil
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/migrate_system_uploads_to_new_folder.rb b/lib/gitlab/background_migration/migrate_system_uploads_to_new_folder.rb
deleted file mode 100644
index ef50fe4adb1..00000000000
--- a/lib/gitlab/background_migration/migrate_system_uploads_to_new_folder.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# frozen_string_literal: true
-# rubocop:disable Style/Documentation
-
-module Gitlab
- module BackgroundMigration
- class MigrateSystemUploadsToNewFolder
- include Gitlab::Database::MigrationHelpers
- attr_reader :old_folder, :new_folder
-
- class Upload < ActiveRecord::Base
- self.table_name = 'uploads'
- include EachBatch
- end
-
- def perform(old_folder, new_folder)
- replace_sql = replace_sql(uploads[:path], old_folder, new_folder)
- affected_uploads = Upload.where(uploads[:path].matches("#{old_folder}%"))
-
- affected_uploads.each_batch do |batch|
- batch.update_all("path = #{replace_sql}")
- end
- end
-
- def uploads
- Arel::Table.new('uploads')
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/move_personal_snippet_files.rb b/lib/gitlab/background_migration/move_personal_snippet_files.rb
deleted file mode 100644
index 5b2b2af718a..00000000000
--- a/lib/gitlab/background_migration/move_personal_snippet_files.rb
+++ /dev/null
@@ -1,82 +0,0 @@
-# frozen_string_literal: true
-# rubocop:disable Style/Documentation
-
-module Gitlab
- module BackgroundMigration
- class MovePersonalSnippetFiles
- delegate :select_all, :execute, :quote_string, to: :connection
-
- def perform(relative_source, relative_destination)
- @source_relative_location = relative_source
- @destination_relative_location = relative_destination
-
- move_personal_snippet_files
- end
-
- def move_personal_snippet_files
- query = "SELECT uploads.path, uploads.model_id FROM uploads "\
- "INNER JOIN snippets ON snippets.id = uploads.model_id WHERE uploader = 'PersonalFileUploader'"
- select_all(query).each do |upload|
- secret = upload['path'].split('/')[0]
- file_name = upload['path'].split('/')[1]
-
- move_file(upload['model_id'], secret, file_name)
- update_markdown(upload['model_id'], secret, file_name)
- end
- end
-
- def move_file(snippet_id, secret, file_name)
- source_dir = File.join(base_directory, @source_relative_location, snippet_id.to_s, secret)
- destination_dir = File.join(base_directory, @destination_relative_location, snippet_id.to_s, secret)
-
- source_file_path = File.join(source_dir, file_name)
- destination_file_path = File.join(destination_dir, file_name)
-
- unless File.exist?(source_file_path)
- say "Source file `#{source_file_path}` doesn't exist. Skipping."
- return
- end
-
- say "Moving file #{source_file_path} -> #{destination_file_path}"
-
- FileUtils.mkdir_p(destination_dir)
- FileUtils.move(source_file_path, destination_file_path)
- end
-
- def update_markdown(snippet_id, secret, file_name)
- source_markdown_path = File.join(@source_relative_location, snippet_id.to_s, secret, file_name)
- destination_markdown_path = File.join(@destination_relative_location, snippet_id.to_s, secret, file_name)
-
- source_markdown = "](#{source_markdown_path})"
- destination_markdown = "](#{destination_markdown_path})"
- quoted_source = quote_string(source_markdown)
- quoted_destination = quote_string(destination_markdown)
-
- execute("UPDATE snippets "\
- "SET description = replace(snippets.description, '#{quoted_source}', '#{quoted_destination}'), description_html = NULL "\
- "WHERE id = #{snippet_id}")
-
- query = "SELECT id, note FROM notes WHERE noteable_id = #{snippet_id} "\
- "AND noteable_type = 'Snippet' AND note IS NOT NULL"
- select_all(query).each do |note|
- text = note['note'].gsub(source_markdown, destination_markdown)
- quoted_text = quote_string(text)
-
- execute("UPDATE notes SET note = '#{quoted_text}', note_html = NULL WHERE id = #{note['id']}")
- end
- end
-
- def base_directory
- File.join(Rails.root, 'public')
- end
-
- def connection
- ActiveRecord::Base.connection
- end
-
- def say(message)
- Rails.logger.debug(message)
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/normalize_ldap_extern_uids_range.rb b/lib/gitlab/background_migration/normalize_ldap_extern_uids_range.rb
deleted file mode 100644
index 48aa369705f..00000000000
--- a/lib/gitlab/background_migration/normalize_ldap_extern_uids_range.rb
+++ /dev/null
@@ -1,319 +0,0 @@
-# frozen_string_literal: true
-# rubocop:disable Metrics/MethodLength
-# rubocop:disable Metrics/ClassLength
-# rubocop:disable Metrics/BlockLength
-# rubocop:disable Style/Documentation
-
-module Gitlab
- module BackgroundMigration
- class NormalizeLdapExternUidsRange
- class Identity < ActiveRecord::Base
- self.table_name = 'identities'
- end
-
- # Copied this class to make this migration resilient to future code changes.
- # And if the normalize behavior is changed in the future, it must be
- # accompanied by another migration.
- module Gitlab
- module Auth
- module LDAP
- class DN
- FormatError = Class.new(StandardError)
- MalformedError = Class.new(FormatError)
- UnsupportedError = Class.new(FormatError)
-
- def self.normalize_value(given_value)
- dummy_dn = "placeholder=#{given_value}"
- normalized_dn = new(*dummy_dn).to_normalized_s
- normalized_dn.sub(/\Aplaceholder=/, '')
- end
-
- ##
- # Initialize a DN, escaping as required. Pass in attributes in name/value
- # pairs. If there is a left over argument, it will be appended to the dn
- # without escaping (useful for a base string).
- #
- # Most uses of this class will be to escape a DN, rather than to parse it,
- # so storing the dn as an escaped String and parsing parts as required
- # with a state machine seems sensible.
- def initialize(*args)
- if args.length > 1
- initialize_array(args)
- else
- initialize_string(args[0])
- end
- end
-
- ##
- # Parse a DN into key value pairs using ASN from
- # http://tools.ietf.org/html/rfc2253 section 3.
- # rubocop:disable Metrics/AbcSize
- # rubocop:disable Metrics/CyclomaticComplexity
- # rubocop:disable Metrics/PerceivedComplexity
- def each_pair
- state = :key
- key = StringIO.new
- value = StringIO.new
- hex_buffer = ""
-
- @dn.each_char.with_index do |char, dn_index|
- case state
- when :key then
- case char
- when 'a'..'z', 'A'..'Z' then
- state = :key_normal
- key << char
- when '0'..'9' then
- state = :key_oid
- key << char
- when ' ' then state = :key
- else raise(MalformedError, "Unrecognized first character of an RDN attribute type name \"#{char}\"")
- end
- when :key_normal then
- case char
- when '=' then state = :value
- when 'a'..'z', 'A'..'Z', '0'..'9', '-', ' ' then key << char
- else raise(MalformedError, "Unrecognized RDN attribute type name character \"#{char}\"")
- end
- when :key_oid then
- case char
- when '=' then state = :value
- when '0'..'9', '.', ' ' then key << char
- else raise(MalformedError, "Unrecognized RDN OID attribute type name character \"#{char}\"")
- end
- when :value then
- case char
- when '\\' then state = :value_normal_escape
- when '"' then state = :value_quoted
- when ' ' then state = :value
- when '#' then
- state = :value_hexstring
- value << char
- when ',' then
- state = :key
- yield key.string.strip, rstrip_except_escaped(value.string, dn_index)
- key = StringIO.new
- value = StringIO.new
- else
- state = :value_normal
- value << char
- end
- when :value_normal then
- case char
- when '\\' then state = :value_normal_escape
- when ',' then
- state = :key
- yield key.string.strip, rstrip_except_escaped(value.string, dn_index)
- key = StringIO.new
- value = StringIO.new
- when '+' then raise(UnsupportedError, "Multivalued RDNs are not supported")
- else value << char
- end
- when :value_normal_escape then
- case char
- when '0'..'9', 'a'..'f', 'A'..'F' then
- state = :value_normal_escape_hex
- hex_buffer = char
- else
- state = :value_normal
- value << char
- end
- when :value_normal_escape_hex then
- case char
- when '0'..'9', 'a'..'f', 'A'..'F' then
- state = :value_normal
- value << "#{hex_buffer}#{char}".to_i(16).chr
- else raise(MalformedError, "Invalid escaped hex code \"\\#{hex_buffer}#{char}\"")
- end
- when :value_quoted then
- case char
- when '\\' then state = :value_quoted_escape
- when '"' then state = :value_end
- else value << char
- end
- when :value_quoted_escape then
- case char
- when '0'..'9', 'a'..'f', 'A'..'F' then
- state = :value_quoted_escape_hex
- hex_buffer = char
- else
- state = :value_quoted
- value << char
- end
- when :value_quoted_escape_hex then
- case char
- when '0'..'9', 'a'..'f', 'A'..'F' then
- state = :value_quoted
- value << "#{hex_buffer}#{char}".to_i(16).chr
- else raise(MalformedError, "Expected the second character of a hex pair inside a double quoted value, but got \"#{char}\"")
- end
- when :value_hexstring then
- case char
- when '0'..'9', 'a'..'f', 'A'..'F' then
- state = :value_hexstring_hex
- value << char
- when ' ' then state = :value_end
- when ',' then
- state = :key
- yield key.string.strip, rstrip_except_escaped(value.string, dn_index)
- key = StringIO.new
- value = StringIO.new
- else raise(MalformedError, "Expected the first character of a hex pair, but got \"#{char}\"")
- end
- when :value_hexstring_hex then
- case char
- when '0'..'9', 'a'..'f', 'A'..'F' then
- state = :value_hexstring
- value << char
- else raise(MalformedError, "Expected the second character of a hex pair, but got \"#{char}\"")
- end
- when :value_end then
- case char
- when ' ' then state = :value_end
- when ',' then
- state = :key
- yield key.string.strip, rstrip_except_escaped(value.string, dn_index)
- key = StringIO.new
- value = StringIO.new
- else raise(MalformedError, "Expected the end of an attribute value, but got \"#{char}\"")
- end
- else raise "Fell out of state machine"
- end
- end
-
- # Last pair
- raise(MalformedError, 'DN string ended unexpectedly') unless
- [:value, :value_normal, :value_hexstring, :value_end].include? state
-
- yield key.string.strip, rstrip_except_escaped(value.string, @dn.length)
- end
-
- def rstrip_except_escaped(str, dn_index)
- str_ends_with_whitespace = str.match(/\s\z/)
-
- if str_ends_with_whitespace
- dn_part_ends_with_escaped_whitespace = @dn[0, dn_index].match(/\\(\s+)\z/)
-
- if dn_part_ends_with_escaped_whitespace
- dn_part_rwhitespace = dn_part_ends_with_escaped_whitespace[1]
- num_chars_to_remove = dn_part_rwhitespace.length - 1
- str = str[0, str.length - num_chars_to_remove]
- else
- str.rstrip!
- end
- end
-
- str
- end
-
- ##
- # Returns the DN as an array in the form expected by the constructor.
- def to_a
- a = []
- self.each_pair { |key, value| a << key << value } unless @dn.empty?
- a
- end
-
- ##
- # Return the DN as an escaped string.
- def to_s
- @dn
- end
-
- ##
- # Return the DN as an escaped and normalized string.
- def to_normalized_s
- self.class.new(*to_a).to_s.downcase
- end
-
- # https://tools.ietf.org/html/rfc4514 section 2.4 lists these exceptions
- # for DN values. All of the following must be escaped in any normal string
- # using a single backslash ('\') as escape. The space character is left
- # out here because in a "normalized" string, spaces should only be escaped
- # if necessary (i.e. leading or trailing space).
- NORMAL_ESCAPES = [',', '+', '"', '\\', '<', '>', ';', '='].freeze
-
- # The following must be represented as escaped hex
- HEX_ESCAPES = {
- "\n" => '\0a',
- "\r" => '\0d'
- }.freeze
-
- # Compiled character class regexp using the keys from the above hash, and
- # checking for a space or # at the start, or space at the end, of the
- # string.
- ESCAPE_RE = Regexp.new("(^ |^#| $|[" +
- NORMAL_ESCAPES.map { |e| Regexp.escape(e) }.join +
- "])")
-
- HEX_ESCAPE_RE = Regexp.new("([" +
- HEX_ESCAPES.keys.map { |e| Regexp.escape(e) }.join +
- "])")
-
- ##
- # Escape a string for use in a DN value
- def self.escape(string)
- escaped = string.gsub(ESCAPE_RE) { |char| "\\" + char }
- escaped.gsub(HEX_ESCAPE_RE) { |char| HEX_ESCAPES[char] }
- end
-
- private
-
- def initialize_array(args)
- buffer = StringIO.new
-
- args.each_with_index do |arg, index|
- if index.even? # key
- buffer << "," if index > 0
- buffer << arg
- else # value
- buffer << "="
- buffer << self.class.escape(arg)
- end
- end
-
- @dn = buffer.string
- end
-
- def initialize_string(arg)
- @dn = arg.to_s
- end
-
- ##
- # Proxy all other requests to the string object, because a DN is mainly
- # used within the library as a string
- # rubocop:disable GitlabSecurity/PublicSend
- def method_missing(method, *args, &block)
- @dn.send(method, *args, &block)
- end
-
- ##
- # Redefined to be consistent with redefined `method_missing` behavior
- def respond_to?(sym, include_private = false)
- @dn.respond_to?(sym, include_private)
- end
- end
- end
- end
- end
-
- def perform(start_id, end_id)
- return unless migrate?
-
- ldap_identities = Identity.where("provider like 'ldap%'").where(id: start_id..end_id)
- ldap_identities.each do |identity|
- identity.extern_uid = Gitlab::Auth::LDAP::DN.new(identity.extern_uid).to_normalized_s
- unless identity.save
- Rails.logger.info "Unable to normalize \"#{identity.extern_uid}\". Skipping."
- end
- rescue Gitlab::Auth::LDAP::DN::FormatError => e
- Rails.logger.info "Unable to normalize \"#{identity.extern_uid}\" due to \"#{e.message}\". Skipping."
- end
- end
-
- def migrate?
- Identity.table_exists?
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/populate_fork_networks_range.rb b/lib/gitlab/background_migration/populate_fork_networks_range.rb
deleted file mode 100644
index aa4f130538c..00000000000
--- a/lib/gitlab/background_migration/populate_fork_networks_range.rb
+++ /dev/null
@@ -1,128 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module BackgroundMigration
- # This background migration is going to create all `fork_networks` and
- # the `fork_network_members` for the roots of fork networks based on the
- # existing `forked_project_links`.
- #
- # When the source of a fork is deleted, we will create the fork with the
- # target project as the root. This way, when there are forks of the target
- # project, they will be joined into the same fork network.
- #
- # When the `fork_networks` and memberships for the root projects are created
- # the `CreateForkNetworkMembershipsRange` migration is scheduled. This
- # migration will create the memberships for all remaining forks-of-forks
- class PopulateForkNetworksRange
- def perform(start_id, end_id)
- create_fork_networks_for_existing_projects(start_id, end_id)
- create_fork_networks_for_missing_projects(start_id, end_id)
- create_fork_networks_memberships_for_root_projects(start_id, end_id)
-
- delay = BackgroundMigration::CreateForkNetworkMembershipsRange::RESCHEDULE_DELAY
- BackgroundMigrationWorker.perform_in(
- delay, "CreateForkNetworkMembershipsRange", [start_id, end_id]
- )
- end
-
- def create_fork_networks_for_existing_projects(start_id, end_id)
- log("Creating fork networks: #{start_id} - #{end_id}")
- ActiveRecord::Base.connection.execute <<~INSERT_NETWORKS
- INSERT INTO fork_networks (root_project_id)
- SELECT DISTINCT forked_project_links.forked_from_project_id
-
- FROM forked_project_links
-
- -- Exclude the forks that are not the first level fork of a project
- WHERE NOT EXISTS (
- SELECT true
- FROM forked_project_links inner_links
- WHERE inner_links.forked_to_project_id = forked_project_links.forked_from_project_id
- )
-
- /* Exclude the ones that are already created, in case the fork network
- was already created for another fork of the project.
- */
- AND NOT EXISTS (
- SELECT true
- FROM fork_networks
- WHERE forked_project_links.forked_from_project_id = fork_networks.root_project_id
- )
-
- -- Only create a fork network for a root project that still exists
- AND EXISTS (
- SELECT true
- FROM projects
- WHERE projects.id = forked_project_links.forked_from_project_id
- )
- AND forked_project_links.id BETWEEN #{start_id} AND #{end_id}
- INSERT_NETWORKS
- end
-
- def create_fork_networks_for_missing_projects(start_id, end_id)
- log("Creating fork networks with missing root: #{start_id} - #{end_id}")
- ActiveRecord::Base.connection.execute <<~INSERT_NETWORKS
- INSERT INTO fork_networks (root_project_id)
- SELECT DISTINCT forked_project_links.forked_to_project_id
-
- FROM forked_project_links
-
- -- Exclude forks that are not the root forks
- WHERE NOT EXISTS (
- SELECT true
- FROM forked_project_links inner_links
- WHERE inner_links.forked_to_project_id = forked_project_links.forked_from_project_id
- )
-
- /* Exclude the ones that are already created, in case this migration is
- re-run
- */
- AND NOT EXISTS (
- SELECT true
- FROM fork_networks
- WHERE forked_project_links.forked_to_project_id = fork_networks.root_project_id
- )
-
- /* Exclude projects for which the project still exists, those are
- Processed in the previous step of this migration
- */
- AND NOT EXISTS (
- SELECT true
- FROM projects
- WHERE projects.id = forked_project_links.forked_from_project_id
- )
- AND forked_project_links.id BETWEEN #{start_id} AND #{end_id}
- INSERT_NETWORKS
- end
-
- def create_fork_networks_memberships_for_root_projects(start_id, end_id)
- log("Creating memberships for root projects: #{start_id} - #{end_id}")
-
- ActiveRecord::Base.connection.execute <<~INSERT_ROOT
- INSERT INTO fork_network_members (fork_network_id, project_id)
- SELECT DISTINCT fork_networks.id, fork_networks.root_project_id
-
- FROM fork_networks
-
- /* Joining both on forked_from- and forked_to- so we could create the
- memberships for forks for which the source was deleted
- */
- INNER JOIN forked_project_links
- ON forked_project_links.forked_from_project_id = fork_networks.root_project_id
- OR forked_project_links.forked_to_project_id = fork_networks.root_project_id
-
- WHERE NOT EXISTS (
- SELECT true
- FROM fork_network_members
- WHERE fork_network_members.project_id = fork_networks.root_project_id
- )
- AND forked_project_links.id BETWEEN #{start_id} AND #{end_id}
- INSERT_ROOT
- end
-
- def log(message)
- Rails.logger.info("#{self.class.name} - #{message}")
- end
- end
- end
-end
diff --git a/lib/gitlab/background_migration/populate_merge_requests_latest_merge_request_diff_id.rb b/lib/gitlab/background_migration/populate_merge_requests_latest_merge_request_diff_id.rb
deleted file mode 100644
index dcac355e1b0..00000000000
--- a/lib/gitlab/background_migration/populate_merge_requests_latest_merge_request_diff_id.rb
+++ /dev/null
@@ -1,33 +0,0 @@
-# frozen_string_literal: true
-# rubocop:disable Style/Documentation
-
-module Gitlab
- module BackgroundMigration
- class PopulateMergeRequestsLatestMergeRequestDiffId
- BATCH_SIZE = 1_000
-
- class MergeRequest < ActiveRecord::Base
- self.table_name = 'merge_requests'
-
- include ::EachBatch
- end
-
- def perform(start_id, stop_id)
- update = '
- latest_merge_request_diff_id = (
- SELECT MAX(id)
- FROM merge_request_diffs
- WHERE merge_requests.id = merge_request_diffs.merge_request_id
- )'.squish
-
- MergeRequest
- .where(id: start_id..stop_id)
- .where(latest_merge_request_diff_id: nil)
- .each_batch(of: BATCH_SIZE) do |relation|
-
- relation.update_all(update)
- end
- end
- end
- end
-end
diff --git a/lib/gitlab/ci/build/prerequisite/kubernetes_namespace.rb b/lib/gitlab/ci/build/prerequisite/kubernetes_namespace.rb
index 49c680605ea..e6e0aaab60b 100644
--- a/lib/gitlab/ci/build/prerequisite/kubernetes_namespace.rb
+++ b/lib/gitlab/ci/build/prerequisite/kubernetes_namespace.rb
@@ -20,7 +20,7 @@ module Gitlab
private
def deployment_cluster
- build.deployment&.deployment_platform_cluster
+ build.deployment&.cluster
end
def kubernetes_namespace
diff --git a/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml b/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml
index 65a6630365d..cf3d261c1cb 100644
--- a/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml
@@ -74,16 +74,16 @@ stages:
- cleanup
include:
- - template: Jobs/Build.gitlab-ci.yml
- - template: Jobs/Test.gitlab-ci.yml
- - template: Jobs/Code-Quality.gitlab-ci.yml
- - template: Jobs/Deploy.gitlab-ci.yml
- - template: Jobs/Browser-Performance-Testing.gitlab-ci.yml
- - template: Security/DAST.gitlab-ci.yml
- - template: Security/Container-Scanning.gitlab-ci.yml
- - template: Security/Dependency-Scanning.gitlab-ci.yml
- - template: Security/License-Management.gitlab-ci.yml
- - template: Security/SAST.gitlab-ci.yml
+ - template: Jobs/Build.gitlab-ci.yml # https://gitlab.com/gitlab-org/gitlab-ce/blob/master/lib/gitlab/ci/templates/Jobs/Build.gitlab-ci.yml
+ - template: Jobs/Test.gitlab-ci.yml # https://gitlab.com/gitlab-org/gitlab-ce/blob/master/lib/gitlab/ci/templates/Jobs/Test.gitlab-ci.yml
+ - template: Jobs/Code-Quality.gitlab-ci.yml # https://gitlab.com/gitlab-org/gitlab-ce/blob/master/lib/gitlab/ci/templates/Jobs/Code-Quality.gitlab-ci.yml
+ - template: Jobs/Deploy.gitlab-ci.yml # https://gitlab.com/gitlab-org/gitlab-ce/blob/master/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml
+ - template: Jobs/Browser-Performance-Testing.gitlab-ci.yml # https://gitlab.com/gitlab-org/gitlab-ce/blob/master/lib/gitlab/ci/templates/Jobs/Browser-Performance-Testing.gitlab-ci.yml
+ - template: Security/DAST.gitlab-ci.yml # https://gitlab.com/gitlab-org/gitlab-ce/blob/master/lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml
+ - template: Security/Container-Scanning.gitlab-ci.yml # https://gitlab.com/gitlab-org/gitlab-ce/blob/master/lib/gitlab/ci/templates/Security/Container-Scanning.gitlab-ci.yml
+ - template: Security/Dependency-Scanning.gitlab-ci.yml # https://gitlab.com/gitlab-org/gitlab-ce/blob/master/lib/gitlab/ci/templates/Security/Dependency-Scanning.gitlab-ci.yml
+ - template: Security/License-Management.gitlab-ci.yml # https://gitlab.com/gitlab-org/gitlab-ce/blob/master/lib/gitlab/ci/templates/Security/License-Management.gitlab-ci.yml
+ - template: Security/SAST.gitlab-ci.yml # https://gitlab.com/gitlab-org/gitlab-ce/blob/master/lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml
# Override DAST job to exclude master branch
dast:
diff --git a/lib/gitlab/ci/templates/Pages/Jekyll.gitlab-ci.yml b/lib/gitlab/ci/templates/Pages/Jekyll.gitlab-ci.yml
index 0d742aa282d..e7dacd3a1fc 100644
--- a/lib/gitlab/ci/templates/Pages/Jekyll.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Pages/Jekyll.gitlab-ci.yml
@@ -4,6 +4,7 @@ image: ruby:2.3
variables:
JEKYLL_ENV: production
+ LC_ALL: C.UTF-8
before_script:
- bundle install
diff --git a/lib/gitlab/ci/trace.rb b/lib/gitlab/ci/trace.rb
index dfae260239e..ce5857965bf 100644
--- a/lib/gitlab/ci/trace.rb
+++ b/lib/gitlab/ci/trace.rb
@@ -5,7 +5,7 @@ module Gitlab
class Trace
include ::Gitlab::ExclusiveLeaseHelpers
- LOCK_TTL = 1.minute
+ LOCK_TTL = 10.minutes
LOCK_RETRIES = 2
LOCK_SLEEP = 0.001.seconds
diff --git a/lib/gitlab/cleanup/orphan_job_artifact_files.rb b/lib/gitlab/cleanup/orphan_job_artifact_files.rb
new file mode 100644
index 00000000000..ee7164b3e55
--- /dev/null
+++ b/lib/gitlab/cleanup/orphan_job_artifact_files.rb
@@ -0,0 +1,132 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Cleanup
+ class OrphanJobArtifactFiles
+ include Gitlab::Utils::StrongMemoize
+
+ ABSOLUTE_ARTIFACT_DIR = ::JobArtifactUploader.root.freeze
+ LOST_AND_FOUND = File.join(ABSOLUTE_ARTIFACT_DIR, '-', 'lost+found').freeze
+ BATCH_SIZE = 500
+ DEFAULT_NICENESS = 'Best-effort'
+
+ attr_accessor :batch, :total_found, :total_cleaned
+ attr_reader :limit, :dry_run, :niceness, :logger
+
+ def initialize(limit: nil, dry_run: true, niceness: nil, logger: nil)
+ @limit = limit
+ @dry_run = dry_run
+ @niceness = niceness || DEFAULT_NICENESS
+ @logger = logger || Rails.logger
+ @total_found = @total_cleaned = 0
+
+ new_batch!
+ end
+
+ def run!
+ log_info('Looking for orphan job artifacts to clean up')
+
+ find_artifacts do |artifact_file|
+ batch << artifact_file
+
+ clean_batch! if batch.full?
+ break if limit_reached?
+ end
+
+ clean_batch!
+
+ log_info("Processed #{total_found} job artifacts to find and clean #{total_cleaned} orphans.")
+ end
+
+ private
+
+ def new_batch!
+ self.batch = ::Gitlab::Cleanup::OrphanJobArtifactFilesBatch
+ .new(batch_size: batch_size, logger: logger, dry_run: dry_run)
+ end
+
+ def clean_batch!
+ batch.clean!
+
+ update_stats!(batch)
+
+ new_batch!
+ end
+
+ def update_stats!(batch)
+ self.total_found += batch.artifact_files.count
+ self.total_cleaned += batch.lost_and_found.count
+ end
+
+ def limit_reached?
+ return false unless limit
+
+ total_cleaned >= limit
+ end
+
+ def batch_size
+ return BATCH_SIZE unless limit
+ return if limit_reached?
+
+ todo = limit - total_cleaned
+ [BATCH_SIZE, todo].min
+ end
+
+ def find_artifacts
+ Open3.popen3(*find_command) do |stdin, stdout, stderr, status_thread|
+ stdout.each_line do |line|
+ yield line
+ end
+
+ log_error(stderr.read.color(:red)) unless status_thread.value.success?
+ end
+ end
+
+ def find_command
+ strong_memoize(:find_command) do
+ cmd = %W[find -L #{absolute_artifact_dir}]
+
+ # Search for Job Artifact IDs, they are found 6 directory
+ # levels deep. For example:
+ # shared/artifacts/2c/62/2c...a3/2019_02_27/836/628/job.log
+ # 1 2 3 4 5 6
+ # | | | ^- date | ^- Job Artifact ID
+ # | | | ^- Job ID
+ # ^--+--+- components of hashed storage project path
+ cmd += %w[-mindepth 6 -maxdepth 6]
+
+ # Artifact directories are named on their ID
+ cmd += %w[-type d]
+
+ if ionice
+ raise ArgumentError, 'Invalid niceness' unless niceness.match?(/^\w[\w\-]*$/)
+
+ cmd.unshift(*%W[#{ionice} --class #{niceness}])
+ end
+
+ log_info("find command: '#{cmd.join(' ')}'")
+
+ cmd
+ end
+ end
+
+ def absolute_artifact_dir
+ File.absolute_path(ABSOLUTE_ARTIFACT_DIR)
+ end
+
+ def ionice
+ strong_memoize(:ionice) do
+ Gitlab::Utils.which('ionice')
+ end
+ end
+
+ def log_info(msg, params = {})
+ logger.info("#{'[DRY RUN]' if dry_run} #{msg}")
+ end
+
+ def log_error(msg, params = {})
+ logger.error(msg)
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/cleanup/orphan_job_artifact_files_batch.rb b/lib/gitlab/cleanup/orphan_job_artifact_files_batch.rb
new file mode 100644
index 00000000000..5c30258c0fc
--- /dev/null
+++ b/lib/gitlab/cleanup/orphan_job_artifact_files_batch.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Cleanup
+ class OrphanJobArtifactFilesBatch
+ BatchFull = Class.new(StandardError)
+
+ class ArtifactFile
+ attr_accessor :path
+
+ def initialize(path)
+ @path = path
+ end
+
+ def artifact_id
+ path.split('/').last.to_i
+ end
+ end
+
+ include Gitlab::Utils::StrongMemoize
+
+ attr_reader :batch_size, :dry_run
+ attr_accessor :artifact_files
+
+ def initialize(batch_size:, dry_run: true, logger: Rails.logger)
+ @batch_size = batch_size
+ @dry_run = dry_run
+ @logger = logger
+ @artifact_files = []
+ end
+
+ def clean!
+ return if artifact_files.empty?
+
+ lost_and_found.each do |artifact|
+ clean_one!(artifact)
+ end
+ end
+
+ def full?
+ artifact_files.count >= batch_size
+ end
+
+ def <<(artifact_path)
+ raise BatchFull, "Batch full! Already contains #{artifact_files.count} artifacts" if full?
+
+ artifact_files << ArtifactFile.new(artifact_path)
+ end
+
+ def lost_and_found
+ strong_memoize(:lost_and_found) do
+ artifact_file_ids = artifact_files.map(&:artifact_id)
+ existing_artifact_ids = ::Ci::JobArtifact.id_in(artifact_file_ids).pluck_primary_key
+
+ artifact_files.reject { |artifact| existing_artifact_ids.include?(artifact.artifact_id) }
+ end
+ end
+
+ private
+
+ def clean_one!(artifact_file)
+ log_debug("Found orphan job artifact file @ #{artifact_file.path}")
+
+ remove_file!(artifact_file) unless dry_run
+ end
+
+ def remove_file!(artifact_file)
+ FileUtils.rm_rf(artifact_file.path)
+ end
+
+ def log_info(msg, params = {})
+ @logger.info("#{'[DRY RUN]' if dry_run} #{msg}")
+ end
+
+ def log_debug(msg, params = {})
+ @logger.debug(msg)
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/cluster/lifecycle_events.rb b/lib/gitlab/cluster/lifecycle_events.rb
index e0f9eb59924..8f796748199 100644
--- a/lib/gitlab/cluster/lifecycle_events.rb
+++ b/lib/gitlab/cluster/lifecycle_events.rb
@@ -11,6 +11,9 @@ module Gitlab
# We have three lifecycle events.
#
# - before_fork (only in forking processes)
+ # In forking processes (Unicorn and Puma in multiprocess mode) this
+ # will be called exactly once, on startup, before the workers are
+ # forked. This will be called in the parent process.
# - worker_start
# - before_master_restart (only in forking processes)
#
diff --git a/lib/gitlab/database.rb b/lib/gitlab/database.rb
index e4d4779ba9a..34c1e6ad8ca 100644
--- a/lib/gitlab/database.rb
+++ b/lib/gitlab/database.rb
@@ -2,6 +2,8 @@
module Gitlab
module Database
+ include Gitlab::Metrics::Methods
+
# The max value of INTEGER type is the same between MySQL and PostgreSQL:
# https://www.postgresql.org/docs/9.2/static/datatype-numeric.html
# http://dev.mysql.com/doc/refman/5.7/en/integer-types.html
@@ -11,6 +13,15 @@ module Gitlab
# https://dev.mysql.com/doc/refman/5.7/en/datetime.html
MAX_TIMESTAMP_VALUE = Time.at((1 << 31) - 1).freeze
+ # Minimum schema version from which migrations are supported
+ # Migrations before this version may have been removed
+ MIN_SCHEMA_VERSION = 20190506135400
+ MIN_SCHEMA_GITLAB_VERSION = '11.11.0'
+
+ define_histogram :gitlab_database_transaction_seconds do
+ docstring "Time spent in database transactions, in seconds"
+ end
+
def self.config
ActiveRecord::Base.configurations[Rails.env]
end
@@ -286,5 +297,32 @@ module Gitlab
0
end
private_class_method :open_transactions_baseline
+
+ # Monkeypatch rails with upgraded database observability
+ def self.install_monkey_patches
+ ActiveRecord::Base.prepend(ActiveRecordBaseTransactionMetrics)
+ end
+
+ # observe_transaction_duration is called from ActiveRecordBaseTransactionMetrics.transaction and used to
+ # record transaction durations.
+ def self.observe_transaction_duration(duration_seconds)
+ labels = Gitlab::Metrics::Transaction.current&.labels || {}
+ gitlab_database_transaction_seconds.observe(labels, duration_seconds)
+ rescue Prometheus::Client::LabelSetValidator::LabelSetError => err
+ # Ensure that errors in recording these metrics don't affect the operation of the application
+ Rails.logger.error("Unable to observe database transaction duration: #{err}")
+ end
+
+ # MonkeyPatch for ActiveRecord::Base for adding observability
+ module ActiveRecordBaseTransactionMetrics
+ # A monkeypatch over ActiveRecord::Base.transaction.
+ # It provides observability into transactional methods.
+ def transaction(options = {}, &block)
+ start_time = Gitlab::Metrics::System.monotonic_time
+ super(options, &block)
+ ensure
+ Gitlab::Database.observe_transaction_duration(Gitlab::Metrics::System.monotonic_time - start_time)
+ end
+ end
end
end
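The monkeypatch above relies on Module#prepend: the wrapper module sits in front of ActiveRecord::Base in the ancestor chain, so super still runs the original transaction and the ensure clause records the elapsed monotonic time even when the block raises. A stripped-down sketch of the same pattern with illustrative names:

module TimedGreeting
  def greet(name)
    start = Process.clock_gettime(Process::CLOCK_MONOTONIC)
    super(name)  # the original method still runs
  ensure
    elapsed = Process.clock_gettime(Process::CLOCK_MONOTONIC) - start
    puts "greet took #{elapsed.round(6)}s"
  end
end

class Greeter
  def greet(name)
    "Hello, #{name}"
  end
end

Greeter.prepend(TimedGreeting)
Greeter.new.greet('world')  # prints the timing, still returns "Hello, world"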
diff --git a/lib/gitlab/database/migration_helpers.rb b/lib/gitlab/database/migration_helpers.rb
index 0b12e862ded..e2cbf91f281 100644
--- a/lib/gitlab/database/migration_helpers.rb
+++ b/lib/gitlab/database/migration_helpers.rb
@@ -434,7 +434,8 @@ module Gitlab
end
begin
- update_column_in_batches(table, column, default, &block)
+ default_after_type_cast = connection.type_cast(default, column_for(table, column))
+ update_column_in_batches(table, column, default_after_type_cast, &block)
change_column_null(table, column, false) unless allow_null
# We want to rescue _all_ exceptions here, even those that don't inherit
diff --git a/lib/gitlab/diff/lines_unfolder.rb b/lib/gitlab/diff/lines_unfolder.rb
index 6cf904b2b2a..0bd18fe9622 100644
--- a/lib/gitlab/diff/lines_unfolder.rb
+++ b/lib/gitlab/diff/lines_unfolder.rb
@@ -54,7 +54,7 @@ module Gitlab
def unfold_required?
strong_memoize(:unfold_required) do
next false unless @diff_file.text?
- next false unless @position.unchanged?
+ next false unless @position.on_text? && @position.unchanged?
next false if @diff_file.new_file? || @diff_file.deleted_file?
next false unless @position.old_line
# Invalid position (MR import scenario)
diff --git a/lib/gitlab/git/raw_diff_change.rb b/lib/gitlab/git/raw_diff_change.rb
index e1002af40f6..9a41f04a4db 100644
--- a/lib/gitlab/git/raw_diff_change.rb
+++ b/lib/gitlab/git/raw_diff_change.rb
@@ -11,8 +11,8 @@ module Gitlab
if raw_change.is_a?(Gitaly::GetRawChangesResponse::RawChange)
@blob_id = raw_change.blob_id
@blob_size = raw_change.size
- @old_path = raw_change.old_path.presence
- @new_path = raw_change.new_path.presence
+ @old_path = raw_change.old_path_bytes.presence
+ @new_path = raw_change.new_path_bytes.presence
@operation = raw_change.operation&.downcase || :unknown
else
parse(raw_change)
diff --git a/lib/gitlab/graphql/copy_field_description.rb b/lib/gitlab/graphql/copy_field_description.rb
new file mode 100644
index 00000000000..edd73083ff2
--- /dev/null
+++ b/lib/gitlab/graphql/copy_field_description.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Graphql
+ module CopyFieldDescription
+ extend ActiveSupport::Concern
+
+ class_methods do
+ # Returns the `description` for property of field `field_name` on type.
+ # This can be used to ensure, for example, that mutation argument descriptions
+ # are always identical to the corresponding query field descriptions.
+ #
+ # E.g.:
+ # argument :name, GraphQL::STRING_TYPE, description: copy_field_description(Types::UserType, :name)
+ def copy_field_description(type, field_name)
+ type.fields[field_name.to_s.camelize(:lower)].description
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/graphql/errors.rb b/lib/gitlab/graphql/errors.rb
index fe74549e322..40b90310e8b 100644
--- a/lib/gitlab/graphql/errors.rb
+++ b/lib/gitlab/graphql/errors.rb
@@ -6,6 +6,7 @@ module Gitlab
BaseError = Class.new(GraphQL::ExecutionError)
ArgumentError = Class.new(BaseError)
ResourceNotAvailable = Class.new(BaseError)
+ MutationError = Class.new(BaseError)
end
end
end
diff --git a/lib/gitlab/graphql/find_argument_in_parent.rb b/lib/gitlab/graphql/find_argument_in_parent.rb
new file mode 100644
index 00000000000..1f83f8fce7a
--- /dev/null
+++ b/lib/gitlab/graphql/find_argument_in_parent.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Graphql
+ module FindArgumentInParent
+ # Searches up the GraphQL AST and returns the first matching argument
+ # passed to a node
+ def self.find(parent, argument, limit_depth: nil)
+ argument = argument.to_s.camelize(:lower).to_sym
+ depth = 0
+
+ while parent.respond_to?(:parent)
+ args = node_args(parent)
+ return args[argument] if args.key?(argument)
+
+ depth += 1
+ return if limit_depth && depth >= limit_depth
+
+ parent = parent.parent
+ end
+ end
+
+ class << self
+ private
+
+ def node_args(node)
+ node.irep_node.arguments
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/graphql/loaders/pipeline_for_sha_loader.rb b/lib/gitlab/graphql/loaders/pipeline_for_sha_loader.rb
new file mode 100644
index 00000000000..81c5cabf451
--- /dev/null
+++ b/lib/gitlab/graphql/loaders/pipeline_for_sha_loader.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Graphql
+ module Loaders
+ class PipelineForShaLoader
+ attr_accessor :project, :sha
+
+ def initialize(project, sha)
+ @project, @sha = project, sha
+ end
+
+ def find_last
+ BatchLoader.for(sha).batch(key: project) do |shas, loader, args|
+ pipelines = args[:key].ci_pipelines.latest_for_shas(shas)
+
+ pipelines.each do |pipeline|
+ loader.call(pipeline.sha, pipeline)
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/legacy_github_import/release_formatter.rb b/lib/gitlab/legacy_github_import/release_formatter.rb
index 746786b5a66..fdab6b512ea 100644
--- a/lib/gitlab/legacy_github_import/release_formatter.rb
+++ b/lib/gitlab/legacy_github_import/release_formatter.rb
@@ -10,6 +10,7 @@ module Gitlab
name: raw_data.name,
description: raw_data.body,
created_at: raw_data.created_at,
+ released_at: raw_data.published_at,
updated_at: raw_data.created_at
}
end
diff --git a/lib/gitlab/metrics/dashboard/base_service.rb b/lib/gitlab/metrics/dashboard/base_service.rb
index 90895eb237a..0628e82e592 100644
--- a/lib/gitlab/metrics/dashboard/base_service.rb
+++ b/lib/gitlab/metrics/dashboard/base_service.rb
@@ -10,6 +10,8 @@ module Gitlab
NOT_FOUND_ERROR = Gitlab::Template::Finders::RepoTemplateFinder::FileNotFoundError
def get_dashboard
+ return error('Insufficient permissions.', :unauthorized) unless allowed?
+
success(dashboard: process_dashboard)
rescue NOT_FOUND_ERROR
error("#{dashboard_path} could not be found.", :not_found)
@@ -30,6 +32,12 @@ module Gitlab
private
+ # Determines whether users should be able to view
+ # dashboards at all.
+ def allowed?
+ Ability.allowed?(current_user, :read_environment, project)
+ end
+
# Returns a new dashboard Hash, supplemented with DB info
def process_dashboard
Gitlab::Metrics::Dashboard::Processor
diff --git a/lib/gitlab/metrics/system.rb b/lib/gitlab/metrics/system.rb
index 33c0de91c11..34de40ca72f 100644
--- a/lib/gitlab/metrics/system.rb
+++ b/lib/gitlab/metrics/system.rb
@@ -57,17 +57,9 @@ module Gitlab
end
end
- # THREAD_CPUTIME is not supported on OS X
- if Process.const_defined?(:CLOCK_THREAD_CPUTIME_ID)
- def self.cpu_time
- Process
- .clock_gettime(Process::CLOCK_THREAD_CPUTIME_ID, :float_second)
- end
- else
- def self.cpu_time
- Process
- .clock_gettime(Process::CLOCK_PROCESS_CPUTIME_ID, :float_second)
- end
+ def self.cpu_time
+ Process
+ .clock_gettime(Process::CLOCK_PROCESS_CPUTIME_ID, :float_second)
end
# Returns the current real time in a given precision.
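With the per-thread clock gone, cpu_time always reads CLOCK_PROCESS_CPUTIME_ID, which accumulates CPU time across all threads of the process. A small standard-library sketch of what that measures:

before = Process.clock_gettime(Process::CLOCK_PROCESS_CPUTIME_ID, :float_second)
100_000.times { Math.sqrt(rand) }
after = Process.clock_gettime(Process::CLOCK_PROCESS_CPUTIME_ID, :float_second)
puts "CPU time used by the whole process: #{(after - before).round(4)}s"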
diff --git a/lib/gitlab/optimistic_locking.rb b/lib/gitlab/optimistic_locking.rb
index 868b2ae641a..0c0f46d3b77 100644
--- a/lib/gitlab/optimistic_locking.rb
+++ b/lib/gitlab/optimistic_locking.rb
@@ -5,6 +5,7 @@ module Gitlab
module_function
def retry_lock(subject, retries = 100, &block)
+ # TODO(Observability): We should be recording details of the number of retries and the duration of the total execution here
ActiveRecord::Base.transaction do
yield(subject)
end
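retry_lock wraps each attempt in its own transaction and yields the record, which is how callers guard updates on rows protected by a lock_version column. A hedged usage sketch (the build example is illustrative, not taken from this diff):

Gitlab::OptimisticLocking.retry_lock(ci_build) do |build|
  build.drop!(:stale)  # illustrative mutation on a row guarded by optimistic locking
end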
diff --git a/lib/gitlab/search/found_blob.rb b/lib/gitlab/search/found_blob.rb
index 01ce90c85f7..fa09ecbdf30 100644
--- a/lib/gitlab/search/found_blob.rb
+++ b/lib/gitlab/search/found_blob.rb
@@ -28,7 +28,7 @@ module Gitlab
@binary_data = opts.fetch(:data, nil)
@per_page = opts.fetch(:per_page, 20)
@project = opts.fetch(:project, nil)
- # Some caller does not have project object (e.g. elastic search),
+ # Some callers (e.g. Elasticsearch) do not have the Project object,
# yet they can trigger many calls in one go,
# causing duplicated queries.
# Allow those to just pass project_id instead.
diff --git a/lib/gitlab/thread_memory_cache.rb b/lib/gitlab/thread_memory_cache.rb
new file mode 100644
index 00000000000..7f363dc7feb
--- /dev/null
+++ b/lib/gitlab/thread_memory_cache.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module Gitlab
+ class ThreadMemoryCache
+ THREAD_KEY = :thread_memory_cache
+
+ def self.cache_backend
+ # Note ActiveSupport::Cache::MemoryStore is thread-safe. Since
+ # each backend is local per thread we probably don't need to worry
+ # about synchronizing access, but this is a drop-in replacement
+ # for ActiveSupport::Cache::RedisStore.
+ Thread.current[THREAD_KEY] ||= ActiveSupport::Cache::MemoryStore.new
+ end
+ end
+end
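This per-thread MemoryStore is what the lib/feature.rb hunk earlier in this diff switches to; a fetch against it looks the same as against Rails.cache:

cache = Gitlab::ThreadMemoryCache.cache_backend

cache.fetch('flipper:persisted_names', expires_in: 1.minute) do
  FlipperFeature.feature_names  # only evaluated on a cache miss for this thread
end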
diff --git a/lib/peek/views/redis.rb b/lib/peek/views/redis.rb
new file mode 100644
index 00000000000..ad3c3c9fe01
--- /dev/null
+++ b/lib/peek/views/redis.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+require 'redis'
+require 'peek-redis'
+
+module Gitlab
+ module Peek
+ module RedisInstrumented
+ def call(*args, &block)
+ start = Time.now
+ super(*args, &block)
+ ensure
+ duration = (Time.now - start)
+ add_call_details(duration, args)
+ end
+
+ private
+
+ def add_call_details(duration, args)
+ # redis-rb passes an array (e.g. [:get, key])
+ return unless args.length == 1
+
+ detail_store << {
+ cmd: args.first,
+ duration: duration,
+ backtrace: Gitlab::Profiler.clean_backtrace(caller)
+ }
+ end
+
+ def detail_store
+ ::Gitlab::SafeRequestStore['redis_call_details'] ||= []
+ end
+ end
+ end
+end
+
+module Peek
+ module Views
+ module RedisDetailed
+ def results
+ super.merge(details: details)
+ end
+
+ def details
+ detail_store
+ .sort { |a, b| b[:duration] <=> a[:duration] }
+ .map(&method(:format_call_details))
+ end
+
+ def detail_store
+ ::Gitlab::SafeRequestStore['redis_call_details'] ||= []
+ end
+
+ def format_call_details(call)
+ call.merge(cmd: format_command(call[:cmd]),
+ duration: (call[:duration] * 1000).round(3))
+ end
+
+ def format_command(cmd)
+ # Scrub out the value of the SET calls to avoid binary
+ # data or large data from spilling into the view
+ if cmd.length >= 2 && cmd.first =~ /set/i
+ cmd[-1] = "<redacted>"
+ end
+
+ cmd.join(' ')
+ end
+ end
+ end
+end
+
+class Redis::Client
+ prepend Gitlab::Peek::RedisInstrumented
+end
+
+module Peek
+ module Views
+ class Redis < View
+ prepend Peek::Views::RedisDetailed
+ end
+ end
+end
diff --git a/lib/tasks/gitlab/cleanup.rake b/lib/tasks/gitlab/cleanup.rake
index 760331620ef..105ef417df3 100644
--- a/lib/tasks/gitlab/cleanup.rake
+++ b/lib/tasks/gitlab/cleanup.rake
@@ -115,6 +115,18 @@ namespace :gitlab do
end
end
+ desc 'GitLab | Cleanup | Clean orphan job artifact files'
+ task orphan_job_artifact_files: :gitlab_environment do
+ warn_user_is_not_gitlab
+
+ cleaner = Gitlab::Cleanup::OrphanJobArtifactFiles.new(limit: limit, dry_run: dry_run?, niceness: niceness, logger: logger)
+ cleaner.run!
+
+ if dry_run?
+ logger.info "To clean up these files run this command with DRY_RUN=false".color(:yellow)
+ end
+ end
+
def remove?
ENV['REMOVE'] == 'true'
end
@@ -123,12 +135,25 @@ namespace :gitlab do
ENV['DRY_RUN'] != 'false'
end
+ def debug?
+ ENV['DEBUG'].present?
+ end
+
+ def limit
+ ENV['LIMIT']&.to_i
+ end
+
+ def niceness
+ ENV['NICENESS'].presence
+ end
+
def logger
return @logger if defined?(@logger)
@logger = if Rails.env.development? || Rails.env.production?
Logger.new(STDOUT).tap do |stdout_logger|
stdout_logger.extend(ActiveSupport::Logger.broadcast(Rails.logger))
+ stdout_logger.level = debug? ? Logger::DEBUG : Logger::INFO
end
else
Rails.logger
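The new rake task above simply builds a Gitlab::Cleanup::OrphanJobArtifactFiles instance from the LIMIT, DRY_RUN and NICENESS environment variables and calls run!. Driving the cleaner directly, e.g. from a console, looks roughly like this (the limit value is illustrative):

cleaner = Gitlab::Cleanup::OrphanJobArtifactFiles.new(
  limit: 500,     # stop once this many orphans have been cleaned (nil means no limit)
  dry_run: true   # log candidates without deleting anything
)
cleaner.run!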
diff --git a/lib/tasks/migrate/schema_check.rake b/lib/tasks/migrate/schema_check.rake
new file mode 100644
index 00000000000..76f1f23c7bd
--- /dev/null
+++ b/lib/tasks/migrate/schema_check.rake
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+# Configures the database by running migrate, or by loading the schema and seeding if needed
+task schema_version_check: :environment do
+ next if ENV['SKIP_SCHEMA_VERSION_CHECK']
+
+ schema_version = ActiveRecord::Migrator.current_version
+
+ # Ensure migrations are being run from a supported schema version
+ # A schema version of 0 is a fresh db, and should be safe to run migrations
+ # But a database with existing migrations less than our min version is not
+ if schema_version > 0 && schema_version < Gitlab::Database::MIN_SCHEMA_VERSION
+ raise "Your current database version is too old to be migrated. " \
+ "You should upgrade to GitLab #{Gitlab::Database::MIN_SCHEMA_GITLAB_VERSION} before moving to this version. " \
+ "Please see https://docs.gitlab.com/ee/policy/maintenance.html#upgrade-recommendations"
+ end
+end
+
+# Ensure the check is a pre-requisite when running db:migrate
+Rake::Task["db:migrate"].enhance [:schema_version_check]
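Rake::Task#enhance is what hooks the check in: it appends :schema_version_check to db:migrate's prerequisites, and prerequisites always run before the task's own actions. A minimal standalone illustration with made-up task names:

# Rakefile
task :check do
  puts 'running pre-flight check'
end

task :deploy do
  puts 'deploying'
end

Rake::Task[:deploy].enhance [:check]  # `rake deploy` now prints the check line first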
diff --git a/lib/tasks/migrate/setup_postgresql.rake b/lib/tasks/migrate/setup_postgresql.rake
index f69d204c579..cda88c130bb 100644
--- a/lib/tasks/migrate/setup_postgresql.rake
+++ b/lib/tasks/migrate/setup_postgresql.rake
@@ -1,23 +1,9 @@
desc 'GitLab | Sets up PostgreSQL'
task setup_postgresql: :environment do
- require Rails.root.join('db/migrate/20151007120511_namespaces_projects_path_lower_indexes')
- require Rails.root.join('db/migrate/20151008110232_add_users_lower_username_email_indexes')
- require Rails.root.join('db/migrate/20161212142807_add_lower_path_index_to_routes')
- require Rails.root.join('db/migrate/20170317203554_index_routes_path_for_like')
- require Rails.root.join('db/migrate/20170724214302_add_lower_path_index_to_redirect_routes')
- require Rails.root.join('db/migrate/20170503185032_index_redirect_routes_path_for_like')
- require Rails.root.join('db/migrate/20171220191323_add_index_on_namespaces_lower_name.rb')
require Rails.root.join('db/migrate/20180215181245_users_name_lower_index.rb')
require Rails.root.join('db/migrate/20180504195842_project_name_lower_index.rb')
require Rails.root.join('db/post_migrate/20180306164012_add_path_index_to_redirect_routes.rb')
- NamespacesProjectsPathLowerIndexes.new.up
- AddUsersLowerUsernameEmailIndexes.new.up
- AddLowerPathIndexToRoutes.new.up
- IndexRoutesPathForLike.new.up
- AddLowerPathIndexToRedirectRoutes.new.up
- IndexRedirectRoutesPathForLike.new.up
- AddIndexOnNamespacesLowerName.new.up
UsersNameLowerIndex.new.up
ProjectNameLowerIndex.new.up
AddPathIndexToRedirectRoutes.new.up
diff --git a/lib/tasks/yarn.rake b/lib/tasks/yarn.rake
index 2ac88a039e7..32061ad4a57 100644
--- a/lib/tasks/yarn.rake
+++ b/lib/tasks/yarn.rake
@@ -24,7 +24,7 @@ namespace :yarn do
desc 'Install Node dependencies with Yarn'
task install: ['yarn:available'] do
- unless system('yarn install --pure-lockfile --ignore-engines')
+ unless system('yarn install --pure-lockfile --ignore-engines --prefer-offline')
abort 'Error: Unable to install node modules.'.color(:red)
end
end