author    Winnie Hellmann <winnie@gitlab.com>  2018-05-16 22:43:39 +0200
committer Winnie Hellmann <winnie@gitlab.com>  2018-05-16 22:43:39 +0200
commit    034d8ced8836f6380e758531d114f252dd13a666 (patch)
tree      cab7094dd820d6c3a299f3191820c57228484766 /app/models
parent    ddc29487a71d6557c6b1e7ca4e67f7a99384777b (diff)
parent    790fee0dc724c734c7430f773ae8f7f91df6bf2b (diff)
Remove unrelated changes from changes_tab_vue_refactoring (branch: winh-cleanup-changes_tab_vue_refactoring)
Diffstat (limited to 'app/models')
-rw-r--r--  app/models/ability.rb | 8
-rw-r--r--  app/models/application_setting.rb | 22
-rw-r--r--  app/models/application_setting/term.rb | 13
-rw-r--r--  app/models/ci/build.rb | 1
-rw-r--r--  app/models/ci/build_trace_chunk.rb | 180
-rw-r--r--  app/models/ci/pipeline.rb | 45
-rw-r--r--  app/models/ci/pipeline_variable.rb | 2
-rw-r--r--  app/models/ci/runner.rb | 79
-rw-r--r--  app/models/ci/runner_namespace.rb | 9
-rw-r--r--  app/models/clusters/applications/runner.rb | 10
-rw-r--r--  app/models/concerns/fast_destroy_all.rb | 91
-rw-r--r--  app/models/concerns/participable.rb | 4
-rw-r--r--  app/models/concerns/reactive_caching.rb | 17
-rw-r--r--  app/models/concerns/routable.rb | 4
-rw-r--r--  app/models/concerns/sha_attribute.rb | 30
-rw-r--r--  app/models/concerns/time_trackable.rb | 6
-rw-r--r--  app/models/group.rb | 41
-rw-r--r--  app/models/identity.rb | 8
-rw-r--r--  app/models/member.rb | 11
-rw-r--r--  app/models/merge_request.rb | 6
-rw-r--r--  app/models/namespace.rb | 10
-rw-r--r--  app/models/note.rb | 4
-rw-r--r--  app/models/project.rb | 232
-rw-r--r--  app/models/project_ci_cd_setting.rb | 2
-rw-r--r--  app/models/project_import_state.rb | 55
-rw-r--r--  app/models/remote_mirror.rb | 219
-rw-r--r--  app/models/repository.rb | 26
-rw-r--r--  app/models/sent_notification.rb | 4
-rw-r--r--  app/models/system_note_metadata.rb | 6
-rw-r--r--  app/models/term_agreement.rb | 6
-rw-r--r--  app/models/user.rb | 24
-rw-r--r--  app/models/user_callout.rb | 3
32 files changed, 1065 insertions, 113 deletions
diff --git a/app/models/ability.rb b/app/models/ability.rb
index 618d4af4272..bb600eaccba 100644
--- a/app/models/ability.rb
+++ b/app/models/ability.rb
@@ -10,6 +10,14 @@ class Ability
end
end
+ # Given a list of users and a group this method returns the users that can
+ # read the given group.
+ def users_that_can_read_group(users, group)
+ DeclarativePolicy.subject_scope do
+ users.select { |u| allowed?(u, :read_group, group) }
+ end
+ end
+
# Given a list of users and a snippet this method returns the users that can
# read the given snippet.
def users_that_can_read_personal_snippet(users, snippet)
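A hedged usage sketch (not part of the diff) of the new `users_that_can_read_group` helper; `candidate_users` and `group` are assumed to come from the caller, and the notification call is purely illustrative:

```ruby
# Keep only the users who are allowed to read the group before fanning out work.
readers = Ability.users_that_can_read_group(candidate_users, group)

readers.each do |user|
  notify_group_event(user, group) # illustrative callback, not a GitLab API
end
```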
diff --git a/app/models/application_setting.rb b/app/models/application_setting.rb
index 862933bf127..451e512aef7 100644
--- a/app/models/application_setting.rb
+++ b/app/models/application_setting.rb
@@ -220,12 +220,15 @@ class ApplicationSetting < ActiveRecord::Base
end
end
+ validate :terms_exist, if: :enforce_terms?
+
before_validation :ensure_uuid!
before_save :ensure_runners_registration_token
before_save :ensure_health_check_access_token
after_commit do
+ reset_memoized_terms
Rails.cache.write(CACHE_KEY, self)
end
@@ -331,7 +334,8 @@ class ApplicationSetting < ActiveRecord::Base
gitaly_timeout_fast: 10,
gitaly_timeout_medium: 30,
gitaly_timeout_default: 55,
- allow_local_requests_from_hooks_and_services: false
+ allow_local_requests_from_hooks_and_services: false,
+ mirror_available: true
}
end
@@ -507,6 +511,16 @@ class ApplicationSetting < ActiveRecord::Base
password_authentication_enabled_for_web? || password_authentication_enabled_for_git?
end
+ delegate :terms, to: :latest_terms, allow_nil: true
+ def latest_terms
+ @latest_terms ||= Term.latest
+ end
+
+ def reset_memoized_terms
+ @latest_terms = nil
+ latest_terms
+ end
+
private
def ensure_uuid!
@@ -520,4 +534,10 @@ class ApplicationSetting < ActiveRecord::Base
errors.add(:repository_storages, "can't include: #{invalid.join(", ")}") unless
invalid.empty?
end
+
+ def terms_exist
+ return unless enforce_terms?
+
+ errors.add(:terms, "You need to set terms to be enforced") unless terms.present?
+ end
end
diff --git a/app/models/application_setting/term.rb b/app/models/application_setting/term.rb
new file mode 100644
index 00000000000..e8ce0ccbb71
--- /dev/null
+++ b/app/models/application_setting/term.rb
@@ -0,0 +1,13 @@
+class ApplicationSetting
+ class Term < ActiveRecord::Base
+ include CacheMarkdownField
+
+ validates :terms, presence: true
+
+ cache_markdown_field :terms
+
+ def self.latest
+ order(:id).last
+ end
+ end
+end
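Taken together with the `ApplicationSetting` changes above, a hedged console sketch (not part of the diff) of how terms enforcement is expected to fit together; `ApplicationSetting.current` is the usual settings accessor, and the attribute names match the ones used in this commit:

```ruby
# A Term row must exist before the :terms_exist validation allows enforcement.
ApplicationSetting::Term.create!(terms: 'Sample terms of service text.')

settings = ApplicationSetting.current
settings.update!(enforce_terms: true) # valid because Term.latest is present
settings.terms                        # delegated to the memoized latest term
```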
diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb
index 9000ad860e9..61c10c427dd 100644
--- a/app/models/ci/build.rb
+++ b/app/models/ci/build.rb
@@ -19,6 +19,7 @@ module Ci
has_one :last_deployment, -> { order('deployments.id DESC') }, as: :deployable, class_name: 'Deployment'
has_many :trace_sections, class_name: 'Ci::BuildTraceSection'
+ has_many :trace_chunks, class_name: 'Ci::BuildTraceChunk', foreign_key: :build_id
has_many :job_artifacts, class_name: 'Ci::JobArtifact', foreign_key: :job_id, dependent: :destroy, inverse_of: :job # rubocop:disable Cop/ActiveRecordDependent
has_one :job_artifacts_archive, -> { where(file_type: Ci::JobArtifact.file_types[:archive]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
diff --git a/app/models/ci/build_trace_chunk.rb b/app/models/ci/build_trace_chunk.rb
new file mode 100644
index 00000000000..4856f10846c
--- /dev/null
+++ b/app/models/ci/build_trace_chunk.rb
@@ -0,0 +1,180 @@
+module Ci
+ class BuildTraceChunk < ActiveRecord::Base
+ include FastDestroyAll
+ extend Gitlab::Ci::Model
+
+ belongs_to :build, class_name: "Ci::Build", foreign_key: :build_id
+
+ default_value_for :data_store, :redis
+
+ WriteError = Class.new(StandardError)
+
+ CHUNK_SIZE = 128.kilobytes
+ CHUNK_REDIS_TTL = 1.week
+ WRITE_LOCK_RETRY = 10
+ WRITE_LOCK_SLEEP = 0.01.seconds
+ WRITE_LOCK_TTL = 1.minute
+
+ enum data_store: {
+ redis: 1,
+ db: 2
+ }
+
+ class << self
+ def redis_data_key(build_id, chunk_index)
+ "gitlab:ci:trace:#{build_id}:chunks:#{chunk_index}"
+ end
+
+ def redis_data_keys
+ redis.pluck(:build_id, :chunk_index).map do |data|
+ redis_data_key(data.first, data.second)
+ end
+ end
+
+ def redis_delete_data(keys)
+ return if keys.empty?
+
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.del(keys)
+ end
+ end
+
+ ##
+ # FastDestroyAll concerns
+ def begin_fast_destroy
+ redis_data_keys
+ end
+
+ ##
+ # FastDestroyAll concerns
+ def finalize_fast_destroy(keys)
+ redis_delete_data(keys)
+ end
+ end
+
+ ##
+ # Data is memoized for optimizing #size and #end_offset
+ def data
+ @data ||= get_data.to_s
+ end
+
+ def truncate(offset = 0)
+ raise ArgumentError, 'Offset is out of range' if offset > size || offset < 0
+ return if offset == size # Skip the following process as it doesn't affect anything
+
+ self.append("", offset)
+ end
+
+ def append(new_data, offset)
+ raise ArgumentError, 'Offset is out of range' if offset > size || offset < 0
+ raise ArgumentError, 'Chunk size overflow' if CHUNK_SIZE < (offset + new_data.bytesize)
+
+ set_data(data.byteslice(0, offset) + new_data)
+ end
+
+ def size
+ data&.bytesize.to_i
+ end
+
+ def start_offset
+ chunk_index * CHUNK_SIZE
+ end
+
+ def end_offset
+ start_offset + size
+ end
+
+ def range
+ (start_offset...end_offset)
+ end
+
+ def use_database!
+ in_lock do
+ break if db?
+ break unless size > 0
+
+ self.update!(raw_data: data, data_store: :db)
+ self.class.redis_delete_data([redis_data_key])
+ end
+ end
+
+ private
+
+ def get_data
+ if redis?
+ redis_data
+ elsif db?
+ raw_data
+ else
+ raise 'Unsupported data store'
+ end&.force_encoding(Encoding::BINARY) # Redis/Database return UTF-8 string as default
+ end
+
+ def set_data(value)
+ raise ArgumentError, 'too much data' if value.bytesize > CHUNK_SIZE
+
+ in_lock do
+ if redis?
+ redis_set_data(value)
+ elsif db?
+ self.raw_data = value
+ else
+ raise 'Unsupported data store'
+ end
+
+ @data = value
+
+ save! if changed?
+ end
+
+ schedule_to_db if full?
+ end
+
+ def schedule_to_db
+ return if db?
+
+ Ci::BuildTraceChunkFlushWorker.perform_async(id)
+ end
+
+ def full?
+ size == CHUNK_SIZE
+ end
+
+ def redis_data
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.get(redis_data_key)
+ end
+ end
+
+ def redis_set_data(data)
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.set(redis_data_key, data, ex: CHUNK_REDIS_TTL)
+ end
+ end
+
+ def redis_data_key
+ self.class.redis_data_key(build_id, chunk_index)
+ end
+
+ def in_lock
+ write_lock_key = "trace_write:#{build_id}:chunks:#{chunk_index}"
+
+ lease = Gitlab::ExclusiveLease.new(write_lock_key, timeout: WRITE_LOCK_TTL)
+ retry_count = 0
+
+ until uuid = lease.try_obtain
+ # Keep trying until we obtain the lease. To prevent hammering Redis too
+ # much we'll wait for a bit between retries.
+ sleep(WRITE_LOCK_SLEEP)
+ break if WRITE_LOCK_RETRY < (retry_count += 1)
+ end
+
+ raise WriteError, 'Failed to obtain write lock' unless uuid
+
+ self.reload if self.persisted?
+ return yield
+ ensure
+ Gitlab::ExclusiveLease.cancel(write_lock_key, uuid)
+ end
+ end
+end
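To make the chunk bookkeeping concrete, a small self-contained sketch (not part of the diff) of the offset arithmetic behind `#start_offset`, `#end_offset` and `#range`; plain Ruby, no database or Redis involved:

```ruby
chunk_size  = 128 * 1024               # mirrors CHUNK_SIZE (128.kilobytes)
chunk_index = 2
data        = "build log line\n" * 100 # 1500 bytes of trace data

start_offset = chunk_index * chunk_size      # => 262144
end_offset   = start_offset + data.bytesize  # => 263644
range        = (start_offset...end_offset)

# Writes past the chunk boundary are rejected by #append, which is what keeps
# each chunk at most CHUNK_SIZE bytes before the next chunk takes over.
raise ArgumentError, 'Chunk size overflow' if data.bytesize > chunk_size
```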
diff --git a/app/models/ci/pipeline.rb b/app/models/ci/pipeline.rb
index e1b9bc76475..1f49764e7cc 100644
--- a/app/models/ci/pipeline.rb
+++ b/app/models/ci/pipeline.rb
@@ -32,15 +32,21 @@ module Ci
has_many :auto_canceled_pipelines, class_name: 'Ci::Pipeline', foreign_key: 'auto_canceled_by_id'
has_many :auto_canceled_jobs, class_name: 'CommitStatus', foreign_key: 'auto_canceled_by_id'
+ accepts_nested_attributes_for :variables, reject_if: :persisted?
+
delegate :id, to: :project, prefix: true
delegate :full_path, to: :project, prefix: true
- validates :source, exclusion: { in: %w(unknown), unless: :importing? }, on: :create
validates :sha, presence: { unless: :importing? }
validates :ref, presence: { unless: :importing? }
validates :status, presence: { unless: :importing? }
validate :valid_commit_sha, unless: :importing?
+ # Replace validator below with
+ # `validates :source, presence: { unless: :importing? }, on: :create`
+ # when removing Gitlab.rails5? code.
+ validate :valid_source, unless: :importing?, on: :create
+
after_create :keep_around_commits, unless: :importing?
enum source: {
@@ -269,19 +275,39 @@ module Ci
end
def git_author_name
- commit.try(:author_name)
+ strong_memoize(:git_author_name) do
+ commit.try(:author_name)
+ end
end
def git_author_email
- commit.try(:author_email)
+ strong_memoize(:git_author_email) do
+ commit.try(:author_email)
+ end
end
def git_commit_message
- commit.try(:message)
+ strong_memoize(:git_commit_message) do
+ commit.try(:message)
+ end
end
def git_commit_title
- commit.try(:title)
+ strong_memoize(:git_commit_title) do
+ commit.try(:title)
+ end
+ end
+
+ def git_commit_full_title
+ strong_memoize(:git_commit_full_title) do
+ commit.try(:full_title)
+ end
+ end
+
+ def git_commit_description
+ strong_memoize(:git_commit_description) do
+ commit.try(:description)
+ end
end
def short_sha
@@ -491,6 +517,9 @@ module Ci
.append(key: 'CI_PIPELINE_ID', value: id.to_s)
.append(key: 'CI_CONFIG_PATH', value: ci_yaml_file_path)
.append(key: 'CI_PIPELINE_SOURCE', value: source.to_s)
+ .append(key: 'CI_COMMIT_MESSAGE', value: git_commit_message)
+ .append(key: 'CI_COMMIT_TITLE', value: git_commit_full_title)
+ .append(key: 'CI_COMMIT_DESCRIPTION', value: git_commit_description)
end
def queued_duration
@@ -576,5 +605,11 @@ module Ci
project.repository.keep_around(self.sha)
project.repository.keep_around(self.before_sha)
end
+
+ def valid_source
+ if source.nil? || source == "unknown"
+ errors.add(:source, "invalid source")
+ end
+ end
end
end
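The commit-attribute readers above now go through `strong_memoize`; a simplified sketch of the equivalent hand-rolled pattern (not the actual `Gitlab::Utils::StrongMemoize` implementation) shows what that buys:

```ruby
def git_commit_message
  # Memoize with `defined?` so a nil result (e.g. no commit) is cached too and
  # `commit.try(:message)` is not re-evaluated on every call.
  return @git_commit_message if defined?(@git_commit_message)

  @git_commit_message = commit.try(:message)
end
```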
diff --git a/app/models/ci/pipeline_variable.rb b/app/models/ci/pipeline_variable.rb
index de5aae17a15..38e14ffbc0c 100644
--- a/app/models/ci/pipeline_variable.rb
+++ b/app/models/ci/pipeline_variable.rb
@@ -5,6 +5,8 @@ module Ci
belongs_to :pipeline
+ alias_attribute :secret_value, :value
+
validates :key, uniqueness: { scope: :pipeline_id }
end
end
diff --git a/app/models/ci/runner.rb b/app/models/ci/runner.rb
index 5a4c56ec0dc..bda69f85a78 100644
--- a/app/models/ci/runner.rb
+++ b/app/models/ci/runner.rb
@@ -14,32 +14,51 @@ module Ci
has_many :builds
has_many :runner_projects, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :projects, through: :runner_projects
+ has_many :runner_namespaces
+ has_many :groups, through: :runner_namespaces
has_one :last_build, ->() { order('id DESC') }, class_name: 'Ci::Build'
before_validation :set_default_values
- scope :specific, ->() { where(is_shared: false) }
- scope :shared, ->() { where(is_shared: true) }
- scope :active, ->() { where(active: true) }
- scope :paused, ->() { where(active: false) }
- scope :online, ->() { where('contacted_at > ?', contact_time_deadline) }
- scope :ordered, ->() { order(id: :desc) }
+ scope :specific, -> { where(is_shared: false) }
+ scope :shared, -> { where(is_shared: true) }
+ scope :active, -> { where(active: true) }
+ scope :paused, -> { where(active: false) }
+ scope :online, -> { where('contacted_at > ?', contact_time_deadline) }
+ scope :ordered, -> { order(id: :desc) }
- scope :owned_or_shared, ->(project_id) do
- joins('LEFT JOIN ci_runner_projects ON ci_runner_projects.runner_id = ci_runners.id')
- .where("ci_runner_projects.project_id = :project_id OR ci_runners.is_shared = true", project_id: project_id)
+ scope :belonging_to_project, -> (project_id) {
+ joins(:runner_projects).where(ci_runner_projects: { project_id: project_id })
+ }
+
+ scope :belonging_to_parent_group_of_project, -> (project_id) {
+ project_groups = ::Group.joins(:projects).where(projects: { id: project_id })
+ hierarchy_groups = Gitlab::GroupHierarchy.new(project_groups).base_and_ancestors
+
+ joins(:groups).where(namespaces: { id: hierarchy_groups })
+ }
+
+ scope :owned_or_shared, -> (project_id) do
+ union = Gitlab::SQL::Union.new(
+ [belonging_to_project(project_id), belonging_to_parent_group_of_project(project_id), shared],
+ remove_duplicates: false
+ )
+ from("(#{union.to_sql}) ci_runners")
end
scope :assignable_for, ->(project) do
# FIXME: That `to_sql` is needed to workaround a weird Rails bug.
# Without that, placeholders would miss one and couldn't match.
where(locked: false)
- .where.not("id IN (#{project.runners.select(:id).to_sql})").specific
+ .where.not("ci_runners.id IN (#{project.runners.select(:id).to_sql})")
+ .specific
end
validate :tag_constraints
+ validate :either_projects_or_group
validates :access_level, presence: true
+ validates :runner_type, presence: true
acts_as_taggable
@@ -50,6 +69,12 @@ module Ci
ref_protected: 1
}
+ enum runner_type: {
+ instance_type: 1,
+ group_type: 2,
+ project_type: 3
+ }
+
cached_attr_reader :version, :revision, :platform, :architecture, :contacted_at, :ip_address
chronic_duration_attr :maximum_timeout_human_readable, :maximum_timeout
@@ -83,7 +108,13 @@ module Ci
end
def assign_to(project, current_user = nil)
- self.is_shared = false if shared?
+ if shared?
+ self.is_shared = false if shared?
+ self.runner_type = :project_type
+ elsif group_type?
+ raise ArgumentError, 'Transitioning a group runner to a project runner is not supported'
+ end
+
self.save
project.runner_projects.create(runner_id: self.id)
end
@@ -120,6 +151,14 @@ module Ci
!shared?
end
+ def assigned_to_group?
+ runner_namespaces.any?
+ end
+
+ def assigned_to_project?
+ runner_projects.any?
+ end
+
def can_pick?(build)
return false if self.ref_protected? && !build.protected?
@@ -174,6 +213,12 @@ module Ci
end
end
+ def pick_build!(build)
+ if can_pick?(build)
+ tick_runner_queue
+ end
+ end
+
private
def cleanup_runner_queue
@@ -205,7 +250,17 @@ module Ci
end
def assignable_for?(project_id)
- is_shared? || projects.exists?(id: project_id)
+ self.class.owned_or_shared(project_id).where(id: self.id).any?
+ end
+
+ def either_projects_or_group
+ if groups.many?
+ errors.add(:runner, 'can only be assigned to one group')
+ end
+
+ if assigned_to_group? && assigned_to_project?
+ errors.add(:runner, 'can only be assigned either to projects or to a group')
+ end
end
def accepting_tags?(build)
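A hedged sketch (not part of the diff) of how the reworked scopes compose; `project` is assumed to be a persisted `Project`, and `owned_or_shared` unions project runners, runners of the project's ancestor groups, and shared runners:

```ruby
runners = Ci::Runner.owned_or_shared(project.id)

runners.find_each do |runner|
  # runner_type is one of: instance_type, group_type, project_type
  puts "##{runner.id} #{runner.runner_type} active=#{runner.active?}"
end
```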
diff --git a/app/models/ci/runner_namespace.rb b/app/models/ci/runner_namespace.rb
new file mode 100644
index 00000000000..3269f86e8ca
--- /dev/null
+++ b/app/models/ci/runner_namespace.rb
@@ -0,0 +1,9 @@
+module Ci
+ class RunnerNamespace < ActiveRecord::Base
+ extend Gitlab::Ci::Model
+
+ belongs_to :runner
+ belongs_to :namespace, class_name: '::Namespace'
+ belongs_to :group, class_name: '::Group', foreign_key: :namespace_id
+ end
+end
diff --git a/app/models/clusters/applications/runner.rb b/app/models/clusters/applications/runner.rb
index 16efe90fa27..b881b4eaf36 100644
--- a/app/models/clusters/applications/runner.rb
+++ b/app/models/clusters/applications/runner.rb
@@ -43,12 +43,20 @@ module Clusters
def create_and_assign_runner
transaction do
- project.runners.create!(name: 'kubernetes-cluster', tag_list: %w(kubernetes cluster)).tap do |runner|
+ project.runners.create!(runner_create_params).tap do |runner|
update!(runner_id: runner.id)
end
end
end
+ def runner_create_params
+ {
+ name: 'kubernetes-cluster',
+ runner_type: :project_type,
+ tag_list: %w(kubernetes cluster)
+ }
+ end
+
def gitlab_url
Gitlab::Routing.url_helpers.root_url(only_path: false)
end
diff --git a/app/models/concerns/fast_destroy_all.rb b/app/models/concerns/fast_destroy_all.rb
new file mode 100644
index 00000000000..7ea042c6742
--- /dev/null
+++ b/app/models/concerns/fast_destroy_all.rb
@@ -0,0 +1,91 @@
+##
+# This module is for replacing `dependent: :destroy` and `before_destroy` hooks.
+#
+# In general, `destroy_all` is inefficient because it runs callbacks and issues a `DELETE` query per record, i.e. O(n), whereas
+# `delete_all` is efficient as it deletes all rows with a single `DELETE` query.
+#
+# Our best practice is therefore to use `delete_all`; however,
+# when external data (e.g. ObjectStorage, FileStorage or Redis) is associated with the database records,
+# plain `delete_all` cannot clean that data up.
+#
+# This module defines a pattern for using `delete_all` while still deleting the associated external data.
+# Here is an example.
+#
+# Situation
+# - `Project` has many `Ci::BuildTraceChunk` through `Ci::Build`
+# - `Ci::BuildTraceChunk` stores associated data in Redis, so it relies on `dependent: :destroy` and `before_destroy` for the deletion
+#
+# How to use
+# - Define `use_fast_destroy :build_trace_chunks` in the `Project` model.
+# - Define `begin_fast_destroy` and `finalize_fast_destroy(params)` in the `Ci::BuildTraceChunk` model.
+# - Use `fast_destroy_all` instead of `destroy` and `destroy_all`.
+# - Remove `dependent: :destroy` and `before_destroy` as they are no longer needed.
+#
+# Expectation
+# - When a project is `destroy`ed, the associated trace_chunks will be deleted by `delete_all`,
+#   and the associated external data will be removed, too.
+# - Calling `fast_destroy_all` directly behaves the same way.
+module FastDestroyAll
+ extend ActiveSupport::Concern
+
+ ForbiddenActionError = Class.new(StandardError)
+
+ included do
+ before_destroy do
+      raise ForbiddenActionError, '`destroy` and `destroy_all` are forbidden. Please use `fast_destroy_all`'
+ end
+ end
+
+ class_methods do
+ ##
+    # This method deletes rows and their associated external data efficiently
+    #
+    # This method can replace `destroy` and `destroy_all` without needing an `after_destroy` hook
+ def fast_destroy_all
+ params = begin_fast_destroy
+
+ delete_all
+
+ finalize_fast_destroy(params)
+ end
+
+ ##
+ # This method returns identifiers to delete associated external data (e.g. file paths, redis keys)
+ #
+ # This method must be defined in fast destroyable model
+ def begin_fast_destroy
+ raise NotImplementedError
+ end
+
+ ##
+ # This method deletes associated external data with the identifiers returned by `begin_fast_destroy`
+ #
+ # This method must be defined in fast destroyable model
+ def finalize_fast_destroy(params)
+ raise NotImplementedError
+ end
+ end
+
+ module Helpers
+ extend ActiveSupport::Concern
+
+ class_methods do
+ ##
+      # This method is to be defined on models which have fast-destroyable models as children,
+      # and lets us avoid using the `dependent: :destroy` hook
+ def use_fast_destroy(relation)
+ before_destroy(prepend: true) do
+ perform_fast_destroy(public_send(relation)) # rubocop:disable GitlabSecurity/PublicSend
+ end
+ end
+ end
+
+ def perform_fast_destroy(subject)
+ params = subject.begin_fast_destroy
+
+ run_after_commit do
+ subject.finalize_fast_destroy(params)
+ end
+ end
+ end
+end
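Following the comment block at the top of the concern, a condensed usage sketch (not part of the diff); `build` is assumed to be a `Ci::Build` with trace chunks, and the relation delegates the class-level `fast_destroy_all` as usual:

```ruby
# One DELETE for the rows, then the collected Redis keys are removed afterwards.
build.trace_chunks.fast_destroy_all

# Plain destroy/destroy_all is blocked by the before_destroy guard:
build.trace_chunks.first.destroy
# => raises FastDestroyAll::ForbiddenActionError
```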
diff --git a/app/models/concerns/participable.rb b/app/models/concerns/participable.rb
index e48bc0be410..01b1ef9f82c 100644
--- a/app/models/concerns/participable.rb
+++ b/app/models/concerns/participable.rb
@@ -98,6 +98,10 @@ module Participable
participants.merge(ext.users)
+ filter_by_ability(participants)
+ end
+
+ def filter_by_ability(participants)
case self
when PersonalSnippet
Ability.users_that_can_read_personal_snippet(participants.to_a, self)
diff --git a/app/models/concerns/reactive_caching.rb b/app/models/concerns/reactive_caching.rb
index 2589215ad19..eef9caf1c8e 100644
--- a/app/models/concerns/reactive_caching.rb
+++ b/app/models/concerns/reactive_caching.rb
@@ -60,13 +60,16 @@ module ReactiveCaching
end
def with_reactive_cache(*args, &blk)
- within_reactive_cache_lifetime(*args) do
+ bootstrap = !within_reactive_cache_lifetime?(*args)
+ Rails.cache.write(alive_reactive_cache_key(*args), true, expires_in: self.class.reactive_cache_lifetime)
+
+ if bootstrap
+ ReactiveCachingWorker.perform_async(self.class, id, *args)
+ nil
+ else
data = Rails.cache.read(full_reactive_cache_key(*args))
yield data if data.present?
end
- ensure
- Rails.cache.write(alive_reactive_cache_key(*args), true, expires_in: self.class.reactive_cache_lifetime)
- ReactiveCachingWorker.perform_async(self.class, id, *args)
end
def clear_reactive_cache!(*args)
@@ -75,7 +78,7 @@ module ReactiveCaching
def exclusively_update_reactive_cache!(*args)
locking_reactive_cache(*args) do
- within_reactive_cache_lifetime(*args) do
+ if within_reactive_cache_lifetime?(*args)
enqueuing_update(*args) do
value = calculate_reactive_cache(*args)
Rails.cache.write(full_reactive_cache_key(*args), value)
@@ -105,8 +108,8 @@ module ReactiveCaching
Gitlab::ExclusiveLease.cancel(full_reactive_cache_key(*args), uuid)
end
- def within_reactive_cache_lifetime(*args)
- yield if Rails.cache.read(alive_reactive_cache_key(*args))
+ def within_reactive_cache_lifetime?(*args)
+ !!Rails.cache.read(alive_reactive_cache_key(*args))
end
def enqueuing_update(*args)
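A hedged sketch (not part of the diff) of the caller-side contract after this change; `environment` stands in for any model that includes `ReactiveCaching`, and the shape of `data` is illustrative:

```ruby
pod_count = environment.with_reactive_cache do |data|
  data['pods'].length
end

if pod_count.nil?
  # First call: the cache is being bootstrapped by ReactiveCachingWorker,
  # so render a "loading" state and poll again later.
else
  pod_count # later calls within the cache lifetime yield the cached value
end
```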
diff --git a/app/models/concerns/routable.rb b/app/models/concerns/routable.rb
index 915ad6959be..0176a12a131 100644
--- a/app/models/concerns/routable.rb
+++ b/app/models/concerns/routable.rb
@@ -4,7 +4,9 @@ module Routable
extend ActiveSupport::Concern
included do
- has_one :route, as: :source, autosave: true, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
+ # Remove `inverse_of: source` when upgraded to rails 5.2
+ # See https://github.com/rails/rails/pull/28808
+ has_one :route, as: :source, autosave: true, dependent: :destroy, inverse_of: :source # rubocop:disable Cop/ActiveRecordDependent
has_many :redirect_routes, as: :source, autosave: true, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
validates :route, presence: true
diff --git a/app/models/concerns/sha_attribute.rb b/app/models/concerns/sha_attribute.rb
index 703a72c355c..3796737427a 100644
--- a/app/models/concerns/sha_attribute.rb
+++ b/app/models/concerns/sha_attribute.rb
@@ -4,18 +4,34 @@ module ShaAttribute
module ClassMethods
def sha_attribute(name)
return if ENV['STATIC_VERIFICATION']
- return unless table_exists?
+
+ validate_binary_column_exists!(name) unless Rails.env.production?
+
+ attribute(name, Gitlab::Database::ShaAttribute.new)
+ end
+
+ # This only gets executed in non-production environments as an additional check to ensure
+ # the column is the correct type. In production it should behave like any other attribute.
+ # See https://gitlab.com/gitlab-org/gitlab-ee/merge_requests/5502 for more discussion
+ def validate_binary_column_exists!(name)
+ unless table_exists?
+ warn "WARNING: sha_attribute #{name.inspect} is invalid since the table doesn't exist - you may need to run database migrations"
+ return
+ end
column = columns.find { |c| c.name == name.to_s }
- # In case the table doesn't exist we won't be able to find the column,
- # thus we will only check the type if the column is present.
- if column && column.type != :binary
- raise ArgumentError,
- "sha_attribute #{name.inspect} is invalid since the column type is not :binary"
+ unless column
+ warn "WARNING: sha_attribute #{name.inspect} is invalid since the column doesn't exist - you may need to run database migrations"
+ return
end
- attribute(name, Gitlab::Database::ShaAttribute.new)
+ unless column.type == :binary
+ raise ArgumentError.new("sha_attribute #{name.inspect} is invalid since the column type is not :binary")
+ end
+ rescue => error
+ Gitlab::AppLogger.error "ShaAttribute initialization: #{error.message}"
+ raise
end
end
end
diff --git a/app/models/concerns/time_trackable.rb b/app/models/concerns/time_trackable.rb
index 5911b56c34c..73fc5048dcf 100644
--- a/app/models/concerns/time_trackable.rb
+++ b/app/models/concerns/time_trackable.rb
@@ -30,6 +30,8 @@ module TimeTrackable
return if @time_spent == 0
+ touch if touchable?
+
if @time_spent == :reset
reset_spent_time
else
@@ -53,6 +55,10 @@ module TimeTrackable
private
+ def touchable?
+ valid? && persisted?
+ end
+
def reset_spent_time
timelogs.new(time_spent: total_time_spent * -1, user: @time_spent_user) # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
diff --git a/app/models/group.rb b/app/models/group.rb
index 9b42bbf99be..cefca316399 100644
--- a/app/models/group.rb
+++ b/app/models/group.rb
@@ -9,6 +9,7 @@ class Group < Namespace
include SelectForProjectAuthorization
include LoadedInGroupList
include GroupDescendant
+ include TokenAuthenticatable
has_many :group_members, -> { where(requested_at: nil) }, dependent: :destroy, as: :source # rubocop:disable Cop/ActiveRecordDependent
alias_method :members, :group_members
@@ -43,6 +44,8 @@ class Group < Namespace
validates :two_factor_grace_period, presence: true, numericality: { greater_than_or_equal_to: 0 }
+ add_authentication_token_field :runners_token
+
after_create :post_create_hook
after_destroy :post_destroy_hook
after_save :update_two_factor_requirement
@@ -238,6 +241,13 @@ class Group < Namespace
.where(source_id: self_and_descendants.reorder(nil).select(:id))
end
+  # Returns all members that are part of the group, its subgroups, and ancestor groups
+ def direct_and_indirect_members
+ GroupMember
+ .active_without_invites_and_requests
+ .where(source_id: self_and_hierarchy.reorder(nil).select(:id))
+ end
+
def users_with_parents
User
.where(id: members_with_parents.select(:user_id))
@@ -250,6 +260,30 @@ class Group < Namespace
.reorder(nil)
end
+ # Returns all users that are members of the group because:
+ # 1. They belong to the group
+ # 2. They belong to a project that belongs to the group
+  # 3. They belong to a sub-group or to a project in such a sub-group
+ # 4. They belong to an ancestor group
+ def direct_and_indirect_users
+ union = Gitlab::SQL::Union.new([
+ User
+ .where(id: direct_and_indirect_members.select(:user_id))
+ .reorder(nil),
+ project_users_with_descendants
+ ])
+
+ User.from("(#{union.to_sql}) #{User.table_name}")
+ end
+
+ # Returns all users that are members of projects
+ # belonging to the current group or sub-groups
+ def project_users_with_descendants
+ User
+ .joins(projects: :group)
+ .where(namespaces: { id: self_and_descendants.select(:id) })
+ end
+
def max_member_access_for_user(user)
return GroupMember::OWNER if user.admin?
@@ -294,6 +328,13 @@ class Group < Namespace
refresh_members_authorized_projects(blocking: false)
end
+  # Each existing group needs to have a `runners_token`.
+  # We generate it on read since migrating all existing groups is not a feasible
+  # solution.
+ def runners_token
+ ensure_runners_token!
+ end
+
private
def update_two_factor_requirement
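A short console-style sketch (not part of the diff) of the new group helpers; `group` is assumed to be a persisted `Group`:

```ruby
# Members of the group itself, its ancestors, its subgroups, and the users of
# projects anywhere in the descendant tree.
group.direct_and_indirect_users.count

# Generated lazily on first read, so pre-existing groups get a token without a
# data migration.
group.runners_token
```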
diff --git a/app/models/identity.rb b/app/models/identity.rb
index 1011b9f1109..3fd0c5e751d 100644
--- a/app/models/identity.rb
+++ b/app/models/identity.rb
@@ -1,12 +1,16 @@
class Identity < ActiveRecord::Base
+ def self.uniqueness_scope
+ :provider
+ end
+
include Sortable
include CaseSensitivity
belongs_to :user
validates :provider, presence: true
- validates :extern_uid, allow_blank: true, uniqueness: { scope: :provider, case_sensitive: false }
- validates :user_id, uniqueness: { scope: :provider }
+ validates :extern_uid, allow_blank: true, uniqueness: { scope: uniqueness_scope, case_sensitive: false }
+ validates :user_id, uniqueness: { scope: uniqueness_scope }
before_save :ensure_normalized_extern_uid, if: :extern_uid_changed?
after_destroy :clear_user_synced_attributes, if: :user_synced_attributes_metadata_from_provider?
diff --git a/app/models/member.rb b/app/models/member.rb
index eac4a22a03f..68572f2e33a 100644
--- a/app/models/member.rb
+++ b/app/models/member.rb
@@ -96,6 +96,17 @@ class Member < ActiveRecord::Base
joins(:user).merge(User.search(query))
end
+ def filter_by_2fa(value)
+ case value
+ when 'enabled'
+ left_join_users.merge(User.with_two_factor_indistinct)
+ when 'disabled'
+ left_join_users.merge(User.without_two_factor)
+ else
+ all
+ end
+ end
+
def sort_by_attribute(method)
case method.to_s
when 'access_level_asc' then reorder(access_level: :asc)
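A hedged sketch (not part of the diff) of the new 2FA filter, chained off a members association; any value other than 'enabled'/'disabled' falls through to `all`:

```ruby
group.members.filter_by_2fa('enabled')   # members whose user has 2FA enabled
group.members.filter_by_2fa('disabled')  # members whose user has no 2FA
group.members.filter_by_2fa('whatever')  # no filtering applied
```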
diff --git a/app/models/merge_request.rb b/app/models/merge_request.rb
index 3f924992db7..1d2f0856dbb 100644
--- a/app/models/merge_request.rb
+++ b/app/models/merge_request.rb
@@ -323,7 +323,7 @@ class MergeRequest < ActiveRecord::Base
# updates `merge_jid` with the MergeWorker#jid.
# This helps tracking enqueued and ongoing merge jobs.
def merge_async(user_id, params)
- jid = MergeWorker.perform_async(id, user_id, params)
+ jid = MergeWorker.perform_async(id, user_id, params.to_h)
update_column(:merge_jid, jid)
end
@@ -1007,6 +1007,10 @@ class MergeRequest < ActiveRecord::Base
@merge_commit ||= project.commit(merge_commit_sha) if merge_commit_sha
end
+ def short_merge_commit_sha
+ Commit.truncate_sha(merge_commit_sha) if merge_commit_sha
+ end
+
def can_be_reverted?(current_user)
return false unless merge_commit
diff --git a/app/models/namespace.rb b/app/models/namespace.rb
index c29a53e5ce7..3dad4277713 100644
--- a/app/models/namespace.rb
+++ b/app/models/namespace.rb
@@ -21,6 +21,9 @@ class Namespace < ActiveRecord::Base
has_many :projects, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :project_statistics
+ has_many :runner_namespaces, class_name: 'Ci::RunnerNamespace'
+ has_many :runners, through: :runner_namespaces, source: :runner, class_name: 'Ci::Runner'
+
# This should _not_ be `inverse_of: :namespace`, because that would also set
# `user.namespace` when this user creates a group with themselves as `owner`.
belongs_to :owner, class_name: "User"
@@ -163,6 +166,13 @@ class Namespace < ActiveRecord::Base
projects.with_shared_runners.any?
end
+ # Returns all ancestors, self, and descendants of the current namespace.
+ def self_and_hierarchy
+ Gitlab::GroupHierarchy
+ .new(self.class.where(id: id))
+ .all_groups
+ end
+
# Returns all the ancestors of the current namespaces.
def ancestors
return self.class.none unless parent_id
diff --git a/app/models/note.rb b/app/models/note.rb
index e426f84832b..109405d3f17 100644
--- a/app/models/note.rb
+++ b/app/models/note.rb
@@ -317,10 +317,6 @@ class Note < ActiveRecord::Base
!system? && !for_snippet?
end
- def can_create_notification?
- true
- end
-
def discussion_class(noteable = nil)
# When commit notes are rendered on an MR's Discussion page, they are
# displayed in one discussion instead of individually.
diff --git a/app/models/project.rb b/app/models/project.rb
index d4e9e51c7be..534a0e630af 100644
--- a/app/models/project.rb
+++ b/app/models/project.rb
@@ -22,6 +22,7 @@ class Project < ActiveRecord::Base
include DeploymentPlatform
include ::Gitlab::Utils::StrongMemoize
include ChronicDurationAttribute
+ include FastDestroyAll::Helpers
extend Gitlab::ConfigHelper
@@ -64,9 +65,15 @@ class Project < ActiveRecord::Base
default_value_for :only_allow_merge_if_all_discussions_are_resolved, false
add_authentication_token_field :runners_token
+
+ before_validation :mark_remote_mirrors_for_removal, if: -> { ActiveRecord::Base.connection.table_exists?(:remote_mirrors) }
+
before_save :ensure_runners_token
after_save :update_project_statistics, if: :namespace_id_changed?
+
+ after_save :create_import_state, if: ->(project) { project.import? && project.import_state.nil? }
+
after_create :create_project_feature, unless: :project_feature
after_create :create_ci_cd_settings,
@@ -78,6 +85,9 @@ class Project < ActiveRecord::Base
after_update :update_forks_visibility_level
before_destroy :remove_private_deploy_keys
+
+ use_fast_destroy :build_trace_chunks
+
after_destroy -> { run_after_commit { remove_pages } }
after_destroy :remove_exports
@@ -157,6 +167,8 @@ class Project < ActiveRecord::Base
has_one :fork_network_member
has_one :fork_network, through: :fork_network_member
+ has_one :import_state, autosave: true, class_name: 'ProjectImportState', inverse_of: :project
+
# Merge Requests for target project should be removed with it
has_many :merge_requests, foreign_key: 'target_project_id'
has_many :source_of_merge_requests, foreign_key: 'source_project_id', class_name: 'MergeRequest'
@@ -205,6 +217,7 @@ class Project < ActiveRecord::Base
has_one :cluster_project, class_name: 'Clusters::Project'
has_many :clusters, through: :cluster_project, class_name: 'Clusters::Cluster'
+ has_many :cluster_ingresses, through: :clusters, source: :application_ingress, class_name: 'Clusters::Applications::Ingress'
# Container repositories need to remove data from the container registry,
# which is not managed by the DB. Hence we're still using dependent: :destroy
@@ -220,6 +233,7 @@ class Project < ActiveRecord::Base
# still using `dependent: :destroy` here.
has_many :builds, class_name: 'Ci::Build', inverse_of: :project, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :build_trace_section_names, class_name: 'Ci::BuildTraceSectionName'
+ has_many :build_trace_chunks, class_name: 'Ci::BuildTraceChunk', through: :builds, source: :trace_chunks
has_many :runner_projects, class_name: 'Ci::RunnerProject'
has_many :runners, through: :runner_projects, source: :runner, class_name: 'Ci::Runner'
has_many :variables, class_name: 'Ci::Variable'
@@ -230,23 +244,28 @@ class Project < ActiveRecord::Base
has_many :project_deploy_tokens
has_many :deploy_tokens, through: :project_deploy_tokens
- has_many :active_runners, -> { active }, through: :runner_projects, source: :runner, class_name: 'Ci::Runner'
-
has_one :auto_devops, class_name: 'ProjectAutoDevops'
has_many :custom_attributes, class_name: 'ProjectCustomAttribute'
has_many :project_badges, class_name: 'ProjectBadge'
- has_one :ci_cd_settings, class_name: 'ProjectCiCdSetting'
+ has_one :ci_cd_settings, class_name: 'ProjectCiCdSetting', inverse_of: :project, autosave: true, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
+
+ has_many :remote_mirrors, inverse_of: :project
accepts_nested_attributes_for :variables, allow_destroy: true
accepts_nested_attributes_for :project_feature, update_only: true
accepts_nested_attributes_for :import_data
accepts_nested_attributes_for :auto_devops, update_only: true
+ accepts_nested_attributes_for :remote_mirrors,
+ allow_destroy: true,
+ reject_if: ->(attrs) { attrs[:id].blank? && attrs[:url].blank? }
+
delegate :name, to: :owner, allow_nil: true, prefix: true
delegate :members, to: :team, prefix: true
delegate :add_user, :add_users, to: :team
delegate :add_guest, :add_reporter, :add_developer, :add_master, :add_role, to: :team
+ delegate :group_runners_enabled, :group_runners_enabled=, :group_runners_enabled?, to: :ci_cd_settings
# Validations
validates :creator, presence: true, on: :create
@@ -331,6 +350,12 @@ class Project < ActiveRecord::Base
scope :with_issues_enabled, -> { with_feature_enabled(:issues) }
scope :with_issues_available_for_user, ->(current_user) { with_feature_available_for_user(:issues, current_user) }
scope :with_merge_requests_enabled, -> { with_feature_enabled(:merge_requests) }
+ scope :with_remote_mirrors, -> { joins(:remote_mirrors).where(remote_mirrors: { enabled: true }).distinct }
+
+ scope :with_group_runners_enabled, -> do
+ joins(:ci_cd_settings)
+ .where(project_ci_cd_settings: { group_runners_enabled: true })
+ end
enum auto_cancel_pending_pipelines: { disabled: 0, enabled: 1 }
@@ -381,55 +406,9 @@ class Project < ActiveRecord::Base
scope :abandoned, -> { where('projects.last_activity_at < ?', 6.months.ago) }
scope :excluding_project, ->(project) { where.not(id: project) }
- scope :import_started, -> { where(import_status: 'started') }
-
- state_machine :import_status, initial: :none do
- event :import_schedule do
- transition [:none, :finished, :failed] => :scheduled
- end
-
- event :force_import_start do
- transition [:none, :finished, :failed] => :started
- end
-
- event :import_start do
- transition scheduled: :started
- end
-
- event :import_finish do
- transition started: :finished
- end
-
- event :import_fail do
- transition [:scheduled, :started] => :failed
- end
-
- event :import_retry do
- transition failed: :started
- end
-
- state :scheduled
- state :started
- state :finished
- state :failed
-
- after_transition [:none, :finished, :failed] => :scheduled do |project, _|
- project.run_after_commit do
- job_id = add_import_job
- update(import_jid: job_id) if job_id
- end
- end
- after_transition started: :finished do |project, _|
- project.reset_cache_and_import_attrs
-
- if Gitlab::ImportSources.importer_names.include?(project.import_type) && project.repo_exists?
- project.run_after_commit do
- Projects::AfterImportService.new(project).execute
- end
- end
- end
- end
+ scope :joins_import_state, -> { joins("LEFT JOIN project_mirror_data import_state ON import_state.project_id = projects.id") }
+ scope :import_started, -> { joins_import_state.where("import_state.status = 'started' OR projects.import_status = 'started'") }
class << self
# Searches for a list of projects based on the query given in `query`.
@@ -659,10 +638,6 @@ class Project < ActiveRecord::Base
external_import? || forked? || gitlab_project_import? || bare_repository_import?
end
- def no_import?
- import_status == 'none'
- end
-
def external_import?
import_url.present?
end
@@ -675,6 +650,99 @@ class Project < ActiveRecord::Base
import_started? || import_scheduled?
end
+ def import_state_args
+ {
+ status: self[:import_status],
+ jid: self[:import_jid],
+ last_error: self[:import_error]
+ }
+ end
+
+ def ensure_import_state(force: false)
+ return if !force && (self[:import_status] == 'none' || self[:import_status].nil?)
+ return unless import_state.nil?
+
+ if persisted?
+ create_import_state(import_state_args)
+
+ update_column(:import_status, 'none')
+ else
+ build_import_state(import_state_args)
+
+ self[:import_status] = 'none'
+ end
+ end
+
+ def import_schedule
+ ensure_import_state(force: true)
+
+ import_state.schedule
+ end
+
+ def force_import_start
+ ensure_import_state(force: true)
+
+ import_state.force_start
+ end
+
+ def import_start
+ ensure_import_state(force: true)
+
+ import_state.start
+ end
+
+ def import_fail
+ ensure_import_state(force: true)
+
+ import_state.fail_op
+ end
+
+ def import_finish
+ ensure_import_state(force: true)
+
+ import_state.finish
+ end
+
+ def import_jid=(new_jid)
+ ensure_import_state(force: true)
+
+ import_state.jid = new_jid
+ end
+
+ def import_jid
+ ensure_import_state
+
+ import_state&.jid
+ end
+
+ def import_error=(new_error)
+ ensure_import_state(force: true)
+
+ import_state.last_error = new_error
+ end
+
+ def import_error
+ ensure_import_state
+
+ import_state&.last_error
+ end
+
+ def import_status=(new_status)
+ ensure_import_state(force: true)
+
+ import_state.status = new_status
+ end
+
+ def import_status
+ ensure_import_state
+
+ import_state&.status || 'none'
+ end
+
+ def no_import?
+ import_status == 'none'
+ end
+
def import_started?
# import? does SQL work so only run it if it looks like there's an import running
import_status == 'started' && import?
@@ -708,6 +776,37 @@ class Project < ActiveRecord::Base
import_type == 'gitea'
end
+ def has_remote_mirror?
+ remote_mirror_available? && remote_mirrors.enabled.exists?
+ end
+
+ def updating_remote_mirror?
+ remote_mirrors.enabled.started.exists?
+ end
+
+ def update_remote_mirrors
+ return unless remote_mirror_available?
+
+ remote_mirrors.enabled.each(&:sync)
+ end
+
+ def mark_stuck_remote_mirrors_as_failed!
+ remote_mirrors.stuck.update_all(
+ update_status: :failed,
+      last_error: 'The remote mirror took too long to complete.',
+ last_update_at: Time.now
+ )
+ end
+
+ def mark_remote_mirrors_for_removal
+ remote_mirrors.each(&:mark_for_delete_if_blank_url)
+ end
+
+ def remote_mirror_available?
+ remote_mirror_available_overridden ||
+ ::Gitlab::CurrentSettings.mirror_available
+ end
+
def check_limit
unless creator.can_create_project? || namespace.kind == 'group'
projects_limit = creator.projects_limit
@@ -1301,12 +1400,17 @@ class Project < ActiveRecord::Base
@shared_runners ||= shared_runners_available? ? Ci::Runner.shared : Ci::Runner.none
end
- def active_shared_runners
- @active_shared_runners ||= shared_runners.active
+ def group_runners
+ @group_runners ||= group_runners_enabled? ? Ci::Runner.belonging_to_parent_group_of_project(self.id) : Ci::Runner.none
+ end
+
+ def all_runners
+ union = Gitlab::SQL::Union.new([runners, group_runners, shared_runners])
+ Ci::Runner.from("(#{union.to_sql}) ci_runners")
end
def any_runners?(&block)
- active_runners.any?(&block) || active_shared_runners.any?(&block)
+ all_runners.active.any?(&block)
end
def valid_runners_token?(token)
@@ -1471,7 +1575,7 @@ class Project < ActiveRecord::Base
def rename_repo_notify!
# When we import a project overwriting the original project, there
# is a move operation. In that case we don't want to send the instructions.
- send_move_instructions(full_path_was) unless started?
+ send_move_instructions(full_path_was) unless import_started?
expires_full_path_cache
self.old_path_with_namespace = full_path_was
@@ -1525,7 +1629,8 @@ class Project < ActiveRecord::Base
return unless import_jid
Gitlab::SidekiqStatus.unset(import_jid)
- update_column(:import_jid, nil)
+
+ import_state.update_column(:jid, nil)
end
def running_or_pending_build_count(force: false)
@@ -1544,7 +1649,8 @@ class Project < ActiveRecord::Base
sanitized_message = Gitlab::UrlSanitizer.sanitize(error_message)
import_fail
- update_column(:import_error, sanitized_message)
+
+ import_state.update_column(:last_error, sanitized_message)
rescue ActiveRecord::ActiveRecordError => e
Rails.logger.error("Error setting import status to failed: #{e.message}. Original error: #{sanitized_message}")
ensure
@@ -1874,6 +1980,10 @@ class Project < ActiveRecord::Base
[]
end
+ def toggle_ci_cd_settings!(settings_attribute)
+ ci_cd_settings.toggle!(settings_attribute)
+ end
+
def gitlab_deploy_token
@gitlab_deploy_token ||= deploy_tokens.gitlab_deploy_token
end
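The import attributes on `Project` now delegate to the new `ProjectImportState` row; a hedged sketch (not part of the diff) of how the legacy interface keeps working, assuming `project` is persisted:

```ruby
project.import_status          # reads import_state&.status, falling back to 'none'
project.import_jid = 'abc123'  # forces an import_state row into existence and writes its jid
project.import_schedule        # ensure_import_state(force: true), then import_state.schedule
```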
diff --git a/app/models/project_ci_cd_setting.rb b/app/models/project_ci_cd_setting.rb
index 9f10a93148c..588cced5781 100644
--- a/app/models/project_ci_cd_setting.rb
+++ b/app/models/project_ci_cd_setting.rb
@@ -1,5 +1,5 @@
class ProjectCiCdSetting < ActiveRecord::Base
- belongs_to :project
+ belongs_to :project, inverse_of: :ci_cd_settings
# The version of the schema that first introduced this model/table.
MINIMUM_SCHEMA_VERSION = 20180403035759
diff --git a/app/models/project_import_state.rb b/app/models/project_import_state.rb
new file mode 100644
index 00000000000..1605317ae14
--- /dev/null
+++ b/app/models/project_import_state.rb
@@ -0,0 +1,55 @@
+class ProjectImportState < ActiveRecord::Base
+ include AfterCommitQueue
+
+ self.table_name = "project_mirror_data"
+
+ belongs_to :project, inverse_of: :import_state
+
+ validates :project, presence: true
+
+ state_machine :status, initial: :none do
+ event :schedule do
+ transition [:none, :finished, :failed] => :scheduled
+ end
+
+ event :force_start do
+ transition [:none, :finished, :failed] => :started
+ end
+
+ event :start do
+ transition scheduled: :started
+ end
+
+ event :finish do
+ transition started: :finished
+ end
+
+ event :fail_op do
+ transition [:scheduled, :started] => :failed
+ end
+
+ state :scheduled
+ state :started
+ state :finished
+ state :failed
+
+ after_transition [:none, :finished, :failed] => :scheduled do |state, _|
+ state.run_after_commit do
+ job_id = project.add_import_job
+ update(jid: job_id) if job_id
+ end
+ end
+
+ after_transition started: :finished do |state, _|
+ project = state.project
+
+ project.reset_cache_and_import_attrs
+
+ if Gitlab::ImportSources.importer_names.include?(project.import_type) && project.repo_exists?
+ state.run_after_commit do
+ Projects::AfterImportService.new(project).execute
+ end
+ end
+ end
+ end
+end
diff --git a/app/models/remote_mirror.rb b/app/models/remote_mirror.rb
new file mode 100644
index 00000000000..bbf8fd9c6a7
--- /dev/null
+++ b/app/models/remote_mirror.rb
@@ -0,0 +1,219 @@
+class RemoteMirror < ActiveRecord::Base
+ include AfterCommitQueue
+
+ PROTECTED_BACKOFF_DELAY = 1.minute
+ UNPROTECTED_BACKOFF_DELAY = 5.minutes
+
+ attr_encrypted :credentials,
+ key: Gitlab::Application.secrets.db_key_base,
+ marshal: true,
+ encode: true,
+ mode: :per_attribute_iv_and_salt,
+ insecure_mode: true,
+ algorithm: 'aes-256-cbc'
+
+ default_value_for :only_protected_branches, true
+
+ belongs_to :project, inverse_of: :remote_mirrors
+
+ validates :url, presence: true, url: { protocols: %w(ssh git http https), allow_blank: true }
+ validates :url, addressable_url: true, if: :url_changed?
+
+ before_save :set_new_remote_name, if: :mirror_url_changed?
+
+ after_save :set_override_remote_mirror_available, unless: -> { Gitlab::CurrentSettings.current_application_settings.mirror_available }
+ after_save :refresh_remote, if: :mirror_url_changed?
+ after_update :reset_fields, if: :mirror_url_changed?
+
+ after_commit :remove_remote, on: :destroy
+
+ scope :enabled, -> { where(enabled: true) }
+ scope :started, -> { with_update_status(:started) }
+ scope :stuck, -> { started.where('last_update_at < ? OR (last_update_at IS NULL AND updated_at < ?)', 1.day.ago, 1.day.ago) }
+
+ state_machine :update_status, initial: :none do
+ event :update_start do
+ transition [:none, :finished, :failed] => :started
+ end
+
+ event :update_finish do
+ transition started: :finished
+ end
+
+ event :update_fail do
+ transition started: :failed
+ end
+
+ state :started
+ state :finished
+ state :failed
+
+ after_transition any => :started do |remote_mirror, _|
+ Gitlab::Metrics.add_event(:remote_mirrors_running, path: remote_mirror.project.full_path)
+
+ remote_mirror.update(last_update_started_at: Time.now)
+ end
+
+ after_transition started: :finished do |remote_mirror, _|
+ Gitlab::Metrics.add_event(:remote_mirrors_finished, path: remote_mirror.project.full_path)
+
+ timestamp = Time.now
+ remote_mirror.update_attributes!(
+ last_update_at: timestamp, last_successful_update_at: timestamp, last_error: nil
+ )
+ end
+
+ after_transition started: :failed do |remote_mirror, _|
+ Gitlab::Metrics.add_event(:remote_mirrors_failed, path: remote_mirror.project.full_path)
+
+ remote_mirror.update(last_update_at: Time.now)
+ end
+ end
+
+ def remote_name
+ super || fallback_remote_name
+ end
+
+ def update_failed?
+ update_status == 'failed'
+ end
+
+ def update_in_progress?
+ update_status == 'started'
+ end
+
+ def update_repository(options)
+ raw.update(options)
+ end
+
+ def sync?
+ enabled?
+ end
+
+ def sync
+ return unless sync?
+
+ if recently_scheduled?
+ RepositoryUpdateRemoteMirrorWorker.perform_in(backoff_delay, self.id, Time.now)
+ else
+ RepositoryUpdateRemoteMirrorWorker.perform_async(self.id, Time.now)
+ end
+ end
+
+ def enabled
+ return false unless project && super
+ return false unless project.remote_mirror_available?
+ return false unless project.repository_exists?
+ return false if project.pending_delete?
+
+ true
+ end
+ alias_method :enabled?, :enabled
+
+ def updated_since?(timestamp)
+ last_update_started_at && last_update_started_at > timestamp && !update_failed?
+ end
+
+ def mark_for_delete_if_blank_url
+ mark_for_destruction if url.blank?
+ end
+
+ def mark_as_failed(error_message)
+ update_fail
+ update_column(:last_error, Gitlab::UrlSanitizer.sanitize(error_message))
+ end
+
+ def url=(value)
+ super(value) && return unless Gitlab::UrlSanitizer.valid?(value)
+
+ mirror_url = Gitlab::UrlSanitizer.new(value)
+ self.credentials = mirror_url.credentials
+
+ super(mirror_url.sanitized_url)
+ end
+
+ def url
+ if super
+ Gitlab::UrlSanitizer.new(super, credentials: credentials).full_url
+ end
+ rescue
+ super
+ end
+
+ def safe_url
+ return if url.nil?
+
+ result = URI.parse(url)
+ result.password = '*****' if result.password
+ result.user = '*****' if result.user && result.user != "git" # tokens or other data may be saved as user
+ result.to_s
+ end
+
+ private
+
+ def raw
+ @raw ||= Gitlab::Git::RemoteMirror.new(project.repository.raw, remote_name)
+ end
+
+ def fallback_remote_name
+ return unless id
+
+ "remote_mirror_#{id}"
+ end
+
+ def recently_scheduled?
+ return false unless self.last_update_started_at
+
+ self.last_update_started_at >= Time.now - backoff_delay
+ end
+
+ def backoff_delay
+ if self.only_protected_branches
+ PROTECTED_BACKOFF_DELAY
+ else
+ UNPROTECTED_BACKOFF_DELAY
+ end
+ end
+
+ def reset_fields
+ update_columns(
+ last_error: nil,
+ last_update_at: nil,
+ last_successful_update_at: nil,
+ update_status: 'finished'
+ )
+ end
+
+ def set_override_remote_mirror_available
+ enabled = read_attribute(:enabled)
+
+ project.update(remote_mirror_available_overridden: enabled)
+ end
+
+ def set_new_remote_name
+ self.remote_name = "remote_mirror_#{SecureRandom.hex}"
+ end
+
+ def refresh_remote
+ return unless project
+
+ # Before adding a new remote we have to delete the data from
+ # the previous remote name
+ prev_remote_name = remote_name_was || fallback_remote_name
+ run_after_commit do
+ project.repository.async_remove_remote(prev_remote_name)
+ end
+
+ project.repository.add_remote(remote_name, url)
+ end
+
+ def remove_remote
+    return unless project # the project may be pending deletion, so we don't need to touch the git repository
+
+ project.repository.async_remove_remote(remote_name)
+ end
+
+ def mirror_url_changed?
+ url_changed? || encrypted_credentials_changed?
+ end
+end
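A hedged sketch (not part of the diff) of the scheduling and URL-masking behaviour above; `project` is assumed to have an enabled remote mirror:

```ruby
mirror = project.remote_mirrors.enabled.first

# Enqueues RepositoryUpdateRemoteMirrorWorker immediately, or with a 1 or 5
# minute backoff when the last update started recently (protected-only mirrors
# get the shorter delay).
mirror.sync

mirror.safe_url # => "https://*****@example.com/repo.git" (credentials masked)
```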
diff --git a/app/models/repository.rb b/app/models/repository.rb
index 6831305fb93..44c6bff6b66 100644
--- a/app/models/repository.rb
+++ b/app/models/repository.rb
@@ -37,7 +37,7 @@ class Repository
changelog license_blob license_key gitignore koding_yml
gitlab_ci_yml branch_names tag_names branch_count
tag_count avatar exists? root_ref has_visible_content?
- issue_template_names merge_request_template_names).freeze
+ issue_template_names merge_request_template_names xcode_project?).freeze
# Methods that use cache_method but only memoize the value
MEMOIZED_CACHED_METHODS = %i(license).freeze
@@ -55,7 +55,8 @@ class Repository
gitlab_ci: :gitlab_ci_yml,
avatar: :avatar,
issue_template: :issue_template_names,
- merge_request_template: :merge_request_template_names
+ merge_request_template: :merge_request_template_names,
+ xcode_config: :xcode_project?
}.freeze
def initialize(full_path, project, disk_path: nil, is_wiki: false)
@@ -594,6 +595,11 @@ class Repository
end
cache_method :gitlab_ci_yml
+ def xcode_project?
+ file_on_head(:xcode_config).present?
+ end
+ cache_method :xcode_project?
+
def head_commit
@head_commit ||= commit(self.root_ref)
end
@@ -854,13 +860,27 @@ class Repository
add_remote(remote_name, url, mirror_refmap: refmap)
fetch_remote(remote_name, forced: forced, prune: prune)
ensure
- remove_remote(remote_name) if tmp_remote_name
+ async_remove_remote(remote_name) if tmp_remote_name
end
def fetch_remote(remote, forced: false, ssh_auth: nil, no_tags: false, prune: true)
gitlab_shell.fetch_remote(raw_repository, remote, ssh_auth: ssh_auth, forced: forced, no_tags: no_tags, prune: prune)
end
+ def async_remove_remote(remote_name)
+ return unless remote_name
+
+ job_id = RepositoryRemoveRemoteWorker.perform_async(project.id, remote_name)
+
+ if job_id
+ Rails.logger.info("Remove remote job scheduled for #{project.id} with remote name: #{remote_name} job ID #{job_id}.")
+ else
+ Rails.logger.info("Remove remote job failed to create for #{project.id} with remote name #{remote_name}.")
+ end
+
+ job_id
+ end
+
def fetch_source_branch!(source_repository, source_branch, local_ref)
raw_repository.fetch_source_branch!(source_repository.raw_repository, source_branch, local_ref)
end
diff --git a/app/models/sent_notification.rb b/app/models/sent_notification.rb
index 6e311806be1..3da7c301d28 100644
--- a/app/models/sent_notification.rb
+++ b/app/models/sent_notification.rb
@@ -5,14 +5,14 @@ class SentNotification < ActiveRecord::Base
belongs_to :noteable, polymorphic: true # rubocop:disable Cop/PolymorphicAssociations
belongs_to :recipient, class_name: "User"
- validates :project, :recipient, presence: true
+ validates :recipient, presence: true
validates :reply_key, presence: true, uniqueness: true
validates :noteable_id, presence: true, unless: :for_commit?
validates :commit_id, presence: true, if: :for_commit?
validates :in_reply_to_discussion_id, format: { with: /\A\h{40}\z/, allow_nil: true }
validate :note_valid
- after_save :keep_around_commit
+ after_save :keep_around_commit, if: :for_commit?
class << self
def reply_key
diff --git a/app/models/system_note_metadata.rb b/app/models/system_note_metadata.rb
index 29035480371..1c2161accc4 100644
--- a/app/models/system_note_metadata.rb
+++ b/app/models/system_note_metadata.rb
@@ -17,7 +17,11 @@ class SystemNoteMetadata < ActiveRecord::Base
].freeze
validates :note, presence: true
- validates :action, inclusion: ICON_TYPES, allow_nil: true
+ validates :action, inclusion: { in: :icon_types }, allow_nil: true
belongs_to :note
+
+ def icon_types
+ ICON_TYPES
+ end
end
diff --git a/app/models/term_agreement.rb b/app/models/term_agreement.rb
new file mode 100644
index 00000000000..8458a231bbd
--- /dev/null
+++ b/app/models/term_agreement.rb
@@ -0,0 +1,6 @@
+class TermAgreement < ActiveRecord::Base
+ belongs_to :term, class_name: 'ApplicationSetting::Term'
+ belongs_to :user
+
+ validates :user, :term, presence: true
+end
diff --git a/app/models/user.rb b/app/models/user.rb
index 4a602ffbb05..173ab38e20c 100644
--- a/app/models/user.rb
+++ b/app/models/user.rb
@@ -138,6 +138,8 @@ class User < ActiveRecord::Base
has_many :custom_attributes, class_name: 'UserCustomAttribute'
has_many :callouts, class_name: 'UserCallout'
has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
+ has_many :term_agreements
+ belongs_to :accepted_term, class_name: 'ApplicationSetting::Term'
#
# Validations
@@ -235,14 +237,18 @@ class User < ActiveRecord::Base
scope :order_recent_sign_in, -> { reorder(Gitlab::Database.nulls_last_order('current_sign_in_at', 'DESC')) }
scope :order_oldest_sign_in, -> { reorder(Gitlab::Database.nulls_last_order('current_sign_in_at', 'ASC')) }
- def self.with_two_factor
+ def self.with_two_factor_indistinct
joins("LEFT OUTER JOIN u2f_registrations AS u2f ON u2f.user_id = users.id")
- .where("u2f.id IS NOT NULL OR otp_required_for_login = ?", true).distinct(arel_table[:id])
+ .where("u2f.id IS NOT NULL OR users.otp_required_for_login = ?", true)
+ end
+
+ def self.with_two_factor
+ with_two_factor_indistinct.distinct(arel_table[:id])
end
def self.without_two_factor
joins("LEFT OUTER JOIN u2f_registrations AS u2f ON u2f.user_id = users.id")
- .where("u2f.id IS NULL AND otp_required_for_login = ?", false)
+ .where("u2f.id IS NULL AND users.otp_required_for_login = ?", false)
end
#
@@ -1091,8 +1097,11 @@ class User < ActiveRecord::Base
# <https://github.com/plataformatec/devise/blob/v4.0.0/lib/devise/models/lockable.rb#L92>
#
def increment_failed_attempts!
+ return if ::Gitlab::Database.read_only?
+
self.failed_attempts ||= 0
self.failed_attempts += 1
+
if attempts_exceeded?
lock_access! unless access_locked?
else
@@ -1187,6 +1196,15 @@ class User < ActiveRecord::Base
max_member_access_for_group_ids([group_id])[group_id]
end
+ def terms_accepted?
+ accepted_term_id.present?
+ end
+
+ def required_terms_not_accepted?
+ Gitlab::CurrentSettings.current_application_settings.enforce_terms? &&
+ !terms_accepted?
+ end
+
protected
# override, from Devise::Validatable
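A short sketch (not part of the diff) of the new terms helpers; `user` is any persisted `User`:

```ruby
user.terms_accepted?              # true once accepted_term_id is set
user.required_terms_not_accepted? # true only while enforce_terms? is on and the user has not accepted yet
```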
diff --git a/app/models/user_callout.rb b/app/models/user_callout.rb
index e4b69382626..9d461c6750a 100644
--- a/app/models/user_callout.rb
+++ b/app/models/user_callout.rb
@@ -2,7 +2,8 @@ class UserCallout < ActiveRecord::Base
belongs_to :user
enum feature_name: {
- gke_cluster_integration: 1
+ gke_cluster_integration: 1,
+ gcp_signup_offer: 2
}
validates :user, presence: true