-rw-r--r--  GITALY_SERVER_VERSION                                                      |   2
-rw-r--r--  changelogs/unreleased/runners-max-timeout-param.yml                        |   5
-rw-r--r--  changelogs/unreleased/sh-handle-colons-in-url-passwords.yml                |   5
-rw-r--r--  db/fixtures/development/19_environments.rb                                 |   4
-rw-r--r--  db/post_migrate/20180619121030_enqueue_delete_diff_files_workers.rb        |  26
-rw-r--r--  doc/administration/job_artifacts.md                                        |  13
-rw-r--r--  doc/administration/pages/index.md                                          |  18
-rw-r--r--  doc/development/what_requires_downtime.md                                  |   4
-rw-r--r--  doc/user/project/clusters/index.md                                         |   5
-rw-r--r--  lib/api/runners.rb                                                         |   2
-rw-r--r--  lib/declarative_policy.rb                                                  |  12
-rw-r--r--  lib/gitlab/background_migration/delete_diff_files.rb                       |  86
-rw-r--r--  lib/gitlab/background_migration/schedule_diff_files_deletion.rb            |  44
-rw-r--r--  lib/gitlab/ee_compat_check.rb                                              |  28
-rw-r--r--  lib/gitlab/git/blob.rb                                                     |  89
-rw-r--r--  lib/gitlab/url_sanitizer.rb                                                |   2
-rw-r--r--  lib/gitlab/workhorse.rb                                                    |  24
-rw-r--r--  spec/lib/gitlab/background_migration/delete_diff_files_spec.rb             |  62
-rw-r--r--  spec/lib/gitlab/background_migration/schedule_diff_files_deletion_spec.rb  |  43
-rw-r--r--  spec/lib/gitlab/git/blob_spec.rb                                           | 164
-rw-r--r--  spec/lib/gitlab/url_sanitizer_spec.rb                                      |   1
-rw-r--r--  spec/lib/gitlab/workhorse_spec.rb                                          |  42
-rw-r--r--  spec/migrations/enqueue_delete_diff_files_workers_spec.rb                  |  17
-rw-r--r--  spec/requests/api/runners_spec.rb                                          |  63
24 files changed, 459 insertions(+), 302 deletions(-)
diff --git a/GITALY_SERVER_VERSION b/GITALY_SERVER_VERSION
index 5af665a17e0..e23e3fd2982 100644
--- a/GITALY_SERVER_VERSION
+++ b/GITALY_SERVER_VERSION
@@ -1 +1 @@
-0.111.0
+0.112.0
diff --git a/changelogs/unreleased/runners-max-timeout-param.yml b/changelogs/unreleased/runners-max-timeout-param.yml
new file mode 100644
index 00000000000..875f805d849
--- /dev/null
+++ b/changelogs/unreleased/runners-max-timeout-param.yml
@@ -0,0 +1,5 @@
+---
+title: Add missing maximum_timeout parameter
+merge_request: 20355
+author: gfyoung
+type: fixed
diff --git a/changelogs/unreleased/sh-handle-colons-in-url-passwords.yml b/changelogs/unreleased/sh-handle-colons-in-url-passwords.yml
new file mode 100644
index 00000000000..7717d0aab37
--- /dev/null
+++ b/changelogs/unreleased/sh-handle-colons-in-url-passwords.yml
@@ -0,0 +1,5 @@
+---
+title: Properly handle colons in URL passwords
+merge_request:
+author:
+type: fixed
diff --git a/db/fixtures/development/19_environments.rb b/db/fixtures/development/19_environments.rb
index 00a14f458d1..65089f6ba4e 100644
--- a/db/fixtures/development/19_environments.rb
+++ b/db/fixtures/development/19_environments.rb
@@ -30,14 +30,14 @@ class Gitlab::Seeder::Environments
def create_merge_request_review_deployments!
@project
.merge_requests
- .select { |mr| mr.source_branch.match(/\p{Alnum}+/) }
+ .select { |mr| mr.source_branch.match(/[^a-zA-Z0-9]+/) }
.sample(4)
.each do |merge_request|
next unless merge_request.diff_head_sha
create_deployment!(
merge_request.source_project,
- "review/#{merge_request.source_branch.gsub(/[^a-zA-Z0-9]/, '')}",
+ "review/#{merge_request.source_branch.gsub(/[^a-zA-Z0-9]+/, '')}",
merge_request.source_branch,
merge_request.diff_head_sha
)
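
For reference, the two regexes in this seed change do different jobs: the `select` keeps only merge requests whose source branch contains at least one non-alphanumeric character, and the `gsub` then strips runs of those characters when building the environment name. A standalone sketch (the branch name is made up):

```ruby
branch = 'feature/add-2fa'

# Selection: matches because the branch contains '/' and '-'.
!!branch.match(/[^a-zA-Z0-9]+/) # => true

# Naming: collapse every run of non-alphanumeric characters.
"review/#{branch.gsub(/[^a-zA-Z0-9]+/, '')}" # => "review/featureadd2fa"
```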
diff --git a/db/post_migrate/20180619121030_enqueue_delete_diff_files_workers.rb b/db/post_migrate/20180619121030_enqueue_delete_diff_files_workers.rb
new file mode 100644
index 00000000000..c4d2f5f61a0
--- /dev/null
+++ b/db/post_migrate/20180619121030_enqueue_delete_diff_files_workers.rb
@@ -0,0 +1,26 @@
+class EnqueueDeleteDiffFilesWorkers < ActiveRecord::Migration
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+ SCHEDULER = 'ScheduleDiffFilesDeletion'.freeze
+ TMP_INDEX = 'tmp_partial_diff_id_with_files_index'.freeze
+
+ disable_ddl_transaction!
+
+ def up
+ unless index_exists_by_name?(:merge_request_diffs, TMP_INDEX)
+ add_concurrent_index(:merge_request_diffs, :id, where: "(state NOT IN ('without_files', 'empty'))", name: TMP_INDEX)
+ end
+
+ BackgroundMigrationWorker.perform_async(SCHEDULER)
+
+ # We don't remove the index since it's going to be used by the
+ # DeleteDiffFiles worker. We should remove it in an upcoming release.
+ end
+
+ def down
+ if index_exists_by_name?(:merge_request_diffs, TMP_INDEX)
+ remove_concurrent_index_by_name(:merge_request_diffs, TMP_INDEX)
+ end
+ end
+end
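
On PostgreSQL, the `add_concurrent_index` call above corresponds roughly to the SQL below. This is a simplified sketch: the real helper also retries, checks for existing indexes, and requires `disable_ddl_transaction!`, since `CREATE INDEX CONCURRENTLY` cannot run inside a transaction.

```ruby
# A sketch of what the helper does on PostgreSQL. The partial predicate
# keeps the index small: only diffs that still have files are indexed,
# which is exactly the set the workers will scan.
ActiveRecord::Base.connection.execute(<<~SQL)
  CREATE INDEX CONCURRENTLY tmp_partial_diff_id_with_files_index
    ON merge_request_diffs (id)
    WHERE state NOT IN ('without_files', 'empty')
SQL
```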
diff --git a/doc/administration/job_artifacts.md b/doc/administration/job_artifacts.md
index 10228d027e8..8c55c8c4298 100644
--- a/doc/administration/job_artifacts.md
+++ b/doc/administration/job_artifacts.md
@@ -88,13 +88,12 @@ _The artifacts are stored by default in
### Using object storage
>**Notes:**
-- [Introduced][ee-1762] in [GitLab Premium][eep] 9.4.
-- Since version 9.5, artifacts are [browsable], when object storage is enabled.
- 9.4 lacks this feature.
-> Available in [GitLab Premium](https://about.gitlab.com/pricing/) and
-[GitLab.com Silver](https://about.gitlab.com/gitlab-com/).
-> Since version 10.6, available in [GitLab CE](https://about.gitlab.com/pricing/)
-> Since version 11.0, we support direct_upload to S3.
+- [Introduced](https://gitlab.com/gitlab-org/gitlab-ee/merge_requests/1762) in
+ [GitLab Premium](https://about.gitlab.com/pricing/) 9.4.
+- Since version 9.5, artifacts are [browsable](../user/project/pipelines/job_artifacts.md#browsing-artifacts)
+  when object storage is enabled; 9.4 lacks this feature.
+- Since version 10.6, available in [GitLab Core](https://about.gitlab.com/pricing/).
+- Since version 11.0, we support `direct_upload` to S3.
If you don't want to use the local disk where GitLab is installed to store the
artifacts, you can use an object storage like AWS S3 instead.
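
For context, enabling object storage for artifacts is done in `/etc/gitlab/gitlab.rb`. A minimal sketch, assuming the Omnibus setting names of this era and placeholder credentials (verify against the object storage docs for your version):

```ruby
gitlab_rails['artifacts_object_store_enabled'] = true
gitlab_rails['artifacts_object_store_remote_directory'] = 'artifacts'
# Available since 11.0: clients upload straight to S3 instead of
# staging the file on local disk first.
gitlab_rails['artifacts_object_store_direct_upload'] = true
gitlab_rails['artifacts_object_store_connection'] = {
  'provider' => 'AWS',
  'region' => 'us-east-1',
  'aws_access_key_id' => 'AWS_ACCESS_KEY_ID',
  'aws_secret_access_key' => 'AWS_SECRET_ACCESS_KEY'
}
```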
diff --git a/doc/administration/pages/index.md b/doc/administration/pages/index.md
index c0a281382a5..b3602bc35ab 100644
--- a/doc/administration/pages/index.md
+++ b/doc/administration/pages/index.md
@@ -259,6 +259,24 @@ verification requirement. Navigate to `Admin area ➔ Settings` and uncheck
**Require users to prove ownership of custom domains** in the Pages section.
This setting is enabled by default.
+## Activate verbose logging for daemon
+
+Verbose logging was [introduced](https://gitlab.com/gitlab-org/omnibus-gitlab/merge_requests/2533) in
+Omnibus GitLab 11.1.
+
+Follow the steps below to configure verbose logging of the GitLab Pages daemon.
+
+1. By default, the daemon only logs with `INFO` level.
+
+   If you wish to make it log events with level `DEBUG`, you must configure this in
+   `/etc/gitlab/gitlab.rb`:
+
+   ```ruby
+   gitlab_pages['log_verbose'] = true
+   ```
+
+1. [Reconfigure GitLab][reconfigure]
+
## Change storage path
Follow the steps below to change the default path where GitLab Pages' contents
diff --git a/doc/development/what_requires_downtime.md b/doc/development/what_requires_downtime.md
index 47396666879..b668c9de6a0 100644
--- a/doc/development/what_requires_downtime.md
+++ b/doc/development/what_requires_downtime.md
@@ -198,14 +198,14 @@ And that's it, we're done!
## Changing The Schema For Large Tables
While `change_column_type_concurrently` and `rename_column_concurrently` can be
-used for changing the schema of a table without downtime it doesn't work very
+used for changing the schema of a table without downtime, it doesn't work very
well for large tables. Because all of the work happens in sequence, the migration
can take a very long time to complete, preventing a deployment from proceeding.
They can also produce a lot of pressure on the database because they rapidly
update many rows in sequence.
To reduce database pressure you should instead use
-`change_column_type_using_background_migration` or `rename_column_concurrently`
+`change_column_type_using_background_migration` or `rename_column_using_background_migration`
when migrating a column in a large table (e.g. `issues`). These methods work
similarly to their concurrent counterparts but use a background migration to spread
the work / load over a longer time period, without slowing down deployments.
diff --git a/doc/user/project/clusters/index.md b/doc/user/project/clusters/index.md
index baa6efe50d5..cc1d65e4e6c 100644
--- a/doc/user/project/clusters/index.md
+++ b/doc/user/project/clusters/index.md
@@ -96,8 +96,9 @@ To add an existing Kubernetes cluster to your project:
you can follow the
[Kubernetes documentation](https://kubernetes.io/docs/tasks/configure-pod-container/configure-service-account/)
to create one. You can also view or create service tokens in the
- [Kubernetes dashboard](https://kubernetes.io/docs/tasks/access-application-cluster/web-ui-dashboard/#config)
- (under **Config > Secrets**).
+ [Kubernetes dashboard](https://kubernetes.io/docs/tasks/access-application-cluster/web-ui-dashboard/)
+ (under **Config > Secrets**). **The account that will issue the service token
+ must have admin privileges on the cluster.**
- **Project namespace** (optional) - You don't have to fill it in; by leaving
it blank, GitLab will create one for you. Also:
- Each project should have a unique namespace.
diff --git a/lib/api/runners.rb b/lib/api/runners.rb
index 2071c5a62c1..51242341dba 100644
--- a/lib/api/runners.rb
+++ b/lib/api/runners.rb
@@ -58,7 +58,7 @@ module API
optional :access_level, type: String, values: Ci::Runner.access_levels.keys,
desc: 'The access_level of the runner'
optional :maximum_timeout, type: Integer, desc: 'Maximum timeout set when this Runner will handle the job'
- at_least_one_of :description, :active, :tag_list, :run_untagged, :locked, :access_level
+ at_least_one_of :description, :active, :tag_list, :run_untagged, :locked, :access_level, :maximum_timeout
end
put ':id' do
runner = get_runner(params.delete(:id))
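
The one-line change above matters because Grape's `at_least_one_of` rejects any request that supplies none of the listed parameters; before the fix, a PUT that set only `maximum_timeout` was answered with 400. A minimal standalone Grape sketch of that validator (a hypothetical API, not GitLab's):

```ruby
require 'grape'

class RunnerAPI < Grape::API
  format :json

  params do
    optional :description, type: String
    optional :maximum_timeout, type: Integer
    # Without :maximum_timeout in this list, a request carrying only
    # that parameter fails validation with 400 Bad Request.
    at_least_one_of :description, :maximum_timeout
  end
  put ':id' do
    declared(params, include_missing: false)
  end
end
```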
diff --git a/lib/declarative_policy.rb b/lib/declarative_policy.rb
index 1dd2855063d..dda6cd38dcd 100644
--- a/lib/declarative_policy.rb
+++ b/lib/declarative_policy.rb
@@ -21,7 +21,17 @@ module DeclarativePolicy
cache = opts[:cache] || {}
key = Cache.policy_key(user, subject)
- cache[key] ||= class_for(subject).new(user, subject, opts)
+ cache[key] ||=
+ if Gitlab.rails5?
+ # to avoid deadlocks in multi-threaded environment when
+ # autoloading is enabled, we allow concurrent loads,
+ # https://gitlab.com/gitlab-org/gitlab-ce/issues/48263
+ ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
+ class_for(subject).new(user, subject, opts)
+ end
+ else
+ class_for(subject).new(user, subject, opts)
+ end
end
def class_for(subject)
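
The interlock used above is the Rails autoloading lock. On Rails 5 with classic autoloading, a thread that may trigger constant loading while holding other threads off the interlock can deadlock them; `permit_concurrent_loads` declares that this region may block, so pending loads are allowed to proceed. The pattern in isolation (a sketch, assuming ActiveSupport is loaded and `class_for` is defined as in the file above):

```ruby
require 'active_support/dependencies'

def fetch_policy(user, subject)
  # This block may autoload a policy class on first use. Permitting
  # concurrent loads prevents us from deadlocking threads that are
  # waiting on the autoload interlock.
  ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
    class_for(subject).new(user, subject)
  end
end
```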
diff --git a/lib/gitlab/background_migration/delete_diff_files.rb b/lib/gitlab/background_migration/delete_diff_files.rb
index 8fb2c334048..664ead1af44 100644
--- a/lib/gitlab/background_migration/delete_diff_files.rb
+++ b/lib/gitlab/background_migration/delete_diff_files.rb
@@ -4,41 +4,77 @@
module Gitlab
module BackgroundMigration
class DeleteDiffFiles
- def perform(merge_request_diff_id)
- merge_request_diff = MergeRequestDiff.find_by(id: merge_request_diff_id)
+ class MergeRequestDiff < ActiveRecord::Base
+ self.table_name = 'merge_request_diffs'
- return unless merge_request_diff
- return unless should_delete_diff_files?(merge_request_diff)
+ belongs_to :merge_request
+ has_many :merge_request_diff_files
+ end
- MergeRequestDiff.transaction do
- merge_request_diff.update_column(:state, 'without_files')
-
- # explain (analyze, buffers) when deleting 453 diff files:
- #
- # Delete on merge_request_diff_files (cost=0.57..8487.35 rows=4846 width=6) (actual time=43.265..43.265 rows=0 loops=1)
- # Buffers: shared hit=2043 read=259 dirtied=254
- # -> Index Scan using index_merge_request_diff_files_on_mr_diff_id_and_order on merge_request_diff_files (cost=0.57..8487.35 rows=4846 width=6) (actu
- # al time=0.466..26.317 rows=453 loops=1)
- # Index Cond: (merge_request_diff_id = 463448)
- # Buffers: shared hit=17 read=84
- # Planning time: 0.107 ms
- # Execution time: 43.287 ms
- #
- MergeRequestDiffFile.where(merge_request_diff_id: merge_request_diff.id).delete_all
+ class MergeRequestDiffFile < ActiveRecord::Base
+ self.table_name = 'merge_request_diff_files'
+ end
+
+ DEAD_TUPLES_THRESHOLD = 50_000
+ VACUUM_WAIT_TIME = 5.minutes
+
+ def perform(ids)
+ @ids = ids
+
+ # We should reschedule until the dead tuple count reaches a desirable
+ # state (e.g. < 50_000). That may take more than one reschedule.
+ #
+ if should_wait_deadtuple_vacuum?
+ reschedule
+ return
end
+
+ prune_diff_files
+ end
+
+ def should_wait_deadtuple_vacuum?
+ return false unless Gitlab::Database.postgresql?
+
+ diff_files_dead_tuples_count >= DEAD_TUPLES_THRESHOLD
end
private
- def should_delete_diff_files?(merge_request_diff)
- return false if merge_request_diff.state == 'without_files'
+ def reschedule
+ BackgroundMigrationWorker.perform_in(VACUUM_WAIT_TIME, self.class.name.demodulize, [@ids])
+ end
+
+ def diffs_collection
+ MergeRequestDiff.where(id: @ids)
+ end
+
+ def diff_files_dead_tuples_count
+ dead_tuple =
+ execute_statement("SELECT n_dead_tup FROM pg_stat_all_tables "\
+ "WHERE relname = 'merge_request_diff_files'")[0]
- merge_request = merge_request_diff.merge_request
+ dead_tuple&.fetch('n_dead_tup', 0).to_i
+ end
+
+ def prune_diff_files
+ removed = 0
+ updated = 0
- return false unless merge_request.state == 'merged'
- return false if merge_request_diff.id == merge_request.latest_merge_request_diff_id
+ MergeRequestDiff.transaction do
+ updated = diffs_collection.update_all(state: 'without_files')
+ removed = MergeRequestDiffFile.where(merge_request_diff_id: @ids).delete_all
+ end
+
+ log_info("Removed #{removed} merge_request_diff_files rows, "\
+ "updated #{updated} merge_request_diffs rows")
+ end
+
+ def execute_statement(sql)
+ ActiveRecord::Base.connection.execute(sql)
+ end
- true
+ def log_info(message)
+ Rails.logger.info("BackgroundMigration::DeleteDiffFiles - #{message}")
end
end
end
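
The dead-tuple check above queries PostgreSQL's statistics collector and defers the deletion while autovacuum catches up. A standalone sketch of the same check (assumes an ActiveRecord connection to PostgreSQL):

```ruby
require 'active_record'

DEAD_TUPLES_THRESHOLD = 50_000

# Ask pg_stat_all_tables how many dead rows the table currently carries.
def dead_tuples_for(table_name)
  result = ActiveRecord::Base.connection.execute(
    "SELECT n_dead_tup FROM pg_stat_all_tables WHERE relname = '#{table_name}'"
  ).first

  result ? result['n_dead_tup'].to_i : 0
end

# Callers reschedule themselves instead of deleting while the table is
# still bloated, giving autovacuum time to reclaim space.
def should_wait_for_vacuum?
  dead_tuples_for('merge_request_diff_files') >= DEAD_TUPLES_THRESHOLD
end
```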
diff --git a/lib/gitlab/background_migration/schedule_diff_files_deletion.rb b/lib/gitlab/background_migration/schedule_diff_files_deletion.rb
new file mode 100644
index 00000000000..609cf19187c
--- /dev/null
+++ b/lib/gitlab/background_migration/schedule_diff_files_deletion.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+# rubocop:disable Style/Documentation
+
+module Gitlab
+ module BackgroundMigration
+ class ScheduleDiffFilesDeletion
+ class MergeRequestDiff < ActiveRecord::Base
+ self.table_name = 'merge_request_diffs'
+
+ belongs_to :merge_request
+
+ include EachBatch
+ end
+
+ DIFF_BATCH_SIZE = 5_000
+ INTERVAL = 5.minutes
+ MIGRATION = 'DeleteDiffFiles'
+
+ def perform
+ diffs = MergeRequestDiff
+ .from("(#{diffs_collection.to_sql}) merge_request_diffs")
+ .where('merge_request_diffs.id != merge_request_diffs.latest_merge_request_diff_id')
+ .select(:id)
+
+ diffs.each_batch(of: DIFF_BATCH_SIZE) do |relation, index|
+ ids = relation.pluck(:id)
+
+ BackgroundMigrationWorker.perform_in(index * INTERVAL, MIGRATION, [ids])
+ end
+ end
+
+ private
+
+ def diffs_collection
+ MergeRequestDiff
+ .joins(:merge_request)
+ .where("merge_requests.state = 'merged'")
+ .where('merge_requests.latest_merge_request_diff_id IS NOT NULL')
+ .where("merge_request_diffs.state NOT IN ('without_files', 'empty')")
+ .select('merge_requests.latest_merge_request_diff_id, merge_request_diffs.id')
+ end
+ end
+ end
+end
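
The scheduler above relies on `EachBatch` to page through IDs in primary-key order and on `index * INTERVAL` to stagger the jobs: batch 1 fires after 5 minutes, batch 2 after 10, and so on. The staggering pattern in isolation (a sketch; `relation` stands for any ActiveRecord relation that includes the `EachBatch` concern):

```ruby
BATCH_SIZE = 5_000
INTERVAL = 5.minutes # ActiveSupport duration

# each_batch yields a sub-relation plus a 1-based batch index; using the
# index as a delay multiplier spreads the deletions out over time so the
# database is never hit by all batches at once.
relation.each_batch(of: BATCH_SIZE) do |batch, index|
  ids = batch.pluck(:id)
  BackgroundMigrationWorker.perform_in(index * INTERVAL, 'DeleteDiffFiles', [ids])
end
```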
diff --git a/lib/gitlab/ee_compat_check.rb b/lib/gitlab/ee_compat_check.rb
index 8c72d00c1f3..ee604e66154 100644
--- a/lib/gitlab/ee_compat_check.rb
+++ b/lib/gitlab/ee_compat_check.rb
@@ -138,15 +138,23 @@ module Gitlab
def ee_branch_presence_check!
ee_remotes.keys.each do |remote|
- [ce_branch, ee_branch_prefix, ee_branch_suffix].each do |branch|
- _, status = step("Fetching #{remote}/#{branch}", %W[git fetch #{remote} #{branch}])
+ output, _ = step(
+ "Searching #{remote}",
+ %W[git ls-remote #{remote} *#{minimal_ee_branch_name}*])
- if status.zero?
- @ee_remote_with_branch = remote
- @ee_branch_found = branch
- return true
- end
- end
+ branches =
+ output.scan(%r{(?<=refs/heads/|refs/tags/).+}).sort_by(&:size)
+
+ next if branches.empty?
+
+ branch = branches.first
+
+ step("Fetching #{remote}/#{branch}", %W[git fetch #{remote} #{branch}])
+
+ @ee_remote_with_branch = remote
+ @ee_branch_found = branch
+
+ return true
end
puts
@@ -271,6 +279,10 @@ module Gitlab
@ee_patch_full_path ||= patches_dir.join(ee_patch_name)
end
+ def minimal_ee_branch_name
+ @minimal_ee_branch_name ||= ce_branch.sub(/(\Ace\-|\-ce\z)/, '')
+ end
+
def patch_name_from_branch(branch_name)
branch_name.parameterize << '.patch'
end
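
`minimal_ee_branch_name` trims a leading `ce-` or a trailing `-ce` from the CE branch name so the `*...*` glob passed to `git ls-remote` can match the EE counterpart under any of the usual naming conventions. For example (made-up branch names):

```ruby
pattern = /(\Ace\-|\-ce\z)/

'ce-48625-runner-timeout'.sub(pattern, '')  # => "48625-runner-timeout"
'48625-runner-timeout-ce'.sub(pattern, '')  # => "48625-runner-timeout"
'48625-runner-timeout'.sub(pattern, '')     # => "48625-runner-timeout" (unchanged)
```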
diff --git a/lib/gitlab/git/blob.rb b/lib/gitlab/git/blob.rb
index 96fa94d5790..71857bd2d87 100644
--- a/lib/gitlab/git/blob.rb
+++ b/lib/gitlab/git/blob.rb
@@ -61,17 +61,8 @@ module Gitlab
# Keep in mind that this method may allocate a lot of memory. It is up
# to the caller to limit the number of blobs and blob_size_limit.
#
- # Gitaly migration issue: https://gitlab.com/gitlab-org/gitaly/issues/798
def batch(repository, blob_references, blob_size_limit: MAX_DATA_DISPLAY_SIZE)
- Gitlab::GitalyClient.migrate(:list_blobs_by_sha_path) do |is_enabled|
- if is_enabled
- repository.gitaly_blob_client.get_blobs(blob_references, blob_size_limit).to_a
- else
- blob_references.map do |sha, path|
- find(repository, sha, path, limit: blob_size_limit)
- end
- end
- end
+ repository.gitaly_blob_client.get_blobs(blob_references, blob_size_limit).to_a
end
# Returns an array of Blob instances just with the metadata, that means
@@ -84,16 +75,8 @@ module Gitlab
# Returns array of Gitlab::Git::Blob
# Does not guarantee blob data will be set
def batch_lfs_pointers(repository, blob_ids)
- repository.gitaly_migrate(:batch_lfs_pointers) do |is_enabled|
- if is_enabled
- repository.gitaly_blob_client.batch_lfs_pointers(blob_ids.to_a)
- else
- blob_ids.lazy
- .select { |sha| possible_lfs_blob?(repository, sha) }
- .map { |sha| rugged_raw(repository, sha, limit: LFS_POINTER_MAX_SIZE) }
- .select(&:lfs_pointer?)
- .force
- end
+ repository.wrapped_gitaly_errors do
+ repository.gitaly_blob_client.batch_lfs_pointers(blob_ids.to_a)
end
end
@@ -104,72 +87,6 @@ module Gitlab
def size_could_be_lfs?(size)
size.between?(LFS_POINTER_MIN_SIZE, LFS_POINTER_MAX_SIZE)
end
-
- private
-
- # Recursive search of blob id by path
- #
- # Ex.
- # blog/ # oid: 1a
- # app/ # oid: 2a
- # models/ # oid: 3a
- # file.rb # oid: 4a
- #
- #
- # Blob.find_entry_by_path(repo, '1a', 'blog', 'app', 'file.rb') # => '4a'
- #
- def find_entry_by_path(repository, root_id, *path_parts)
- root_tree = repository.lookup(root_id)
-
- entry = root_tree.find do |entry|
- entry[:name] == path_parts[0]
- end
-
- return nil unless entry
-
- if path_parts.size > 1
- return nil unless entry[:type] == :tree
-
- path_parts.shift
- find_entry_by_path(repository, entry[:oid], *path_parts)
- else
- [:blob, :commit].include?(entry[:type]) ? entry : nil
- end
- end
-
- def submodule_blob(blob_entry, path, sha)
- new(
- id: blob_entry[:oid],
- name: blob_entry[:name],
- size: 0,
- data: '',
- path: path,
- commit_id: sha
- )
- end
-
- def rugged_raw(repository, sha, limit:)
- blob = repository.lookup(sha)
-
- return unless blob.is_a?(Rugged::Blob)
-
- new(
- id: blob.oid,
- size: blob.size,
- data: blob.content(limit),
- binary: blob.binary?
- )
- end
-
- # Efficient lookup to determine if object size
- # and type make it a possible LFS blob without loading
- # blob content into memory with repository.lookup(sha)
- def possible_lfs_blob?(repository, sha)
- object_header = repository.rugged.read_header(sha)
-
- object_header[:type] == :blob &&
- size_could_be_lfs?(object_header[:len])
- end
end
def initialize(options)
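
With the migration gate removed, `.batch` now always resolves blobs through a single Gitaly `GetBlobs` RPC, and the Rugged fallback with its private helpers (`find_entry_by_path`, `rugged_raw`, and friends) is deleted outright. Call sites are unchanged, for example (the SHA and paths are illustrative):

```ruby
references = [
  ['570e7b2abdd848b95f2f578043fc23bd6f6fd24d', 'files/ruby/popen.rb'],
  ['570e7b2abdd848b95f2f578043fc23bd6f6fd24d', 'six']
]

# One RPC fetches every blob; data is truncated to blob_size_limit bytes,
# so callers control how much memory a batch may allocate.
blobs = Gitlab::Git::Blob.batch(repository, references, blob_size_limit: 10_240)
blobs.map(&:path) # => ["files/ruby/popen.rb", "six"]
```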
diff --git a/lib/gitlab/url_sanitizer.rb b/lib/gitlab/url_sanitizer.rb
index 59331c827af..de8b6ec69ce 100644
--- a/lib/gitlab/url_sanitizer.rb
+++ b/lib/gitlab/url_sanitizer.rb
@@ -58,7 +58,7 @@ module Gitlab
if raw_credentials.present?
url.sub!("#{raw_credentials}@", '')
- user, password = raw_credentials.split(':')
+ user, _, password = raw_credentials.partition(':')
@credentials ||= { user: user.presence, password: password.presence }
end
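
`String#partition` splits on the first separator only, which is exactly what credential parsing needs: everything after the first `:` is the password, even if it contains more colons. Compare (illustrative values):

```ruby
raw = 'user:pa:ss:word'

# split(':') drops everything after the second colon when destructured:
user, password = raw.split(':')
[user, password] # => ["user", "pa"]

# partition(':') keeps the remainder intact:
user, _, password = raw.partition(':')
[user, password] # => ["user", "pa:ss:word"]
```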
diff --git a/lib/gitlab/workhorse.rb b/lib/gitlab/workhorse.rb
index 55c899912f9..a9629a92a50 100644
--- a/lib/gitlab/workhorse.rb
+++ b/lib/gitlab/workhorse.rb
@@ -98,16 +98,12 @@ module Gitlab
end
def send_git_patch(repository, diff_refs)
- params = if Gitlab::GitalyClient.feature_enabled?(:workhorse_send_git_patch, status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT)
- {
- 'GitalyServer' => gitaly_server_hash(repository),
- 'RawPatchRequest' => Gitaly::RawPatchRequest.new(
- gitaly_diff_or_patch_hash(repository, diff_refs)
- ).to_json
- }
- else
- workhorse_diff_or_patch_hash(repository, diff_refs)
- end
+ params = {
+ 'GitalyServer' => gitaly_server_hash(repository),
+ 'RawPatchRequest' => Gitaly::RawPatchRequest.new(
+ gitaly_diff_or_patch_hash(repository, diff_refs)
+ ).to_json
+ }
[
SEND_DATA_HEADER,
@@ -220,14 +216,6 @@ module Gitlab
}
end
- def workhorse_diff_or_patch_hash(repository, diff_refs)
- {
- 'RepoPath' => repository.path_to_repo,
- 'ShaFrom' => diff_refs.base_sha,
- 'ShaTo' => diff_refs.head_sha
- }
- end
-
def gitaly_diff_or_patch_hash(repository, diff_refs)
{
repository: repository.gitaly_repository,
diff --git a/spec/lib/gitlab/background_migration/delete_diff_files_spec.rb b/spec/lib/gitlab/background_migration/delete_diff_files_spec.rb
index 1b3df7b20d4..64c994a268f 100644
--- a/spec/lib/gitlab/background_migration/delete_diff_files_spec.rb
+++ b/spec/lib/gitlab/background_migration/delete_diff_files_spec.rb
@@ -1,31 +1,35 @@
require 'spec_helper'
-describe Gitlab::BackgroundMigration::DeleteDiffFiles, :migration, schema: 20180626125654 do
+describe Gitlab::BackgroundMigration::DeleteDiffFiles, :migration, schema: 20180619121030 do
describe '#perform' do
context 'when diff files can be deleted' do
let(:merge_request) { create(:merge_request, :merged) }
- let(:merge_request_diff) do
+ let!(:merge_request_diff) do
merge_request.create_merge_request_diff
merge_request.merge_request_diffs.first
end
+ let(:perform) do
+ described_class.new.perform(MergeRequestDiff.pluck(:id))
+ end
+
it 'deletes all merge request diff files' do
- expect { described_class.new.perform(merge_request_diff.id) }
+ expect { perform }
.to change { merge_request_diff.merge_request_diff_files.count }
.from(20).to(0)
end
it 'updates state to without_files' do
- expect { described_class.new.perform(merge_request_diff.id) }
+ expect { perform }
.to change { merge_request_diff.reload.state }
.from('collected').to('without_files')
end
it 'rollsback if something goes wrong' do
- expect(MergeRequestDiffFile).to receive_message_chain(:where, :delete_all)
+ expect(described_class::MergeRequestDiffFile).to receive_message_chain(:where, :delete_all)
.and_raise
- expect { described_class.new.perform(merge_request_diff.id) }
+ expect { perform }
.to raise_error
merge_request_diff.reload
@@ -35,35 +39,35 @@ describe Gitlab::BackgroundMigration::DeleteDiffFiles, :migration, schema: 20180
end
end
- it 'deletes no merge request diff files when MR is not merged' do
- merge_request = create(:merge_request, :opened)
- merge_request.create_merge_request_diff
- merge_request_diff = merge_request.merge_request_diffs.first
-
- expect { described_class.new.perform(merge_request_diff.id) }
- .not_to change { merge_request_diff.merge_request_diff_files.count }
- .from(20)
- end
-
- it 'deletes no merge request diff files when diff is marked as "without_files"' do
+ it 'reschedules itself when should_wait_deadtuple_vacuum' do
merge_request = create(:merge_request, :merged)
- merge_request.create_merge_request_diff
- merge_request_diff = merge_request.merge_request_diffs.first
+ first_diff = merge_request.merge_request_diff
+ second_diff = merge_request.create_merge_request_diff
- merge_request_diff.clean!
+ Sidekiq::Testing.fake! do
+ worker = described_class.new
+ allow(worker).to receive(:should_wait_deadtuple_vacuum?) { true }
- expect { described_class.new.perform(merge_request_diff.id) }
- .not_to change { merge_request_diff.merge_request_diff_files.count }
- .from(20)
+ worker.perform([first_diff.id, second_diff.id])
+
+ expect(described_class.name.demodulize).to be_scheduled_delayed_migration(5.minutes, [first_diff.id, second_diff.id])
+ expect(BackgroundMigrationWorker.jobs.size).to eq(1)
+ end
end
+ end
- it 'deletes no merge request diff files when diff is the latest' do
- merge_request = create(:merge_request, :merged)
- merge_request_diff = merge_request.merge_request_diff
+ describe '#should_wait_deadtuple_vacuum?' do
+ it 'returns true when merge_request_diff_files hits DEAD_TUPLES_THRESHOLD', :postgresql do
+ worker = described_class.new
+ threshold_query_result = [{ "n_dead_tup" => described_class::DEAD_TUPLES_THRESHOLD.to_s }]
+ normal_query_result = [{ "n_dead_tup" => '3' }]
+
+ allow(worker)
+ .to receive(:execute_statement)
+ .with(/SELECT n_dead_tup */)
+ .and_return(threshold_query_result, normal_query_result)
- expect { described_class.new.perform(merge_request_diff.id) }
- .not_to change { merge_request_diff.merge_request_diff_files.count }
- .from(20)
+ expect(worker.should_wait_deadtuple_vacuum?).to be(true)
end
end
end
diff --git a/spec/lib/gitlab/background_migration/schedule_diff_files_deletion_spec.rb b/spec/lib/gitlab/background_migration/schedule_diff_files_deletion_spec.rb
new file mode 100644
index 00000000000..fb5093b0bd1
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/schedule_diff_files_deletion_spec.rb
@@ -0,0 +1,43 @@
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::ScheduleDiffFilesDeletion, :migration, schema: 20180619121030 do
+ describe '#perform' do
+ let(:merge_request_diffs) { table(:merge_request_diffs) }
+ let(:merge_requests) { table(:merge_requests) }
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+
+ before do
+ stub_const("#{described_class.name}::DIFF_BATCH_SIZE", 3)
+
+ namespaces.create!(id: 1, name: 'gitlab', path: 'gitlab')
+ projects.create!(id: 1, namespace_id: 1, name: 'gitlab', path: 'gitlab')
+
+ merge_requests.create!(id: 1, target_project_id: 1, source_project_id: 1, target_branch: 'feature', source_branch: 'master', state: 'merged')
+
+ merge_request_diffs.create!(id: 1, merge_request_id: 1, state: 'collected')
+ merge_request_diffs.create!(id: 2, merge_request_id: 1, state: 'empty')
+ merge_request_diffs.create!(id: 3, merge_request_id: 1, state: 'without_files')
+ merge_request_diffs.create!(id: 4, merge_request_id: 1, state: 'collected')
+ merge_request_diffs.create!(id: 5, merge_request_id: 1, state: 'collected')
+ merge_request_diffs.create!(id: 6, merge_request_id: 1, state: 'collected')
+ merge_request_diffs.create!(id: 7, merge_request_id: 1, state: 'collected')
+
+ merge_requests.update(1, latest_merge_request_diff_id: 7)
+ end
+
+ it 'correctly schedules diff file deletion workers' do
+ Sidekiq::Testing.fake! do
+ Timecop.freeze do
+ described_class.new.perform
+
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(5.minutes, [1, 4, 5])
+
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(10.minutes, [6])
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(2)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/git/blob_spec.rb b/spec/lib/gitlab/git/blob_spec.rb
index 6900c4189b7..034b89a46fa 100644
--- a/spec/lib/gitlab/git/blob_spec.rb
+++ b/spec/lib/gitlab/git/blob_spec.rb
@@ -178,77 +178,67 @@ describe Gitlab::Git::Blob, seed_helper: true do
end
describe '.batch' do
- shared_examples 'loading blobs in batch' do
- let(:blob_references) do
- [
- [SeedRepo::Commit::ID, "files/ruby/popen.rb"],
- [SeedRepo::Commit::ID, 'six']
- ]
- end
+ let(:blob_references) do
+ [
+ [SeedRepo::Commit::ID, "files/ruby/popen.rb"],
+ [SeedRepo::Commit::ID, 'six']
+ ]
+ end
- subject { described_class.batch(repository, blob_references) }
+ subject { described_class.batch(repository, blob_references) }
- it { expect(subject.size).to eq(blob_references.size) }
+ it { expect(subject.size).to eq(blob_references.size) }
- context 'first blob' do
- let(:blob) { subject[0] }
+ context 'first blob' do
+ let(:blob) { subject[0] }
- it { expect(blob.id).to eq(SeedRepo::RubyBlob::ID) }
- it { expect(blob.name).to eq(SeedRepo::RubyBlob::NAME) }
- it { expect(blob.path).to eq("files/ruby/popen.rb") }
- it { expect(blob.commit_id).to eq(SeedRepo::Commit::ID) }
- it { expect(blob.data[0..10]).to eq(SeedRepo::RubyBlob::CONTENT[0..10]) }
- it { expect(blob.size).to eq(669) }
- it { expect(blob.mode).to eq("100644") }
- end
+ it { expect(blob.id).to eq(SeedRepo::RubyBlob::ID) }
+ it { expect(blob.name).to eq(SeedRepo::RubyBlob::NAME) }
+ it { expect(blob.path).to eq("files/ruby/popen.rb") }
+ it { expect(blob.commit_id).to eq(SeedRepo::Commit::ID) }
+ it { expect(blob.data[0..10]).to eq(SeedRepo::RubyBlob::CONTENT[0..10]) }
+ it { expect(blob.size).to eq(669) }
+ it { expect(blob.mode).to eq("100644") }
+ end
- context 'second blob' do
- let(:blob) { subject[1] }
+ context 'second blob' do
+ let(:blob) { subject[1] }
- it { expect(blob.id).to eq('409f37c4f05865e4fb208c771485f211a22c4c2d') }
- it { expect(blob.data).to eq('') }
- it 'does not mark the blob as binary' do
- expect(blob).not_to be_binary
- end
+ it { expect(blob.id).to eq('409f37c4f05865e4fb208c771485f211a22c4c2d') }
+ it { expect(blob.data).to eq('') }
+ it 'does not mark the blob as binary' do
+ expect(blob).not_to be_binary
end
+ end
- context 'limiting' do
- subject { described_class.batch(repository, blob_references, blob_size_limit: blob_size_limit) }
+ context 'limiting' do
+ subject { described_class.batch(repository, blob_references, blob_size_limit: blob_size_limit) }
- context 'positive' do
- let(:blob_size_limit) { 10 }
+ context 'positive' do
+ let(:blob_size_limit) { 10 }
- it { expect(subject.first.data.size).to eq(10) }
- end
+ it { expect(subject.first.data.size).to eq(10) }
+ end
- context 'zero' do
- let(:blob_size_limit) { 0 }
+ context 'zero' do
+ let(:blob_size_limit) { 0 }
- it 'only loads the metadata' do
- expect(subject.first.size).not_to be(0)
- expect(subject.first.data).to eq('')
- end
+ it 'only loads the metadata' do
+ expect(subject.first.size).not_to be(0)
+ expect(subject.first.data).to eq('')
end
+ end
- context 'negative' do
- let(:blob_size_limit) { -1 }
+ context 'negative' do
+ let(:blob_size_limit) { -1 }
- it 'ignores MAX_DATA_DISPLAY_SIZE' do
- stub_const('Gitlab::Git::Blob::MAX_DATA_DISPLAY_SIZE', 100)
+ it 'ignores MAX_DATA_DISPLAY_SIZE' do
+ stub_const('Gitlab::Git::Blob::MAX_DATA_DISPLAY_SIZE', 100)
- expect(subject.first.data.size).to eq(669)
- end
+ expect(subject.first.data.size).to eq(669)
end
end
end
-
- context 'when Gitaly list_blobs_by_sha_path feature is enabled' do
- it_behaves_like 'loading blobs in batch'
- end
-
- context 'when Gitaly list_blobs_by_sha_path feature is disabled', :disable_gitaly do
- it_behaves_like 'loading blobs in batch'
- end
end
describe '.batch_metadata' do
@@ -294,58 +284,48 @@ describe Gitlab::Git::Blob, seed_helper: true do
)
end
- shared_examples 'fetching batch of LFS pointers' do
- it 'returns a list of Gitlab::Git::Blob' do
- blobs = described_class.batch_lfs_pointers(repository, [lfs_blob.id])
-
- expect(blobs.count).to eq(1)
- expect(blobs).to all( be_a(Gitlab::Git::Blob) )
- expect(blobs).to be_an(Array)
- end
-
- it 'accepts blob IDs as a lazy enumerator' do
- blobs = described_class.batch_lfs_pointers(repository, [lfs_blob.id].lazy)
-
- expect(blobs.count).to eq(1)
- expect(blobs).to all( be_a(Gitlab::Git::Blob) )
- end
+ it 'returns a list of Gitlab::Git::Blob' do
+ blobs = described_class.batch_lfs_pointers(repository, [lfs_blob.id])
- it 'handles empty list of IDs gracefully' do
- blobs_1 = described_class.batch_lfs_pointers(repository, [].lazy)
- blobs_2 = described_class.batch_lfs_pointers(repository, [])
+ expect(blobs.count).to eq(1)
+ expect(blobs).to all( be_a(Gitlab::Git::Blob) )
+ expect(blobs).to be_an(Array)
+ end
- expect(blobs_1).to eq([])
- expect(blobs_2).to eq([])
- end
+ it 'accepts blob IDs as a lazy enumerator' do
+ blobs = described_class.batch_lfs_pointers(repository, [lfs_blob.id].lazy)
- it 'silently ignores tree objects' do
- blobs = described_class.batch_lfs_pointers(repository, [tree_object.oid])
+ expect(blobs.count).to eq(1)
+ expect(blobs).to all( be_a(Gitlab::Git::Blob) )
+ end
- expect(blobs).to eq([])
- end
+ it 'handles empty list of IDs gracefully' do
+ blobs_1 = described_class.batch_lfs_pointers(repository, [].lazy)
+ blobs_2 = described_class.batch_lfs_pointers(repository, [])
- it 'silently ignores non lfs objects' do
- blobs = described_class.batch_lfs_pointers(repository, [non_lfs_blob.id])
+ expect(blobs_1).to eq([])
+ expect(blobs_2).to eq([])
+ end
- expect(blobs).to eq([])
- end
+ it 'silently ignores tree objects' do
+ blobs = described_class.batch_lfs_pointers(repository, [tree_object.oid])
- it 'avoids loading large blobs into memory' do
- # This line could call `lookup` on `repository`, so do here before mocking.
- non_lfs_blob_id = non_lfs_blob.id
+ expect(blobs).to eq([])
+ end
- expect(repository).not_to receive(:lookup)
+ it 'silently ignores non lfs objects' do
+ blobs = described_class.batch_lfs_pointers(repository, [non_lfs_blob.id])
- described_class.batch_lfs_pointers(repository, [non_lfs_blob_id])
- end
+ expect(blobs).to eq([])
end
- context 'when Gitaly batch_lfs_pointers is enabled' do
- it_behaves_like 'fetching batch of LFS pointers'
- end
+ it 'avoids loading large blobs into memory' do
+ # This line could call `lookup` on `repository`, so do here before mocking.
+ non_lfs_blob_id = non_lfs_blob.id
+
+ expect(repository).not_to receive(:lookup)
- context 'when Gitaly batch_lfs_pointers is disabled', :disable_gitaly do
- it_behaves_like 'fetching batch of LFS pointers'
+ described_class.batch_lfs_pointers(repository, [non_lfs_blob_id])
end
end
diff --git a/spec/lib/gitlab/url_sanitizer_spec.rb b/spec/lib/gitlab/url_sanitizer_spec.rb
index fc8991fd31f..758a9bc5a2b 100644
--- a/spec/lib/gitlab/url_sanitizer_spec.rb
+++ b/spec/lib/gitlab/url_sanitizer_spec.rb
@@ -92,6 +92,7 @@ describe Gitlab::UrlSanitizer do
context 'credentials in URL' do
where(:url, :credentials) do
'http://foo:bar@example.com' | { user: 'foo', password: 'bar' }
+ 'http://foo:bar:baz@example.com' | { user: 'foo', password: 'bar:baz' }
'http://:bar@example.com' | { user: nil, password: 'bar' }
'http://foo:@example.com' | { user: 'foo', password: nil }
'http://foo@example.com' | { user: 'foo', password: nil }
diff --git a/spec/lib/gitlab/workhorse_spec.rb b/spec/lib/gitlab/workhorse_spec.rb
index 98a1865d347..23869f3d2da 100644
--- a/spec/lib/gitlab/workhorse_spec.rb
+++ b/spec/lib/gitlab/workhorse_spec.rb
@@ -68,34 +68,22 @@ describe Gitlab::Workhorse do
let(:diff_refs) { double(base_sha: "base", head_sha: "head") }
subject { described_class.send_git_patch(repository, diff_refs) }
- context 'when Gitaly workhorse_send_git_patch feature is enabled' do
- it 'sets the header correctly' do
- key, command, params = decode_workhorse_header(subject)
-
- expect(key).to eq("Gitlab-Workhorse-Send-Data")
- expect(command).to eq("git-format-patch")
- expect(params).to eq({
- 'GitalyServer' => {
- address: Gitlab::GitalyClient.address(project.repository_storage),
- token: Gitlab::GitalyClient.token(project.repository_storage)
- },
- 'RawPatchRequest' => Gitaly::RawPatchRequest.new(
- repository: repository.gitaly_repository,
- left_commit_id: 'base',
- right_commit_id: 'head'
- ).to_json
- }.deep_stringify_keys)
- end
- end
-
- context 'when Gitaly workhorse_send_git_patch feature is disabled', :disable_gitaly do
- it 'sets the header correctly' do
- key, command, params = decode_workhorse_header(subject)
+ it 'sets the header correctly' do
+ key, command, params = decode_workhorse_header(subject)
- expect(key).to eq("Gitlab-Workhorse-Send-Data")
- expect(command).to eq("git-format-patch")
- expect(params).to eq("RepoPath" => repository.path_to_repo, "ShaFrom" => "base", "ShaTo" => "head")
- end
+ expect(key).to eq("Gitlab-Workhorse-Send-Data")
+ expect(command).to eq("git-format-patch")
+ expect(params).to eq({
+ 'GitalyServer' => {
+ address: Gitlab::GitalyClient.address(project.repository_storage),
+ token: Gitlab::GitalyClient.token(project.repository_storage)
+ },
+ 'RawPatchRequest' => Gitaly::RawPatchRequest.new(
+ repository: repository.gitaly_repository,
+ left_commit_id: 'base',
+ right_commit_id: 'head'
+ ).to_json
+ }.deep_stringify_keys)
end
end
diff --git a/spec/migrations/enqueue_delete_diff_files_workers_spec.rb b/spec/migrations/enqueue_delete_diff_files_workers_spec.rb
new file mode 100644
index 00000000000..6bae870920c
--- /dev/null
+++ b/spec/migrations/enqueue_delete_diff_files_workers_spec.rb
@@ -0,0 +1,17 @@
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20180619121030_enqueue_delete_diff_files_workers.rb')
+
+describe EnqueueDeleteDiffFilesWorkers, :migration, :sidekiq do
+ it 'correctly schedules diff files deletion schedulers' do
+ Sidekiq::Testing.fake! do
+ expect(BackgroundMigrationWorker)
+ .to receive(:perform_async)
+ .with(described_class::SCHEDULER)
+ .and_call_original
+
+ migrate!
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(1)
+ end
+ end
+end
diff --git a/spec/requests/api/runners_spec.rb b/spec/requests/api/runners_spec.rb
index b5e4b6011ea..eb57c734e92 100644
--- a/spec/requests/api/runners_spec.rb
+++ b/spec/requests/api/runners_spec.rb
@@ -211,6 +211,69 @@ describe API::Runners do
describe 'PUT /runners/:id' do
context 'admin user' do
+ # see https://gitlab.com/gitlab-org/gitlab-ce/issues/48625
+ context 'single parameter update' do
+ it 'runner description' do
+ description = shared_runner.description
+ update_runner(shared_runner.id, admin, description: "#{description}_updated")
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(shared_runner.reload.description).to eq("#{description}_updated")
+ end
+
+ it 'runner active state' do
+ active = shared_runner.active
+ update_runner(shared_runner.id, admin, active: !active)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(shared_runner.reload.active).to eq(!active)
+ end
+
+ it 'runner tag list' do
+ update_runner(shared_runner.id, admin, tag_list: ['ruby2.1', 'pgsql', 'mysql'])
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(shared_runner.reload.tag_list).to include('ruby2.1', 'pgsql', 'mysql')
+ end
+
+ it 'runner untagged flag' do
+ # Ensure tag list is non-empty before setting untagged to false.
+ update_runner(shared_runner.id, admin, tag_list: ['ruby2.1', 'pgsql', 'mysql'])
+ update_runner(shared_runner.id, admin, run_untagged: 'false')
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(shared_runner.reload.run_untagged?).to be(false)
+ end
+
+ it 'runner unlocked flag' do
+ update_runner(shared_runner.id, admin, locked: 'true')
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(shared_runner.reload.locked?).to be(true)
+ end
+
+ it 'runner access level' do
+ update_runner(shared_runner.id, admin, access_level: 'ref_protected')
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(shared_runner.reload.ref_protected?).to be_truthy
+ end
+
+ it 'runner maximum timeout' do
+ update_runner(shared_runner.id, admin, maximum_timeout: 1234)
+
+ expect(response).to have_gitlab_http_status(200)
+ expect(shared_runner.reload.maximum_timeout).to eq(1234)
+ end
+
+ it 'fails with no parameters' do
+ put api("/runners/#{shared_runner.id}", admin)
+
+ shared_runner.reload
+ expect(response).to have_gitlab_http_status(400)
+ end
+ end
+
context 'when runner is shared' do
it 'updates runner' do
description = shared_runner.description