author    GitLab Bot <gitlab-bot@gitlab.com>  2020-03-11 15:09:37 +0000
committer GitLab Bot <gitlab-bot@gitlab.com>  2020-03-11 15:09:37 +0000
commit    a210c43e0aca0311cc1d3d381763b25979ec72dc (patch)
tree      0325d173da7a6e7bd6c2cdf450d0aa1c4e142d0f /lib
parent    c9687bdf58e9d4a9c3942f587bd4841f42e3b5de (diff)
Add latest changes from gitlab-org/gitlab@master
Diffstat (limited to 'lib')
-rw-r--r--  lib/gitlab/background_migration/backfill_snippet_repositories.rb |  89
-rw-r--r--  lib/gitlab/elasticsearch/logs.rb                                 | 115
-rw-r--r--  lib/gitlab/usage_counters/common.rb                              |  30
-rw-r--r--  lib/gitlab/usage_counters/pod_logs.rb                            |  11
4 files changed, 245 insertions(+), 0 deletions(-)
diff --git a/lib/gitlab/background_migration/backfill_snippet_repositories.rb b/lib/gitlab/background_migration/backfill_snippet_repositories.rb
new file mode 100644
index 00000000000..fa6453abefb
--- /dev/null
+++ b/lib/gitlab/background_migration/backfill_snippet_repositories.rb
@@ -0,0 +1,89 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+    # Class that creates the repository on disk and backfills the
+    # snippet_repositories table for snippets that are missing an entry.
+ class BackfillSnippetRepositories
+ MAX_RETRIES = 2
+
+ def perform(start_id, stop_id)
+ Snippet.includes(:author, snippet_repository: :shard).where(id: start_id..stop_id).find_each do |snippet|
+          # Expire the memoized exists? value in case a stale value was cached
+ snippet.repository.expire_exists_cache
+
+ next if repository_present?(snippet)
+
+ retry_index = 0
+
+ begin
+ create_repository_and_files(snippet)
+
+ logger.info(message: 'Snippet Migration: repository created and migrated', snippet: snippet.id)
+ rescue => e
+ retry_index += 1
+
+ retry if retry_index < MAX_RETRIES
+
+ logger.error(message: "Snippet Migration: error migrating snippet. Reason: #{e.message}", snippet: snippet.id)
+
+ destroy_snippet_repository(snippet)
+ delete_repository(snippet)
+ end
+ end
+ end
+
+ private
+
+ def repository_present?(snippet)
+ snippet.snippet_repository && !snippet.empty_repo?
+ end
+
+ def create_repository_and_files(snippet)
+ snippet.create_repository
+ create_commit(snippet)
+ end
+
+ def destroy_snippet_repository(snippet)
+        # Remove the DB record
+ snippet.snippet_repository&.destroy
+ rescue => e
+ logger.error(message: "Snippet Migration: error destroying snippet repository. Reason: #{e.message}", snippet: snippet.id)
+ end
+
+ def delete_repository(snippet)
+        # Remove the repository from disk
+ snippet.repository.remove if snippet.repository_exists?
+ rescue => e
+ logger.error(message: "Snippet Migration: error deleting repository. Reason: #{e.message}", snippet: snippet.id)
+ end
+
+ def logger
+ @logger ||= Gitlab::BackgroundMigration::Logger.build
+ end
+
+ def snippet_action(snippet)
+        # We don't need the previous_path param
+        # because we're not updating any existing file
+ [{ file_path: filename(snippet),
+ content: snippet.content }]
+ end
+
+ def filename(snippet)
+ snippet.file_name.presence || empty_file_name
+ end
+
+ def empty_file_name
+ @empty_file_name ||= "#{SnippetRepository::DEFAULT_EMPTY_FILE_NAME}1.txt"
+ end
+
+ def commit_attrs
+ @commit_attrs ||= { branch_name: 'master', message: 'Initial commit' }
+ end
+
+ def create_commit(snippet)
+ snippet.snippet_repository.multi_files_action(snippet.author, snippet_action(snippet), commit_attrs)
+ end
+ end
+ end
+end
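
A minimal sketch of invoking the migration above, assuming the usual GitLab background-migration convention of calling an instance's perform with an ID range (in production the batches would typically be scheduled by a post-deployment migration). The ID range is illustrative.

# Hedged sketch: run the backfill directly for an illustrative ID range.
migration = Gitlab::BackgroundMigration::BackfillSnippetRepositories.new
migration.perform(1, 10_000) # start_id, stop_id - illustrative values
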
diff --git a/lib/gitlab/elasticsearch/logs.rb b/lib/gitlab/elasticsearch/logs.rb
new file mode 100644
index 00000000000..eca8b71dd7d
--- /dev/null
+++ b/lib/gitlab/elasticsearch/logs.rb
@@ -0,0 +1,115 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Elasticsearch
+ class Logs
+ # How many log lines to fetch in a query
+ LOGS_LIMIT = 500
+
+ def initialize(client)
+ @client = client
+ end
+
+ def pod_logs(namespace, pod_name, container_name = nil, search = nil, start_time = nil, end_time = nil)
+ query = { bool: { must: [] } }.tap do |q|
+ filter_pod_name(q, pod_name)
+ filter_namespace(q, namespace)
+ filter_container_name(q, container_name)
+ filter_search(q, search)
+ filter_times(q, start_time, end_time)
+ end
+
+ body = build_body(query)
+ response = @client.search body: body
+
+ format_response(response)
+ end
+
+ private
+
+ def build_body(query)
+ {
+ query: query,
+ # reverse order so we can query N-most recent records
+ sort: [
+ { "@timestamp": { order: :desc } },
+ { "offset": { order: :desc } }
+ ],
+ # only return these fields in the response
+ _source: ["@timestamp", "message"],
+          # Fixed limit for now; we should eventually support paginated queries
+ size: ::Gitlab::Elasticsearch::Logs::LOGS_LIMIT
+ }
+ end
+
+ def filter_pod_name(query, pod_name)
+ query[:bool][:must] << {
+ match_phrase: {
+ "kubernetes.pod.name" => {
+ query: pod_name
+ }
+ }
+ }
+ end
+
+ def filter_namespace(query, namespace)
+ query[:bool][:must] << {
+ match_phrase: {
+ "kubernetes.namespace" => {
+ query: namespace
+ }
+ }
+ }
+ end
+
+ def filter_container_name(query, container_name)
+        # A pod can contain multiple containers.
+        # By default we return logs from every container.
+ return if container_name.nil?
+
+ query[:bool][:must] << {
+ match_phrase: {
+ "kubernetes.container.name" => {
+ query: container_name
+ }
+ }
+ }
+ end
+
+ def filter_search(query, search)
+ return if search.nil?
+
+ query[:bool][:must] << {
+ simple_query_string: {
+ query: search,
+ fields: [:message],
+ default_operator: :and
+ }
+ }
+ end
+
+ def filter_times(query, start_time, end_time)
+ return unless start_time || end_time
+
+ time_range = { range: { :@timestamp => {} } }.tap do |tr|
+ tr[:range][:@timestamp][:gte] = start_time if start_time
+ tr[:range][:@timestamp][:lt] = end_time if end_time
+ end
+
+ query[:bool][:filter] = [time_range]
+ end
+
+ def format_response(response)
+ result = response.fetch("hits", {}).fetch("hits", []).map do |hit|
+ {
+ timestamp: hit["_source"]["@timestamp"],
+ message: hit["_source"]["message"]
+ }
+ end
+
+ # we queried for the N-most recent records but we want them ordered oldest to newest
+ result.reverse
+ end
+ end
+ end
+end
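
A hedged usage sketch for the Logs class above. The client construction assumes the elasticsearch-ruby gem and a local cluster; in GitLab the client would come from the cluster's Elastic Stack integration, which this diff does not show.

require 'elasticsearch'

# Illustrative client; real callers would obtain one from the cluster
# integration rather than building it inline.
client = Elasticsearch::Client.new(url: 'http://localhost:9200')
logs = Gitlab::Elasticsearch::Logs.new(client)

# Returns up to LOGS_LIMIT lines from every container of the pod, restricted
# to messages matching "error"; all arguments after pod_name are optional.
entries = logs.pod_logs('production', 'web-1234abcd', nil, 'error')
entries.each { |entry| puts "#{entry[:timestamp]} #{entry[:message]}" }
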
diff --git a/lib/gitlab/usage_counters/common.rb b/lib/gitlab/usage_counters/common.rb
new file mode 100644
index 00000000000..a5bdac430f4
--- /dev/null
+++ b/lib/gitlab/usage_counters/common.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module UsageCounters
+ class Common
+ class << self
+ def increment(project_id)
+ Gitlab::Redis::SharedState.with { |redis| redis.hincrby(base_key, project_id, 1) }
+ end
+
+ def usage_totals
+ Gitlab::Redis::SharedState.with do |redis|
+ total_sum = 0
+
+ totals = redis.hgetall(base_key).each_with_object({}) do |(project_id, count), result|
+ total_sum += result[project_id.to_i] = count.to_i
+ end
+
+ totals[:total] = total_sum
+ totals
+ end
+ end
+
+ def base_key
+ raise NotImplementedError
+ end
+ end
+ end
+ end
+end
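
To illustrate the contract of Common, a sketch of a hypothetical subclass and the shape usage_totals returns; the Deployments counter, its key, and the project IDs below are invented for the example and are not part of this diff.

# Hypothetical counter, defined only to exercise the base class.
class Gitlab::UsageCounters::Deployments < Gitlab::UsageCounters::Common
  def self.base_key
    'DEPLOYMENTS_USAGE_COUNTS' # invented key
  end
end

Gitlab::UsageCounters::Deployments.increment(42)
Gitlab::UsageCounters::Deployments.increment(42)
Gitlab::UsageCounters::Deployments.increment(99)
Gitlab::UsageCounters::Deployments.usage_totals
# => { 42 => 2, 99 => 1, total: 3 }
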
diff --git a/lib/gitlab/usage_counters/pod_logs.rb b/lib/gitlab/usage_counters/pod_logs.rb
new file mode 100644
index 00000000000..94e29d2fad7
--- /dev/null
+++ b/lib/gitlab/usage_counters/pod_logs.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module UsageCounters
+ class PodLogs < Common
+ def self.base_key
+ 'POD_LOGS_USAGE_COUNTS'
+ end
+ end
+ end
+end
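
A short usage sketch for the PodLogs counter; the call site (counting each pod-logs request for a project) is an assumption, since this diff only adds the counter itself.

# Hedged sketch: bump the counter when serving pod logs for a project.
Gitlab::UsageCounters::PodLogs.increment(project.id) # project is illustrative

# Later, read back per-project counts plus a grand total:
Gitlab::UsageCounters::PodLogs.usage_totals
# => { 42 => 5, total: 5 } (values hypothetical)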