# frozen_string_literal: true
module Gitlab
  module Analytics
    module CycleAnalytics
      # Collects value stream analytics data (records, median, average, count)
      # for a single stage, optionally delegating to the pre-aggregated
      # backend when the caller opts in via params.
      #
      # Arguments:
      # stage - an instance of CycleAnalytics::ProjectStage or CycleAnalytics::GroupStage
      # params:
      #   current_user: an instance of User
      #   from: DateTime
      #   to: DateTime
      class DataCollector
        include Gitlab::Utils::StrongMemoize

        # Upper bound for #limit_count; keeps COUNT(*) cheap on large groups.
        MAX_COUNT = 1001

        delegate :serialized_records, to: :records_fetcher

        def initialize(stage:, params: {})
          @stage = stage
          @params = params
        end

        # Object that loads and serializes the stage records.
        def records_fetcher
          strong_memoize(:records_fetcher) do
            next aggregated_data_collector.records_fetcher if use_aggregated_data_collector?

            RecordsFetcher.new(stage: stage, query: query, params: params)
          end
        end

        # Median duration calculator for the stage.
        def median
          strong_memoize(:median) do
            next aggregated_data_collector.median if use_aggregated_data_collector?

            Median.new(stage: stage, query: query, params: params)
          end
        end

        # Average duration calculator for the stage.
        # NOTE(review): unlike the other readers, this never uses the
        # aggregated backend — presumably unsupported there; confirm.
        def average
          strong_memoize(:average) { Average.new(stage: stage, query: query, params: params) }
        end

        # Number of matching records, capped at MAX_COUNT (see #limit_count).
        def count
          strong_memoize(:count) do
            use_aggregated_data_collector? ? aggregated_data_collector.count : limit_count
          end
        end

        private

        attr_reader :stage, :params

        # Base relation for the stage, built fresh on every call;
        # the public readers memoize their own results instead.
        def query
          BaseQueryBuilder.new(stage: stage, params: params).build
        end

        # Limiting the maximum number of records so the COUNT(*) query stays efficient for large groups.
        # COUNT = 1001, show 1000+ on the UI
        # COUNT < 1001, show the actual number on the UI
        def limit_count
          query.limit(MAX_COUNT).count
        end

        # Lazily built collector backed by the pre-aggregated tables.
        def aggregated_data_collector
          @aggregated_data_collector ||= Aggregated::DataCollector.new(stage: stage, params: params)
        end

        # Callers opt in to the aggregated backend explicitly; defaults to false.
        def use_aggregated_data_collector?
          params.fetch(:use_aggregated_data_collector, false)
        end
      end
    end
  end
end
# NOTE(review): GitLab convention — prepend_mod_with injects the EE module of
# the same name (when it exists) so EE can extend/override this class's methods.
Gitlab::Analytics::CycleAnalytics::DataCollector.prepend_mod_with('Gitlab::Analytics::CycleAnalytics::DataCollector')