summaryrefslogtreecommitdiff
path: root/app/models/ci/bridge.rb
blob: a06b920342c06f142d203b8c4237e58bbb089793 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
# frozen_string_literal: true

module Ci
  # A bridge is a CI processable that, instead of executing on a runner,
  # triggers another pipeline: either a child pipeline in the same project
  # or a pipeline in another project. The downstream pipeline is created
  # asynchronously via Ci::CreateDownstreamPipelineWorker (see the state
  # machine below).
  class Bridge < Ci::Processable
    include Ci::Contextable
    include Ci::Metadatable
    include Importable
    include AfterCommitQueue
    include Ci::HasRef

    InvalidBridgeTypeError = Class.new(StandardError)
    InvalidTransitionError = Class.new(StandardError)

    # Defaults for the `trigger:forward` configuration: YAML variables are
    # forwarded unless explicitly disabled; pipeline variables are NOT
    # forwarded unless explicitly enabled.
    FORWARD_DEFAULTS = {
      yaml_variables: true,
      pipeline_variables: false
    }.freeze

    belongs_to :project
    belongs_to :trigger_request
    # Links to every downstream pipeline this bridge has created.
    has_many :sourced_pipelines, class_name: "::Ci::Sources::Pipeline",
                                  foreign_key: :source_job_id

    # Single link to a triggered pipeline and, through it, the downstream
    # pipeline itself. NOTE(review): no explicit ordering here — verify
    # which record wins when several sourced pipelines exist.
    has_one :sourced_pipeline, class_name: "::Ci::Sources::Pipeline", foreign_key: :source_job_id
    has_one :downstream_pipeline, through: :sourced_pipeline, source: :pipeline

    validates :ref, presence: true

    # rubocop:disable Cop/ActiveRecordSerialize
    serialize :options
    serialize :yaml_variables, ::Gitlab::Serializer::Ci::Variables
    # rubocop:enable Cop/ActiveRecordSerialize
    state_machine :status do
      # When the bridge becomes pending (i.e. ready to "run"), schedule
      # downstream pipeline creation after the surrounding transaction
      # commits — but only for bridges configured to trigger one.
      after_transition [:created, :manual, :waiting_for_resource] => :pending do |bridge|
        next unless bridge.triggers_downstream_pipeline?

        bridge.run_after_commit do
          ::Ci::CreateDownstreamPipelineWorker.perform_async(bridge.id)
        end
      end

      event :pending do
        transition all => :pending
      end

      event :manual do
        transition all => :manual
      end

      event :scheduled do
        transition all => :scheduled
      end

      # Moves a freshly created manual bridge into the manual state, where
      # it waits for a user to play it.
      event :actionize do
        transition created: :manual
      end
    end

    # A bridge is retryable under the same rules as other processables,
    # except when the feature flag is off or when it failed because of a
    # pipeline loop / maximum-descendant-depth limit.
    def retryable?
      # Retrying bridges is gated behind a feature flag.
      return false unless Feature.enabled?(:ci_recreate_downstream_pipeline, project)

      blocked_by_limits = failed? &&
        (pipeline_loop_detected? || reached_max_descendant_pipelines_depth?)

      blocked_by_limits ? false : super
    end

    # Eager-loads the associations commonly needed when presenting
    # bridges, to avoid N+1 queries.
    def self.with_preloads
      associations = {
        downstream_pipeline: [project: [:route, { namespace: :route }]],
        project: [:namespace]
      }

      preload(:metadata, associations)
    end

    # Attributes copied onto the new job when a bridge is cloned (e.g. on
    # retry).
    def self.clone_accessors
      [:pipeline, :project, :ref, :tag, :options, :name,
       :allow_failure, :stage, :stage_id, :stage_idx,
       :yaml_variables, :when, :description, :needs_attributes,
       :scheduling_type].freeze
    end

    # Mirrors the downstream pipeline status onto this bridge: success
    # succeeds the bridge, terminal failures drop it, and any other
    # (in-flight) status is ignored by returning false.
    def inherit_status_from_downstream!(pipeline)
      status = pipeline.status

      if status == 'success'
        self.success!
      elsif %w[failed canceled skipped].include?(status)
        self.drop!
      else
        false
      end
    end

    # True when this bridge has created at least one downstream pipeline.
    # Uses an EXISTS query; does not load the records.
    def has_downstream_pipeline?
      sourced_pipelines.exists?
    end

    # Parameters used to create the downstream pipeline. Child-pipeline
    # params take precedence over cross-project params; an empty hash
    # means this bridge triggers nothing.
    def downstream_pipeline_params
      if triggers_child_pipeline?
        child_params
      elsif downstream_project.present?
        cross_project_params
      else
        {}
      end
    end

    # The project that receives the triggered pipeline: the explicitly
    # configured project for cross-project triggers, the bridge's own
    # project for child pipelines, otherwise nil. Memoized.
    def downstream_project
      strong_memoize(:downstream_project) do
        next ::Project.find_by_full_path(downstream_project_path) if downstream_project_path

        project if triggers_child_pipeline?
      end
    end

    # Full path configured in `trigger:project`, or nil when options are
    # absent or this is not a cross-project trigger. Memoized.
    def downstream_project_path
      strong_memoize(:downstream_project_path) do
        (options || {}).dig(:trigger, :project)
      end
    end

    # The bridge's own pipeline acts as the parent when it triggers a
    # child pipeline; nil otherwise.
    def parent_pipeline
      return unless triggers_child_pipeline?

      pipeline
    end

    # True when this bridge is configured to trigger any pipeline at all,
    # child or cross-project.
    def triggers_downstream_pipeline?
      triggers_child_pipeline? || triggers_cross_project_pipeline?
    end

    # A child pipeline is triggered when `trigger:include` yields a YAML
    # payload (see #yaml_for_downstream).
    def triggers_child_pipeline?
      yaml_for_downstream.present?
    end

    # A cross-project pipeline is triggered when `trigger:project` names a
    # downstream project path.
    def triggers_cross_project_pipeline?
      downstream_project_path.present?
    end

    # Bridges always report a single fixed tag.
    def tags
      %i[bridge]
    end

    # Builds the detailed (presenter-level) status of this bridge as seen
    # by the given user.
    def detailed_status(current_user)
      factory = Gitlab::Ci::Status::Bridge::Factory.new(self, current_user)
      factory.fabricate!
    end

    # Bridges never support scheduling; always false.
    def schedulable?
      false
    end

    # A bridge can be played only when it is a manual action, not
    # archived, and currently in the manual state.
    def playable?
      return false unless action?
      return false if archived?

      manual?
    end

    # A bridge is a manual action when configured with `when: manual`.
    def action?
      'manual' == self.when
    end

    # rubocop: disable CodeReuse/ServiceClass
    # Runs this manual bridge on behalf of `current_user`. The
    # `job_variables_attributes` parameter is accepted but unused, purely
    # to keep the signature consistent with `Ci::Build#play`.
    def play(current_user, job_variables_attributes = nil)
      service = Ci::PlayBridgeService.new(project, current_user)
      service.execute(self)
    end
    # rubocop: enable CodeReuse/ServiceClass

    # Bridges never produce artifacts; always false.
    def artifacts?
      false
    end

    # A bridge is never runnable; always false.
    def runnable?
      false
    end

    # Bridges have no prerequisites; always false.
    def any_unmet_prerequisites?
      false
    end

    # Bridges have no environment, so there is no name to expand; returns
    # nil. NOTE(review): presumably kept for interface parity with
    # Ci::Build — confirm against callers.
    def expanded_environment_name
    end

    # Bridges have no environment; returns nil. NOTE(review): presumably
    # kept for interface parity with Ci::Build — confirm against callers.
    def persisted_environment
    end

    # Not supported for bridges; always raises NotImplementedError so
    # misuse surfaces immediately.
    def execute_hooks
      raise NotImplementedError
    end

    # View partial used to render this job — the shared generic commit
    # status partial.
    def to_partial_path
      'projects/generic_commit_statuses/generic_commit_status'
    end

    # YAML document handed to a child pipeline, built from the
    # `trigger:include` option; nil when no includes are configured.
    # Memoized.
    def yaml_for_downstream
      strong_memoize(:yaml_for_downstream) do
        includes = options&.dig(:trigger, :include)

        next unless includes

        YAML.dump('include' => includes)
      end
    end

    # The ref targeted in the downstream project, taken from
    # `trigger:branch` with CI variables expanded; nil when no branch is
    # configured.
    def target_ref
      branch = options&.dig(:trigger, :branch)
      return unless branch

      all_variables = scoped_variables.to_runner_variables
      ::ExpandVariables.expand(branch, all_variables)
    end

    # Whether the bridge mirrors the downstream pipeline's status
    # (`trigger:strategy: depend`). Memoized.
    def dependent?
      strong_memoize(:dependent) do
        (options || {}).dig(:trigger, :strategy) == 'depend'
      end
    end

    # Variables to pass to the downstream pipeline. With the
    # `ci_trigger_forward_variables` flag enabled, duplicate keys are
    # removed keeping the LAST (highest-priority) occurrence; otherwise
    # the legacy behavior applies.
    def downstream_variables
      return legacy_downstream_variables unless ::Feature.enabled?(:ci_trigger_forward_variables, project)

      # reverse -> uniq -> reverse keeps the last occurrence per key, so
      # later sources from #calculate_downstream_variables win.
      calculate_downstream_variables
        .reverse
        .uniq { |var| var[:key] }
        .reverse
    end

    # Convenience accessor for the ref inside the computed downstream
    # pipeline parameters.
    def target_revision_ref
      params = downstream_pipeline_params
      params.dig(:target_revision, :ref)
    end

    private

    # Pipeline-creation parameters for a cross-project trigger. Falls back
    # to the downstream project's default branch when no explicit ref is
    # configured.
    def cross_project_params
      revision = {
        ref: target_ref || downstream_project.default_branch,
        variables_attributes: downstream_variables
      }

      {
        project: downstream_project,
        source: :pipeline,
        target_revision: revision,
        execute_params: { ignore_skip_ci: true, bridge: self }
      }
    end

    # Pipeline-creation parameters for a child pipeline. The child runs
    # against the exact SHAs of the bridge's own (parent) pipeline and
    # inherits its merge request.
    def child_params
      parent = pipeline

      {
        project: project,
        source: :parent_pipeline,
        target_revision: {
          ref: parent.ref,
          checkout_sha: parent.sha,
          before: parent.before_sha,
          source_sha: parent.source_sha,
          target_sha: parent.target_sha,
          variables_attributes: downstream_variables
        },
        execute_params: {
          ignore_skip_ci: true,
          bridge: self,
          merge_request: parent.merge_request
        }
      }
    end

    # Pre-feature-flag behavior: forwards only the YAML-defined variables,
    # expanded against the scoped plus persisted pipeline variables.
    def legacy_downstream_variables
      all_variables = scoped_variables
                        .concat(pipeline.persisted_variables)
                        .to_runner_variables

      yaml_variables.to_a.map do |hash|
        { key: hash[:key], value: ::ExpandVariables.expand(hash[:value], all_variables) }
      end
    end

    # Collects every variable source to forward downstream, all expanded
    # against the same context. Sources are listed in ascending priority:
    # #downstream_variables later de-duplicates keys keeping the last
    # occurrence, so later sources win.
    def calculate_downstream_variables
      expansion_context = scoped_variables
                            .concat(pipeline.persisted_variables)
                            .to_runner_variables

      downstream_yaml_variables(expansion_context) +
        downstream_pipeline_variables(expansion_context) +
        downstream_pipeline_schedule_variables(expansion_context)
    end

    # YAML-defined variables to forward, expanded against the given
    # context; empty when yaml-variable forwarding is disabled.
    def downstream_yaml_variables(expand_variables)
      return [] unless forward_yaml_variables?

      Array(yaml_variables).map do |hash|
        { key: hash[:key], value: ::ExpandVariables.expand(hash[:value], expand_variables) }
      end
    end

    # Pipeline-level variables to forward, expanded against the given
    # context; empty when pipeline-variable forwarding is disabled.
    def downstream_pipeline_variables(expand_variables)
      return [] unless forward_pipeline_variables?

      Array(pipeline.variables).map do |variable|
        { key: variable.key, value: ::ExpandVariables.expand(variable.value, expand_variables) }
      end
    end

    # Schedule-defined variables to forward when the upstream pipeline was
    # started by a schedule; empty when forwarding is disabled or there is
    # no schedule.
    def downstream_pipeline_schedule_variables(expand_variables)
      return [] unless forward_pipeline_variables?

      schedule = pipeline.pipeline_schedule
      return [] unless schedule

      Array(schedule.variables).map do |variable|
        { key: variable.key, value: ::ExpandVariables.expand(variable.value, expand_variables) }
      end
    end

    # Whether YAML variables should be forwarded downstream. Reads
    # `trigger:forward:yaml_variables`, falling back to FORWARD_DEFAULTS
    # (true) when unset. Memoized.
    def forward_yaml_variables?
      strong_memoize(:forward_yaml_variables) do
        configured = options&.dig(:trigger, :forward, :yaml_variables)

        if configured.nil?
          FORWARD_DEFAULTS[:yaml_variables]
        else
          configured
        end
      end
    end

    # Whether pipeline variables should be forwarded downstream. Reads
    # `trigger:forward:pipeline_variables`, falling back to
    # FORWARD_DEFAULTS (false) when unset. Memoized.
    def forward_pipeline_variables?
      strong_memoize(:forward_pipeline_variables) do
        configured = options&.dig(:trigger, :forward, :pipeline_variables)

        if configured.nil?
          FORWARD_DEFAULTS[:pipeline_variables]
        else
          configured
        end
      end
    end
  end
end

::Ci::Bridge.prepend_mod_with('Ci::Bridge')