# app/services/ci/job_artifacts/create_service.rb
# frozen_string_literal: true

module Ci
  module JobArtifacts
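    # Handles artifact uploads for a CI job. `authorize` pre-validates an
    # upload and returns Workhorse upload headers; `execute` builds, parses,
    # and persists the uploaded artifact plus its optional metadata file.
    #
    # Rough usage sketch (caller-side names such as `build`, `file`, and
    # `metadata_file` are illustrative, not defined in this file):
    #
    #   service = Ci::JobArtifacts::CreateService.new(build)
    #   service.authorize(artifact_type: 'archive', filesize: file.size)
    #   service.execute(file, { artifact_type: 'archive', artifact_format: 'zip' }, metadata_file: metadata_file)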
    class CreateService < ::BaseService
      include Gitlab::Utils::UsageData

      LSIF_ARTIFACT_TYPE = 'lsif'

      OBJECT_STORAGE_ERRORS = [
        Errno::EIO,
        Google::Apis::ServerError,
        Signet::RemoteServerError
      ].freeze

      def initialize(job)
        @job = job
        @project = job.project
        @pipeline = job.pipeline
      end

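      # Pre-upload authorization used by Workhorse: rejects uploads over the
      # per-type size limit, then returns Workhorse authorization headers.
      # LSIF uploads additionally get the `ProcessLsif` header and a
      # code-intelligence usage event.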
      def authorize(artifact_type:, filesize: nil)
        result = validate_requirements(artifact_type: artifact_type, filesize: filesize)
        return result unless result[:status] == :success

        headers = JobArtifactUploader.workhorse_authorize(
          has_length: false,
          maximum_size: max_size(artifact_type),
          use_final_store_path: Feature.enabled?(:ci_artifacts_upload_to_final_location, project),
          final_store_path_root_id: project.id
        )

        if lsif?(artifact_type)
          headers[:ProcessLsif] = true
          track_usage_event('i_source_code_code_intelligence', project.id)
        end

        success(headers: headers)
      end

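      # Creates the artifact from an already-uploaded file. The upload is a
      # no-op when an artifact of the same type with an identical SHA256
      # already exists; otherwise the artifact (and optional metadata
      # artifact) is built, parsed, and persisted in a single transaction.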
      def execute(artifacts_file, params, metadata_file: nil)
        result = validate_requirements(artifact_type: params[:artifact_type], filesize: artifacts_file.size)
        return result unless result[:status] == :success

        return success if sha256_matches_existing_artifact?(params[:artifact_type], artifacts_file)

        build_result = build_artifact(artifacts_file, params, metadata_file)
        return build_result unless build_result[:status] == :success

        artifact = build_result[:artifact]
        artifact_metadata = build_result[:artifact_metadata]

        track_artifact_uploader(artifact)

        parse_result = parse_artifact(artifact)
        return parse_result unless parse_result[:status] == :success

        persist_artifact(artifact, artifact_metadata)
      end

      private

      attr_reader :job, :project, :pipeline

      def validate_requirements(artifact_type:, filesize:)
        return too_large_error if too_large?(artifact_type, filesize)

        success
      end

      def too_large?(type, size)
        size > max_size(type) if size
      end

      def lsif?(type)
        type == LSIF_ARTIFACT_TYPE
      end

      def max_size(type)
        Ci::JobArtifact.max_artifact_size(type: type, project: project)
      end

      def too_large_error
        error('file size has reached maximum size limit', :payload_too_large)
      end

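      # Builds (without saving) the Ci::JobArtifact record, carrying over the
      # pipeline lock state and resolving expiry and accessibility; a metadata
      # file, when given, becomes a second gzip-format artifact on the same job.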
      def build_artifact(artifacts_file, params, metadata_file)
        artifact_attributes = {
          job: job,
          project: project,
          expire_in: expire_in(params),
          accessibility: accessibility(params),
          locked: pipeline.locked
        }

        file_attributes = {
          file_type: params[:artifact_type],
          file_format: params[:artifact_format],
          file_sha256: artifacts_file.sha256,
          file: artifacts_file
        }

        artifact = Ci::JobArtifact.new(artifact_attributes.merge(file_attributes))

        artifact_metadata = build_metadata_artifact(artifact, metadata_file) if metadata_file

        success(artifact: artifact, artifact_metadata: artifact_metadata)
      end

      def build_metadata_artifact(job_artifact, metadata_file)
        Ci::JobArtifact.new(
          job: job_artifact.job,
          project: job_artifact.project,
          expire_at: job_artifact.expire_at,
          locked: job_artifact.locked,
          file: metadata_file,
          file_type: :metadata,
          file_format: :gzip,
          file_sha256: metadata_file.sha256,
          accessibility: job_artifact.accessibility
        )
      end

      def expire_in(params)
        params['expire_in'] || Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in
      end

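      # Accessibility precedence: everything is :public while the
      # `non_public_artifacts` feature flag is disabled; otherwise an explicit
      # `accessibility` param wins, falling back to the job's
      # `artifacts_public?` setting.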
      def accessibility(params)
        accessibility = params[:accessibility]

        return :public if Feature.disabled?(:non_public_artifacts, type: :development)

        return accessibility if accessibility.present?

        job.artifacts_public? ? :public : :private
      end

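      # Type-specific post-processing; currently only dotenv artifacts are
      # parsed (via Ci::ParseDotenvArtifactService below).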
      def parse_artifact(artifact)
        case artifact.file_type
        when 'dotenv' then parse_dotenv_artifact(artifact)
        else success
        end
      end

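      # Saves the artifact and its metadata in one transaction and maps
      # failures to service errors: a duplicate artifact or any other error
      # becomes :bad_request, while object storage failures become
      # :service_unavailable.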
      def persist_artifact(artifact, artifact_metadata)
        job.transaction do
          # NOTE: The `artifacts_expire_at` column is deprecated and will be removed in the near future.
          # We update it first because migrations lock the `ci_builds` table before the other tables;
          # keeping the same lock order here reduces the chance of deadlocks.
          job.update_column(:artifacts_expire_at, artifact.expire_at)

          artifact.save!
          artifact_metadata&.save!
        end

        success(artifact: artifact)
      rescue ActiveRecord::RecordNotUnique => error
        track_exception(error, artifact.file_type)
        error('another artifact of the same type already exists', :bad_request)
      rescue *OBJECT_STORAGE_ERRORS => error
        track_exception(error, artifact.file_type)
        error(error.message, :service_unavailable)
      rescue StandardError => error
        track_exception(error, artifact.file_type)
        error(error.message, :bad_request)
      end

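      # Returns true when the job already has an artifact of this type with
      # the same SHA256 checksum, allowing `execute` to skip the upload.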
      def sha256_matches_existing_artifact?(artifact_type, artifacts_file)
        existing_artifact = job.job_artifacts.find_by_file_type(artifact_type)
        return false unless existing_artifact

        existing_artifact.file_sha256 == artifacts_file.sha256
      end

      def track_exception(error, artifact_type)
        Gitlab::ErrorTracking.track_exception(
          error,
          job_id: job.id,
          project_id: job.project_id,
          uploading_type: artifact_type
        )
      end

      def track_artifact_uploader(_artifact)
        # Overridden in EE
      end

      def parse_dotenv_artifact(artifact)
        Ci::ParseDotenvArtifactService.new(project, current_user).execute(artifact)
      end
    end
  end
end

Ci::JobArtifacts::CreateService.prepend_mod