summaryrefslogtreecommitdiff
path: root/app/uploaders/file_uploader.rb
blob: 6dfe2bed0ba9f849ff6dc519e28be8b0a37ead0c (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
# frozen_string_literal: true

# This class breaks the actual CarrierWave concept.
# Every uploader should use a base_dir that is model agnostic so we can build
# back URLs from base_dir-relative paths saved in the `Upload` model.
#
# As the `.base_dir` is model dependent and **not** saved in the upload model (see #upload_path)
# there is no way to build back the correct file path without the model, which defies
# CarrierWave way of storing files.
#
class FileUploader < GitlabUploader
  include UploaderHelper
  include RecordsUploads::Concern
  include ObjectStorage::Concern
  prepend ObjectStorage::Extension::RecordsUploads

  # Matches markdown links/images that point at an upload, capturing the
  # 32-hex-char secret and the file name, e.g. `![img](/uploads/<secret>/<file>)`.
  MARKDOWN_PATTERN = %r{\!?\[.*?\]\(/uploads/(?<secret>[0-9a-f]{32})/(?<file>.*?)\)}.freeze

  # Extracts the trailing `<secret>/<identifier>` segment from an arbitrary
  # upload path (used as a fallback when no uploader context is available).
  DYNAMIC_PATH_PATTERN = %r{.*(?<secret>\h{32})/(?<identifier>.*)}.freeze

  after :remove, :prune_store_dir

  # FileUploader do not run in a model transaction, so we can simply
  # enqueue a job after the :store hook.
  after :store, :schedule_background_upload

  # Root directory for locally stored uploads.
  def self.root
    File.join(options.storage_path, 'uploads')
  end

  # Absolute on-disk path for a persisted Upload record.
  #
  # upload - an Upload whose #path already contains the dynamic segment
  #          (see #upload_path) and whose #model provides the base dir.
  def self.absolute_path(upload)
    File.join(
      absolute_base_dir(upload.model),
      upload.path # already contain the dynamic_segment, see #upload_path
    )
  end

  # Model-dependent part of the storage path. For remote (object) storage the
  # model is decorated with HashedProject so the path is rename-proof.
  def self.base_dir(model, store = Store::LOCAL)
    decorated_model = model
    decorated_model = Storage::HashedProject.new(model) if store == Store::REMOTE

    model_path_segment(decorated_model)
  end

  # used in migrations and import/exports
  def self.absolute_base_dir(model)
    File.join(root, base_dir(model))
  end

  # Returns the part of `store_dir` that can change based on the model's current
  # path
  #
  # This is used to build Upload paths dynamically based on the model's current
  # namespace and path, allowing us to ignore renames or transfers.
  #
  # model - Object that responds to `full_path` and `disk_path`
  #
  # Returns a String without a trailing slash
  def self.model_path_segment(model)
    case model
    when Storage::HashedProject then model.disk_path
    else
      model.hashed_storage?(:attachments) ? model.disk_path : model.full_path
    end
  end

  # 32-hex-char random secret that namespaces each uploaded file.
  def self.generate_secret
    SecureRandom.hex
  end

  # Returns a MatchData with :secret and :identifier captures, or nil.
  def self.extract_dynamic_path(path)
    DYNAMIC_PATH_PATTERN.match(path)
  end

  # Both possible relative paths for a file: legacy local layout and the
  # hashed layout used for remote storage.
  def upload_paths(identifier)
    [
      File.join(secret, identifier),
      File.join(base_dir(Store::REMOTE), secret, identifier)
    ]
  end

  attr_accessor :model

  def initialize(model, mounted_as = nil, **uploader_context)
    # mounted_as is deliberately dropped: this uploader is not mounted on a
    # model column, so CarrierWave's mount column is always nil here.
    super(model, nil, **uploader_context)

    @model = model
    apply_context!(uploader_context)
  end

  def initialize_copy(from)
    super

    @secret = self.class.generate_secret
    @upload = nil # calling record_upload would delete the old upload if set
  end

  # enforce the usage of Hashed storage when storing to
  # remote store as the FileMover doesn't support OS
  def base_dir(store = nil)
    self.class.base_dir(@model, store || object_store)
  end

  # we don't need to know the actual path, an uploader instance should be
  # able to yield the file content on demand, so we should build the digest
  def absolute_path
    self.class.absolute_path(@upload)
  end

  # Path persisted in the Upload record. Local storage keeps the legacy
  # project-relative layout; remote storage uses the full store_dir.
  def upload_path
    if file_storage?
      # Legacy path relative to project.full_path
      File.join(dynamic_segment, identifier)
    else
      File.join(store_dir, identifier)
    end
  end

  # Storage directories per store type; keys are Store::LOCAL / Store::REMOTE.
  def store_dirs
    {
      Store::LOCAL => File.join(base_dir, dynamic_segment),
      Store::REMOTE => File.join(base_dir(ObjectStorage::Store::REMOTE), dynamic_segment)
    }
  end

  # Hash used when rendering the upload as markdown in API responses.
  def to_h
    {
      alt:      markdown_name,
      url:      secure_url,
      markdown: markdown_link
    }
  end

  # Restores :secret/:identifier from the Upload's stored uploader context,
  # falling back to regex extraction from the stored path.
  def upload=(value)
    super

    return unless value
    return if apply_context!(value.uploader_context)

    # fallback to the regex based extraction
    if (matches = self.class.extract_dynamic_path(value.path))
      @secret = matches[:secret]
      @identifier = matches[:identifier]
    end
  end

  # Lazily generates the secret on first access.
  def secret
    @secret ||= self.class.generate_secret
  end

  # return a new uploader with a file copy on another project
  def self.copy_to(uploader, to_project)
    moved = self.new(to_project)
    moved.object_store = uploader.object_store
    moved.filename = uploader.filename

    moved.copy_file(uploader.file)
    moved
  end

  def copy_file(file)
    to_path = if file_storage?
                File.join(self.class.root, store_path)
              else
                store_path
              end

    self.file = file.copy_to(to_path)
    record_upload # after_store is not triggered
  end

  private

  # Reads :secret and :identifier out of the context hash.
  # Returns true only when both are present.
  def apply_context!(uploader_context)
    @secret, @identifier = uploader_context.values_at(:secret, :identifier)

    !!(@secret && @identifier)
  end

  def build_upload
    super.tap do |upload|
      upload.secret = secret
    end
  end

  def prune_store_dir
    storage.delete_dir!(store_dir) # only remove when empty
  end

  def identifier
    @identifier ||= filename
  end

  def dynamic_segment
    secret
  end

  def secure_url
    # Use the lazily-initializing `secret` reader (like the rest of the class)
    # instead of the raw @secret ivar, which may still be nil here and would
    # make File.join raise a TypeError.
    File.join('/uploads', secret, file.filename)
  end
end