path: root/app/services/projects/update_pages_service.rb
blob: 523b9f4191642fb7737239fc9f8a3b3b02bd9655
module Projects
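  # Deploys the `public/` directory from a build's artifacts archive to the
  # project's GitLab Pages root. Typically invoked from a background worker,
  # roughly as:
  #
  #   Projects::UpdatePagesService.new(project, build).execute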
  class UpdatePagesService < BaseService
    BLOCK_SIZE = 32.kilobytes
    MAX_SIZE = 1.terabyte
    SITE_PATH = 'public/'.freeze

    attr_reader :build

    def initialize(project, build)
      @project, @build = project, build
    end

    def execute
      # Create a status that reports the Pages deployment
      @status = create_status
      @status.enqueue!
      @status.run!

      raise 'missing pages artifacts' unless build.artifacts_file?
      raise 'pages are outdated' unless latest?

      # Create temporary directory in which we will extract the artifacts
      FileUtils.mkdir_p(tmp_path)
      Dir.mktmpdir(nil, tmp_path) do |archive_path|
        extract_archive!(archive_path)

        # Check that the public directory was extracted
        archive_public_path = File.join(archive_path, 'public')
        raise 'pages are missing the public folder' unless Dir.exist?(archive_public_path)
        raise 'pages are outdated' unless latest?

        deploy_page!(archive_public_path)
        success
      end
    rescue => e
      error(e.message)
    ensure
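      # Pages artifacts are erased after the deployment attempt unless they
      # are already scheduled to expire on their own.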
      build.erase_artifacts! unless build.has_expiring_artifacts?
    end

    private

    def success
      @status.success
      super
    end

    def error(message, http_status = nil)
      @status.allow_failure = !latest?
      @status.description = message
      @status.drop
      super
    end

    def create_status
      GenericCommitStatus.new(
        project: project,
        pipeline: build.pipeline,
        user: build.user,
        ref: build.ref,
        stage: 'deploy',
        name: 'pages:deploy'
      )
    end

    def extract_archive!(temp_path)
      if artifacts.ends_with?('.tar.gz') || artifacts.ends_with?('.tgz')
        extract_tar_archive!(temp_path)
      elsif artifacts.ends_with?('.zip')
        extract_zip_archive!(temp_path)
      else
        raise 'unsupported artifacts format'
      end
    end

    def extract_tar_archive!(temp_path)
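      # gunzip streams the archive, dd caps the stream size (see #blocks), and
      # tar extracts only SITE_PATH; if dd truncates an oversized stream, the
      # tar step fails and the deployment is aborted.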
      results = Open3.pipeline(%W(gunzip -c #{artifacts}),
                               %W(dd bs=#{BLOCK_SIZE} count=#{blocks}),
                               %W(tar -x -C #{temp_path} #{SITE_PATH}),
                               err: '/dev/null')
      raise 'pages failed to extract' unless results.compact.all?(&:success?)
    end

    def extract_zip_archive!(temp_path)
      raise 'missing artifacts metadata' unless build.artifacts_metadata?

      # Calculate the size of the pages site after extraction
      public_entry = build.artifacts_metadata_entry(SITE_PATH, recursive: true)

      if public_entry.total_size > max_size
        raise "artifacts for pages are too large: #{public_entry.total_size}"
      end

      # Requires Info-ZIP UnZip 6.00 or later.
      # -n  never overwrite existing files
      # We append * to SITE_PATH so that SITE_PATH and all of its subdirectories are extracted
      site_path = File.join(SITE_PATH, '*')
      unless system(*%W(unzip -n #{artifacts} #{site_path} -d #{temp_path}))
        raise 'pages failed to extract'
      end
    end

    def deploy_page!(archive_public_path)
      # Deploy the pages with moves that are as close to atomic as we can get.
      # Moves and removals may not be atomic, but they are significantly faster than extracting and removing:
      # 1. Move the currently deployed public directory to the previous public path (file removal is slow)
      # 2. Move the temporary public directory into place as the deployed one
      # 3. Remove the previous public path
      FileUtils.mkdir_p(pages_path)
      begin
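        # The currently deployed public directory may not exist yet (e.g. on
        # the first deployment), so a failed move here is ignored.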
        FileUtils.move(public_path, previous_public_path)
      rescue
      end
      FileUtils.move(archive_public_path, public_path)
    ensure
      FileUtils.rm_r(previous_public_path, force: true)
    end

    def latest?
      # Check that the SHA for the ref is still the most recent one.
      # This helps when multiple deployments happen for the same project.
      sha == latest_sha
    end

    def blocks
      # Calculate dd parameters: we limit the size of pages
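      # The extra block lets a stream of exactly max_size through; significantly
      # larger streams are truncated by dd and the tar extraction then fails.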
      1 + max_size / BLOCK_SIZE
    end

    def max_size
      max_pages_size = current_application_settings.max_pages_size.megabytes

      # Treat a zero setting as no configured limit and fall back to MAX_SIZE.
      max_pages_size.zero? ? MAX_SIZE : max_pages_size
    end

    def tmp_path
      @tmp_path ||= File.join(::Settings.pages.path, 'tmp')
    end

    def pages_path
      @pages_path ||= project.pages_path
    end

    def public_path
      @public_path ||= File.join(pages_path, 'public')
    end

    def previous_public_path
      @previous_public_path ||= File.join(pages_path, "public.#{SecureRandom.hex}")
    end

    def ref
      build.ref
    end

    def artifacts
      build.artifacts_file.path
    end

    def latest_sha
      project.commit(build.ref).try(:sha).to_s
    end

    def sha
      build.sha
    end
  end
end