author    Kamil Trzcinski <ayufan@ayufan.eu>  2017-04-28 19:13:08 +0200
committer Kamil Trzcinski <ayufan@ayufan.eu>  2017-04-28 19:18:40 +0200
commit    757742de710658d38cd718a78c4273ad5c8bd6f7 (patch)
tree      60ce7d74f42ea42b65c9a0cb95934cb0f6896ab1
parent    f3eb2314879d76ac55a672e0b46bdb1e883ceeda (diff)
download  gitlab-ce-zj-artifacts-object-store.tar.gz

Support cache storage (branch: zj-artifacts-object-store)
-rw-r--r--  app/models/ci/build.rb                          |  14
-rw-r--r--  app/services/projects/update_pages_service.rb   |  22
-rw-r--r--  app/uploaders/artifact_uploader.rb              | 116
-rw-r--r--  app/uploaders/concerns/object_storeable.rb      |  36
-rw-r--r--  config/initializers/carrierwave.rb              |  15
-rw-r--r--  lib/api/runner.rb                               |   3
-rw-r--r--  lib/gitlab/ci/build/artifacts/metadata.rb       |   2
7 files changed, 81 insertions(+), 127 deletions(-)
diff --git a/app/models/ci/build.rb b/app/models/ci/build.rb
index 109ce404237..7cf3a23b307 100644
--- a/app/models/ci/build.rb
+++ b/app/models/ci/build.rb
@@ -316,7 +316,7 @@ module Ci
end
def browsable_artifacts?
- !Gitlab.config.artifacts.object_store.enabled && artifacts_metadata?
+ !artifacts_file.remote_storage? && artifacts_metadata?
end
def artifacts_metadata?
@@ -324,12 +324,14 @@ module Ci
end
def artifacts_metadata_entry(path, **options)
- metadata = Gitlab::Ci::Build::Artifacts::Metadata.new(
- artifacts_metadata.path,
- path,
- **options)
+ artifacts_metadata.use_file do |metadata_path|
+ metadata = Gitlab::Ci::Build::Artifacts::Metadata.new(
+ metadata_path,
+ path,
+ **options)
- metadata.to_entry
+ metadata.to_entry
+ end
end
def erase_artifacts!
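Note on the hunk above: artifacts_metadata_entry now reads the metadata through use_file, which yields a path on local disk whether the upload is stored locally or in object storage. A minimal sketch of the calling pattern, assuming a Ci::Build with an artifacts_metadata upload (illustrative, not part of this commit):

# Sketch only: consuming the block-scoped use_file helper.
build.artifacts_metadata.use_file do |local_path|
  # Inside the block the file is readable from local disk; when the
  # upload lives in object storage it is cached locally first and the
  # cached copy is removed once the block returns.
  File.size(local_path)
end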
diff --git a/app/services/projects/update_pages_service.rb b/app/services/projects/update_pages_service.rb
index cf1c800a152..23a49a5e68a 100644
--- a/app/services/projects/update_pages_service.rb
+++ b/app/services/projects/update_pages_service.rb
@@ -8,9 +8,6 @@ module Projects
def initialize(project, job)
@project, @job = project, job
-
- # If we store artifacts on object store, we need to get them local
- extractable_artifacts
end
def execute
@@ -39,7 +36,6 @@ module Projects
error(e.message)
ensure
job.erase_artifacts! unless job.has_expiring_artifacts?
- FileUtils.rm_rf(artifacts) if Gitlab.config.artifacts.object_store.enabled
end
private
@@ -90,8 +86,11 @@ module Projects
# -n never overwrite existing files
# We add * to end of SITE_PATH, because we want to extract SITE_PATH and all subdirectories
site_path = File.join(SITE_PATH, '*')
- unless system(*%W(unzip -n #{artifacts} #{site_path} -d #{temp_path}))
- raise 'pages failed to extract'
+
+ job.artifacts_file.use_file do |artifacts_path|
+ unless system(*%W(unzip -n #{artifacts_path} #{site_path} -d #{temp_path}))
+ raise 'pages failed to extract'
+ end
end
end
@@ -146,17 +145,6 @@ module Projects
job.ref
end
- def artifacts
- job.artifacts_file.path
- end
-
- def extractable_artifacts
- return unless Gitlab.config.artifacts.object_store.enabled
-
- job.artifacts_file.download!(job.artifacts_file.url)
- job.artifacts_metadata.download!(job.artifacts_metadata.url)
- end
-
def latest_sha
project.commit(job.ref).try(:sha).to_s
end
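Worth noting for the extraction call above: the %W word array interpolates the paths and hands system an argv array, so unzip is exec'd directly without a shell and paths containing spaces survive intact. A small sketch of the expansion, with placeholder paths and 'public/*' standing in for File.join(SITE_PATH, '*'):

# Sketch only: %W builds the argv array; system(*args) bypasses the shell.
artifacts_path = '/tmp/build42/artifacts.zip'
temp_path      = '/tmp/build42/pages'
args = %W(unzip -n #{artifacts_path} public/* -d #{temp_path})
# => ["unzip", "-n", "/tmp/build42/artifacts.zip", "public/*", "-d", "/tmp/build42/pages"]
system(*args)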
diff --git a/app/uploaders/artifact_uploader.rb b/app/uploaders/artifact_uploader.rb
index b7b065b53bb..41068b7a070 100644
--- a/app/uploaders/artifact_uploader.rb
+++ b/app/uploaders/artifact_uploader.rb
@@ -3,84 +3,39 @@ class ArtifactUploader < GitlabUploader
storage_options Gitlab.config.artifacts
- def self.artifacts_path
- if object_store_options.enabled
- ""
- else
- storage_options.path + "/"
- end
- end
-
- def artifacts_path
- self.class.artifacts_path
- end
-
- def self.artifacts_upload_path
- self.artifacts_path + 'tmp/uploads'
- end
-
- def self.artifacts_cache_path
- self.artifacts_path + 'tmp/cache'
- end
-
- attr_accessor :job, :field
-
- def self.object_store_options
- Gitlab.config.artifacts.object_store
- end
-
- if object_store_options.enabled
- storage :fog
- else
- storage :file
- end
+ attr_reader :job, :field
def initialize(job, field)
@job, @field = job, field
end
def store_dir
- self.class.artifacts_path + job.artifacts_path
+ if remote_cache_storage?
+ job.artifacts_path
+ else
+ File.join(storage_options.artifacts_path, job.artifacts_path)
+ end
end
def cache_dir
- self.class.artifacts_cache_path + job.artifacts_path
- end
-
- def fog_directory
- return super unless use_object_store?
-
- self.class.object_store_options.bucket
- end
-
- # Override the credentials
- def fog_credentials
- return super unless use_object_store?
-
- {
- provider: object_store_options.provider,
- aws_access_key_id: object_store_options.access_key_id,
- aws_secret_access_key: object_store_options.secret_access_key,
- region: object_store_options.region,
- endpoint: object_store_options.endpoint,
- path_style: true
- }
+ if remote_cache_storage?
+ File.join('tmp/cache', job.artifacts_path)
+ else
+ File.join(storage_options.artifacts_path, 'tmp/cache', job.artifacts_path)
+ end
end
def exists?
file.try(:exists?)
end
- def fog_public
- false
- end
-
def upload_authorize
- result = { TempPath: ArtifactUploader.artifacts_upload_path }
+ self.cache_id = CarrierWave.generate_cache_id
+ self.original_filename = SecureRandom.hex
+
+ result = { TempPath: cache_path }
use_cache_object_storage do
- self.cache_id = CarrierWave.generate_cache_id
- self.original_filename = SecureRandom.hex
expire_at = ::Fog::Time.now + fog_authenticated_url_expiration
result[:UploadPath] = cache_name
result[:UploadURL] = storage.connection.put_object_url(
@@ -90,53 +45,32 @@ class ArtifactUploader < GitlabUploader
result
end
- def upload_cache_path(path = nil)
- File.join(cache_dir, path)
- end
-
def cache!(new_file = nil)
- use_cache_object_storage do
- retrieve_from_cache!(new_file.upload_path)
- @filename = new_file.original_filename
- store_path
- return
- end if new_file&.upload_path
-
- super
+ unless retrieve_uploaded_file!(new_file&.upload_path, new_file&.original_filename)
+ super
+ end
end
private
- def object_store_options
- self.class.object_store_options
- end
-
- def use_object_store?
- object_store_options.enabled
- end
-
def cache_storage
- if @use_storage_for_cache
+ if @use_storage_for_cache || cached? && remote_file?
storage
else
super
end
end
- def use_cache_object_storage
+ def retrieve_uploaded_file!(identifier, filename)
+ return unless identifier
+ return unless filename
return unless use_object_store?
@use_storage_for_cache = true
- yield
+
+ retrieve_from_cache!(identifier)
+ @filename = filename
ensure
@use_storage_for_cache = false
end
-
- def move_to_store
- storage.is_a?(CarrierWave::Storage::File)
- end
-
- def move_to_cache
- cache_storage.is_a?(CarrierWave::Storage::File)
- end
end
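A rough usage sketch for the reworked uploader: upload_authorize now allocates a cache id and a random filename up front and only adds the pre-signed upload fields when the cache sits in object storage. The field name and call site below are assumptions, not taken from this commit:

# Sketch only: what a caller such as the runner API might do with the
# authorization payload. :artifacts_file is an assumed field name.
uploader = ArtifactUploader.new(job, :artifacts_file)
payload  = uploader.upload_authorize
payload[:TempPath]   # local cache directory the upload may be written to
payload[:UploadURL]  # present only with an object-storage cache: a
                     # time-limited pre-signed PUT URL built via Fog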
diff --git a/app/uploaders/concerns/object_storeable.rb b/app/uploaders/concerns/object_storeable.rb
index e5b7624084c..6438dd42d8d 100644
--- a/app/uploaders/concerns/object_storeable.rb
+++ b/app/uploaders/concerns/object_storeable.rb
@@ -10,7 +10,7 @@ module ObjectStoreable
@storage_options = options
class_eval do
- storage @storage_options.object_store.enabled ? :fog : :file
+ storage use_object_store? ? :fog : :file
end
end
end
@@ -30,6 +30,7 @@ module ObjectStoreable
aws_access_key_id: @storage_options.access_key_id,
aws_secret_access_key: @storage_options.secret_access_key,
region: @storage_options.region,
+ endpoint: @storage_options.endpoint,
path_style: true
}
end
@@ -41,4 +42,37 @@ module ObjectStoreable
def use_object_store?
@storage_options.object_store.enabled
end
+
+ def move_to_store
+ !use_object_store?
+ end
+
+ def move_to_cache
+ !use_object_store?
+ end
+
+ def remote_file?
+ file&.is_a?(CarrierWave::Storage::Fog::File)
+ end
+
+ def remote_storage?
+ storage.is_a?(CarrierWave::Storage::Fog)
+ end
+
+ def remote_cache_storage?
+ cache_storage.is_a?(CarrierWave::Storage::Fog)
+ end
+
+ def use_file
+ unless use_object_store?
+ return yield path
+ end
+
+ begin
+ cache_stored_file!
+ yield cache_path
+ ensure
+ cache_storage.delete_dir!(cache_path(nil))
+ end
+ end
end
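The concern now carries the storage decision end to end: storage_options picks :fog or :file, move_to_store/move_to_cache keep CarrierWave from trying to move remote files, and use_file gives callers a local path for the lifetime of a block. A minimal sketch of opting another uploader in; the LFS settings object is an assumption that mirrors Gitlab.config.artifacts:

# Sketch only: a hypothetical uploader adopting ObjectStoreable.
class LfsObjectUploader < GitlabUploader
  include ObjectStoreable

  # Assumed to expose .object_store.enabled plus the credential keys
  # read by fog_credentials in the concern.
  storage_options Gitlab.config.lfs
end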
diff --git a/config/initializers/carrierwave.rb b/config/initializers/carrierwave.rb
index 3bb2d52658b..1933afcbfb1 100644
--- a/config/initializers/carrierwave.rb
+++ b/config/initializers/carrierwave.rb
@@ -2,19 +2,16 @@ CarrierWave::SanitizedFile.sanitize_regexp = /[^[:word:]\.\-\+]/
aws_file = Rails.root.join('config', 'aws.yml')
-CarrierWave.configure do |config|
- config.fog_provider = "fog/rackspace/storage"
- config.fog_credentials = {
- provider: 'AWS', # required
- aws_access_key_id: 'ddd',
- aws_secret_access_key: 'ccc',
- }
-end
-
if File.exist?(aws_file)
AWS_CONFIG = YAML.load(File.read(aws_file))[Rails.env]
CarrierWave.configure do |config|
+ config.fog_credentials = {
+ provider: 'AWS', # required
+ aws_access_key_id: AWS_CONFIG['access_key_id'], # required
+ aws_secret_access_key: AWS_CONFIG['secret_access_key'], # required
+ region: AWS_CONFIG['region'], # optional, defaults to 'us-east-1'
+ }
# required
config.fog_directory = AWS_CONFIG['bucket']
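The initializer now takes its Fog credentials from config/aws.yml per environment instead of the hardcoded Rackspace block it replaces. A sketch of the hash it expects after YAML.load(File.read(aws_file))[Rails.env]; the keys come from the lookups in the diff, the values are placeholders:

# Sketch only: shape of AWS_CONFIG assumed by this initializer.
AWS_CONFIG = {
  'access_key_id'     => 'AKIA................',
  'secret_access_key' => 'placeholder-secret-key',
  'region'            => 'us-east-1',              # optional, defaults to us-east-1
  'bucket'            => 'example-gitlab-artifacts'
}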
diff --git a/lib/api/runner.rb b/lib/api/runner.rb
index 2b85eb8ea68..2bcf487fa38 100644
--- a/lib/api/runner.rb
+++ b/lib/api/runner.rb
@@ -223,8 +223,7 @@ module API
Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in
if job.save
- status :created
- #present job, with: Entities::JobRequest::Response
+ present job, with: Entities::JobRequest::Response
else
render_validation_error!(job)
end
diff --git a/lib/gitlab/ci/build/artifacts/metadata.rb b/lib/gitlab/ci/build/artifacts/metadata.rb
index a788fb3fcbc..a375ccbece0 100644
--- a/lib/gitlab/ci/build/artifacts/metadata.rb
+++ b/lib/gitlab/ci/build/artifacts/metadata.rb
@@ -60,7 +60,7 @@ module Gitlab
begin
path = read_string(gz).force_encoding('UTF-8')
meta = read_string(gz).force_encoding('UTF-8')
-
+
next unless path.valid_encoding? && meta.valid_encoding?
next unless path =~ match_pattern
next if path =~ INVALID_PATH_PATTERN