Commit 678620cc authored by Kamil Trzciński

Add `direct_upload` setting for artifacts

parent 9b1677b2
@@ -31,7 +31,9 @@ class Projects::LfsStorageController < Projects::GitHttpClientController
       render plain: 'Unprocessable entity', status: 422
     end
   rescue ActiveRecord::RecordInvalid
-    render_400
+    render_lfs_forbidden
+  rescue UploadedFile::InvalidPathError
+    render_lfs_forbidden
   rescue ObjectStorage::RemoteStoreError
     render_lfs_forbidden
   end
@@ -66,10 +68,11 @@ class Projects::LfsStorageController < Projects::GitHttpClientController
   end

   def create_file!(oid, size)
-    LfsObject.new(oid: oid, size: size).tap do |object|
-      object.file.store_workhorse_file!(params, :file)
-      object.save!
-    end
+    uploaded_file = UploadedFile.from_params(
+      params, :file, LfsObjectUploader.workhorse_local_upload_path)
+    return unless uploaded_file
+
+    LfsObject.create!(oid: oid, size: size, file: uploaded_file)
   end

   def link_to_project!(object)
...
@@ -7,6 +7,7 @@ module Ci
     belongs_to :project
     belongs_to :job, class_name: "Ci::Build", foreign_key: :job_id

+    before_save :update_file_store
     before_save :set_size, if: :file_changed?

     scope :with_files_stored_locally, -> { where(file_store: [nil, ::JobArtifactUploader::Store::LOCAL]) }
@@ -21,6 +22,10 @@ module Ci
       trace: 3
     }

+    def update_file_store
+      self.file_store = file.object_store
+    end
+
     def self.artifacts_size_for(project)
       self.where(project: project).sum(:size)
     end
...
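The new `update_file_store` callback mirrors the uploader's location into the `file_store` column whenever the record is saved. Roughly, in console terms (an illustrative sketch, not part of the commit; the job and the uploaded file are assumed to exist and be valid):

    artifact = Ci::JobArtifact.new(project: job.project, job: job, file_type: :archive, file: uploaded_file)
    artifact.save!
    # before_save :update_file_store has copied the uploader's store into the column:
    artifact.file_store == artifact.file.object_store # => true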
@@ -2,6 +2,8 @@ class JobArtifactUploader < GitlabUploader
   extend Workhorse::UploadPath
   include ObjectStorage::Concern

+  ObjectNotReadyError = Class.new(StandardError)
+
   storage_options Gitlab.config.artifacts

   def size
@@ -25,6 +27,8 @@ class JobArtifactUploader < GitlabUploader
   private

   def dynamic_segment
+    raise ObjectNotReadyError, 'JobArtifact is not ready' unless model.id
+
     creation_date = model.created_at.utc.strftime('%Y_%m_%d')
     File.join(disk_hash[0..1], disk_hash[2..3], disk_hash,
...
@@ -2,6 +2,8 @@ class LegacyArtifactUploader < GitlabUploader
   extend Workhorse::UploadPath
   include ObjectStorage::Concern

+  ObjectNotReadyError = Class.new(StandardError)
+
   storage_options Gitlab.config.artifacts

   def store_dir
@@ -11,6 +13,8 @@ class LegacyArtifactUploader < GitlabUploader
   private

   def dynamic_segment
+    raise ObjectNotReadyError, 'Build is not ready' unless model.id
+
     File.join(model.created_at.utc.strftime('%Y_%m'), model.project_id.to_s, model.id.to_s)
   end
 end
@@ -156,11 +156,10 @@ module ObjectStorage
     end

     def workhorse_authorize
-      if options = workhorse_remote_upload_options
-        { RemoteObject: options }
-      else
-        { TempPath: workhorse_local_upload_path }
-      end
+      {
+        RemoteObject: workhorse_remote_upload_options,
+        TempPath: workhorse_local_upload_path
+      }.compact
     end

     def workhorse_local_upload_path
@@ -293,16 +292,14 @@ module ObjectStorage
       }
     end

-    def store_workhorse_file!(params, identifier)
-      filename = params["#{identifier}.name"]
-
-      if remote_object_id = params["#{identifier}.remote_id"]
-        store_remote_file!(remote_object_id, filename)
-      elsif local_path = params["#{identifier}.path"]
-        store_local_file!(local_path, filename)
-      else
-        raise RemoteStoreError, 'Bad file'
+    def cache!(new_file = sanitized_file)
+      # We intercept ::UploadedFile which might be stored on remote storage
+      # We use that for "accelerated" uploads, where we store result on remote storage
+      if new_file.is_a?(::UploadedFile) && new_file.remote_id
+        return cache_remote_file!(new_file.remote_id, new_file.original_filename)
       end
+
+      super
     end

     private
@@ -313,36 +310,29 @@ module ObjectStorage
       self.file_storage?
     end

-    def store_remote_file!(remote_object_id, filename)
-      raise RemoteStoreError, 'Missing filename' unless filename
-
+    def cache_remote_file!(remote_object_id, original_filename)
       file_path = File.join(TMP_UPLOAD_PATH, remote_object_id)
       file_path = Pathname.new(file_path).cleanpath.to_s
       raise RemoteStoreError, 'Bad file path' unless file_path.start_with?(TMP_UPLOAD_PATH + '/')

-      self.object_store = Store::REMOTE
-
       # TODO:
       # This should be changed to make use of `tmp/cache` mechanism
       # instead of using custom upload directory,
       # using tmp/cache makes this implementation way easier than it is today
-      CarrierWave::Storage::Fog::File.new(self, storage, file_path).tap do |file|
+      CarrierWave::Storage::Fog::File.new(self, storage_for(Store::REMOTE), file_path).tap do |file|
         raise RemoteStoreError, 'Missing file' unless file.exists?

-        self.filename = filename
-        self.file = storage.store!(file)
+        # Remote stored file, we force to store on remote storage
+        self.object_store = Store::REMOTE
+
+        # TODO:
+        # We store file internally and force it to be considered as `cached`
+        # This makes CarrierWave to store file in permament location (copy/delete)
+        # once this object is saved, but not sooner
+        @cache_id = "force-to-use-cache" # rubocop:disable Gitlab/ModuleWithInstanceVariables
+        @file = file # rubocop:disable Gitlab/ModuleWithInstanceVariables
+        @filename = original_filename # rubocop:disable Gitlab/ModuleWithInstanceVariables
       end
     end

-    def store_local_file!(local_path, filename)
-      raise RemoteStoreError, 'Missing filename' unless filename
-
-      root_path = File.realpath(self.class.workhorse_local_upload_path)
-      file_path = File.realpath(local_path)
-      raise RemoteStoreError, 'Bad file path' unless file_path.start_with?(root_path)
-
-      self.object_store = Store::LOCAL
-      self.store!(UploadedFile.new(file_path, filename))
-    end
-
     # this is a hack around CarrierWave. The #migrate method needs to be
...
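Taken together, `cache!` now short-circuits Workhorse-accelerated uploads while leaving the plain CarrierWave path untouched. A minimal sketch of the two flows (illustrative only; `uploader`, the temp file, and the remote object under the tmp upload area are assumed to exist):

    # File already pushed to object storage by Workhorse: the params carried a remote_id,
    # so cache_remote_file! pins the upload to Store::REMOTE without copying it locally.
    remote = UploadedFile.new(temp_file.path, filename: 'artifacts.zip', remote_id: 'tmp/upload/12312300')
    uploader.cache!(remote)
    uploader.object_store # => ObjectStorage::Store::REMOTE

    # Plain local file: no remote_id, so the call falls through to CarrierWave's own cache!.
    uploader.cache!(UploadedFile.new(temp_file.path, filename: 'artifacts.zip'))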
@@ -308,6 +308,7 @@ Settings.artifacts['max_size'] ||= 100 # in megabytes
 Settings.artifacts['object_store'] ||= Settingslogic.new({})
 Settings.artifacts['object_store']['enabled'] = false if Settings.artifacts['object_store']['enabled'].nil?
 Settings.artifacts['object_store']['remote_directory'] ||= nil
+Settings.artifacts['object_store']['direct_upload'] = false if Settings.artifacts['object_store']['direct_upload'].nil?
 Settings.artifacts['object_store']['background_upload'] = true if Settings.artifacts['object_store']['background_upload'].nil?
 Settings.artifacts['object_store']['proxy_download'] = false if Settings.artifacts['object_store']['proxy_download'].nil?
 # Convert upload connection settings to use string keys, to make Fog happy
...
artifacts_object_store = Gitlab.config.artifacts.object_store

if artifacts_object_store.enabled &&
    artifacts_object_store.direct_upload &&
    artifacts_object_store.connection&.provider.to_s != 'Google'
  raise "Only 'Google' is supported as a object storage provider when 'direct_upload' of artifacts is used"
end
@@ -108,6 +108,7 @@ For source installations the following settings are nested under `artifacts:` and then `object_store:`:
 |---------|-------------|---------|
 | `enabled` | Enable/disable object storage | `false` |
 | `remote_directory` | The bucket name where Artifacts will be stored | |
+| `direct_upload` | Set to true to enable direct upload of Artifacts without the need of local shared storage. Option may be removed once we decide to support only single storage for all files. Currently only `Google` provider is supported | `false` |
 | `background_upload` | Set to false to disable automatic upload. Option may be removed once upload is direct to S3 | `true` |
 | `proxy_download` | Set to true to enable proxying all files served. Option allows to reduce egress traffic as this allows clients to download directly from remote storage instead of proxying all data | `false` |
 | `connection` | Various connection options described below | |
...
@@ -78,6 +78,14 @@ module API
       rack_response({ 'message' => '404 Not found' }.to_json, 404)
     end

+    rescue_from UploadedFile::InvalidPathError do |e|
+      rack_response({ 'message' => e.message }.to_json, 400)
+    end
+
+    rescue_from ObjectStorage::RemoteStoreError do |e|
+      rack_response({ 'message' => e.message }.to_json, 500)
+    end
+
     # Retain 405 error rather than a 500 error for Grape 0.15.0+.
     # https://github.com/ruby-grape/grape/blob/a3a28f5b5dfbb2797442e006dbffd750b27f2a76/UPGRADING.md#changes-to-method-not-allowed-routes
     rescue_from Grape::Exceptions::MethodNotAllowed do |e|
...
@@ -388,28 +388,6 @@ module API
     # file helpers

-    def uploaded_file(field, uploads_path)
-      if params[field]
-        bad_request!("#{field} is not a file") unless params[field][:filename]
-        return params[field]
-      end
-
-      return nil unless params["#{field}.path"] && params["#{field}.name"]
-
-      # sanitize file paths
-      # this requires all paths to exist
-      required_attributes! %W(#{field}.path)
-      uploads_path = File.realpath(uploads_path)
-      file_path = File.realpath(params["#{field}.path"])
-      bad_request!('Bad file path') unless file_path.start_with?(uploads_path)
-
-      UploadedFile.new(
-        file_path,
-        params["#{field}.name"],
-        params["#{field}.type"] || 'application/octet-stream'
-      )
-    end
-
     def present_disk_file!(path, filename, content_type = 'application/octet-stream')
       filename ||= File.basename(path)
       header['Content-Disposition'] = "attachment; filename=#{filename}"
...
@@ -186,7 +186,7 @@ module API
         status 200
         content_type Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE

-        Gitlab::Workhorse.artifact_upload_ok
+        JobArtifactUploader.workhorse_authorize
       end

       desc 'Upload artifacts for job' do
@@ -201,14 +201,15 @@ module API
         requires :id, type: Integer, desc: %q(Job's ID)
         optional :token, type: String, desc: %q(Job's authentication token)
         optional :expire_in, type: String, desc: %q(Specify when artifacts should expire)
-        optional :file, type: File, desc: %q(Artifact's file)
         optional 'file.path', type: String, desc: %q(path to locally stored body (generated by Workhorse))
         optional 'file.name', type: String, desc: %q(real filename as send in Content-Disposition (generated by Workhorse))
         optional 'file.type', type: String, desc: %q(real content type as send in Content-Type (generated by Workhorse))
-        optional 'file.sha256', type: String, desc: %q(sha256 checksum of the file)
+        optional 'file.size', type: Integer, desc: %q(real size of file (generated by Workhorse))
+        optional 'file.sha256', type: String, desc: %q(sha256 checksum of the file (generated by Workhorse))
         optional 'metadata.path', type: String, desc: %q(path to locally stored body (generated by Workhorse))
         optional 'metadata.name', type: String, desc: %q(filename (generated by Workhorse))
-        optional 'metadata.sha256', type: String, desc: %q(sha256 checksum of the file)
+        optional 'metadata.size', type: Integer, desc: %q(real size of metadata (generated by Workhorse))
+        optional 'metadata.sha256', type: String, desc: %q(sha256 checksum of metadata (generated by Workhorse))
       end
       post '/:id/artifacts' do
         not_allowed! unless Gitlab.config.artifacts.enabled
@@ -217,21 +218,34 @@ module API
         job = authenticate_job!
         forbidden!('Job is not running!') unless job.running?

-        workhorse_upload_path = JobArtifactUploader.workhorse_upload_path
-        artifacts = uploaded_file(:file, workhorse_upload_path)
-        metadata = uploaded_file(:metadata, workhorse_upload_path)
+        artifacts = UploadedFile.from_params(params, :file, JobArtifactUploader.workhorse_local_upload_path)
+        metadata = UploadedFile.from_params(params, :metadata, JobArtifactUploader.workhorse_local_upload_path)

         bad_request!('Missing artifacts file!') unless artifacts
         file_to_large! unless artifacts.size < max_artifacts_size
+        bad_request!("Already uploaded") if job.job_artifacts_archive

         expire_in = params['expire_in'] ||
           Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in

-        job.build_job_artifacts_archive(project: job.project, file_type: :archive, file: artifacts, file_sha256: params['file.sha256'], expire_in: expire_in)
-        job.build_job_artifacts_metadata(project: job.project, file_type: :metadata, file: metadata, file_sha256: params['metadata.sha256'], expire_in: expire_in) if metadata
-        job.artifacts_expire_in = expire_in
+        job.build_job_artifacts_archive(
+          project: job.project,
+          file: artifacts,
+          file_type: :archive,
+          file_sha256: artifacts.sha256,
+          expire_in: expire_in)
+
+        if metadata
+          job.build_job_artifacts_metadata(
+            project: job.project,
+            file: metadata,
+            file_type: :metadata,
+            file_sha256: metadata.sha256,
+            expire_in: expire_in)
+        end

-        if job.save
+        if job.update(artifacts_expire_in: expire_in)
           present job, with: Entities::JobRequest::Response
         else
           render_validation_error!(job)
...
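For context, the `file.*` parameters above are what Workhorse hands to the endpoint after pre-processing the multipart request. Depending on where the artifact ended up, the request carries roughly one of the two shapes below (a sketch; the values are placeholders, the keys come from the parameter declarations above and the request specs):

    # archive saved by Workhorse under JobArtifactUploader.workhorse_local_upload_path:
    { 'file.path'   => '<workhorse_local_upload_path>/artifacts.zip123abc',
      'file.name'   => 'artifacts.zip',
      'file.type'   => 'application/zip',
      'file.size'   => 1024,
      'file.sha256' => '<checksum>' }

    # archive already pushed to object storage (direct_upload enabled):
    { 'file.remote_id' => '12312300',
      'file.name'      => 'artifacts.zip',
      'file.size'      => 1024,
      'file.sha256'    => '<checksum>' }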
@@ -82,7 +82,7 @@ module Gitlab
       end

       def open_file(path, name)
-        ::UploadedFile.new(path, name || File.basename(path), 'application/octet-stream')
+        ::UploadedFile.new(path, filename: name || File.basename(path), content_type: 'application/octet-stream')
       end
     end
...
@@ -36,10 +36,6 @@ module Gitlab
       }
     end

-    def artifact_upload_ok
-      { TempPath: JobArtifactUploader.workhorse_upload_path }
-    end
-
     def send_git_blob(repository, blob)
       params = if Gitlab::GitalyClient.feature_enabled?(:workhorse_raw_show, status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT)
         {
...
require "tempfile" require "tempfile"
require "tmpdir"
require "fileutils" require "fileutils"
# Taken from: Rack::Test::UploadedFile
class UploadedFile class UploadedFile
InvalidPathError = Class.new(StandardError)
# The filename, *not* including the path, of the "uploaded" file # The filename, *not* including the path, of the "uploaded" file
attr_reader :original_filename attr_reader :original_filename
...@@ -12,14 +14,46 @@ class UploadedFile ...@@ -12,14 +14,46 @@ class UploadedFile
# The content type of the "uploaded" file # The content type of the "uploaded" file
attr_accessor :content_type attr_accessor :content_type
def initialize(path, filename, content_type = "text/plain") attr_reader :remote_id
raise "#{path} file does not exist" unless ::File.exist?(path) attr_reader :sha256
def initialize(path, filename: nil, content_type: "application/octet-stream", sha256: nil, remote_id: nil)
raise InvalidPathError, "#{path} file does not exist" unless ::File.exist?(path)
@content_type = content_type @content_type = content_type
@original_filename = filename || ::File.basename(path) @original_filename = filename || ::File.basename(path)
@content_type = content_type
@sha256 = sha256
@remote_id = remote_id
@tempfile = File.new(path, 'rb') @tempfile = File.new(path, 'rb')
end end
def self.from_params(params, field, upload_path)
unless params["#{field}.path"]
raise InvalidPathError, "file is invalid" if params["#{field}.remote_id"]
return
end
file_path = File.realpath(params["#{field}.path"])
unless self.allowed_path?(file_path, [upload_path, Dir.tmpdir].compact)
raise InvalidPathError, "insecure path used '#{file_path}'"
end
UploadedFile.new(file_path,
filename: params["#{field}.name"],
content_type: params["#{field}.type"] || 'application/octet-stream',
sha256: params["#{field}.sha256"],
remote_id: params["#{field}.remote_id"])
end
def self.allowed_path?(file_path, paths)
paths.any? do |path|
File.exist?(path) && file_path.start_with?(File.realpath(path))
end
end
def path def path
@tempfile.path @tempfile.path
end end
......
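A quick sketch of how the new constructor is meant to be fed with Workhorse-style parameters (mirrors the spec below; the path must exist and sit under the given upload path or under `Dir.tmpdir`, and all values here are placeholders):

    params = {
      'file.path'   => '/tmp/artifacts.zip123abc',   # assumed to exist on disk
      'file.name'   => 'artifacts.zip',
      'file.type'   => 'application/zip',
      'file.sha256' => 'abc123'
    }

    file = UploadedFile.from_params(params, :file, JobArtifactUploader.workhorse_local_upload_path)
    file.original_filename # => 'artifacts.zip'
    file.sha256            # => 'abc123'
    # A path outside the allowed directories raises UploadedFile::InvalidPathError ("insecure path used ..."),
    # and a remote_id without any path raises InvalidPathError ("file is invalid").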
require 'spec_helper'

describe 'Artifacts direct upload support' do
  subject do
    load Rails.root.join('config/initializers/artifacts_direct_upload_support.rb')
  end

  let(:connection) do
    { provider: provider }
  end

  before do
    stub_artifacts_setting(
      object_store: {
        enabled: enabled,
        direct_upload: direct_upload,
        connection: connection
      })
  end

  context 'when object storage is enabled' do
    let(:enabled) { true }

    context 'when direct upload is enabled' do
      let(:direct_upload) { true }

      context 'when provider is Google' do
        let(:provider) { 'Google' }

        it 'succeeds' do
          expect { subject }.not_to raise_error
        end
      end

      context 'when connection is empty' do
        let(:connection) { nil }

        it 'raises an error' do
          expect { subject }.to raise_error /object storage provider when 'direct_upload' of artifacts is used/
        end
      end

      context 'when other provider is used' do
        let(:provider) { 'AWS' }

        it 'raises an error' do
          expect { subject }.to raise_error /object storage provider when 'direct_upload' of artifacts is used/
        end
      end
    end

    context 'when direct upload is disabled' do
      let(:direct_upload) { false }
      let(:provider) { 'AWS' }

      it 'succeeds' do
        expect { subject }.not_to raise_error
      end
    end
  end

  context 'when object storage is disabled' do
    let(:enabled) { false }
    let(:direct_upload) { false }
    let(:provider) { 'AWS' }

    it 'succeeds' do
      expect { subject }.not_to raise_error
    end
  end
end
require 'spec_helper'

describe UploadedFile do
  describe ".from_params" do
    let(:temp_dir) { Dir.tmpdir }
    let(:temp_file) { Tempfile.new("test", temp_dir) }
    let(:upload_path) { nil }

    subject do
      described_class.from_params(params, :file, upload_path)
    end

    before do
      FileUtils.touch(temp_file)
    end

    after do
      FileUtils.rm_f(temp_file)
      FileUtils.rm_r(upload_path) if upload_path
    end

    context 'when valid file is specified' do
      context 'only local path is specified' do
        let(:params) do
          { 'file.path' => temp_file.path }
        end

        it "succeeds" do
          is_expected.not_to be_nil
        end

        it "generates filename from path" do
          expect(subject.original_filename).to eq(::File.basename(temp_file.path))
        end
      end

      context 'all parameters are specified' do
        let(:params) do
          { 'file.path' => temp_file.path,
            'file.name' => 'my_file.txt',
            'file.type' => 'my/type',
            'file.sha256' => 'sha256',
            'file.remote_id' => 'remote_id' }
        end

        it "succeeds" do
          is_expected.not_to be_nil
        end

        it "generates filename from path" do
          expect(subject.original_filename).to eq('my_file.txt')
          expect(subject.content_type).to eq('my/type')
          expect(subject.sha256).to eq('sha256')
          expect(subject.remote_id).to eq('remote_id')
        end
      end
    end

    context 'when no params are specified' do
      let(:params) do
        {}
      end

      it "does not return an object" do
        is_expected.to be_nil
      end
    end

    context 'when only remote id is specified' do
      let(:params) do
        { 'file.remote_id' => 'remote_id' }
      end

      it "raises an error" do
        expect { subject }.to raise_error(UploadedFile::InvalidPathError, /file is invalid/)
      end
    end

    context 'when verifying allowed paths' do
      let(:params) do
        { 'file.path' => temp_file.path }
      end

      context 'when file is stored in system temporary folder' do
        let(:temp_dir) { Dir.tmpdir }

        it "succeeds" do
          is_expected.not_to be_nil
        end
      end

      context 'when file is stored in user provided upload path' do
        let(:upload_path) { Dir.mktmpdir }
        let(:temp_dir) { upload_path }

        it "succeeds" do
          is_expected.not_to be_nil
        end
      end

      context 'when file is stored outside of user provided upload path' do
        let!(:generated_dir) { Dir.mktmpdir }
        let!(:temp_dir) { Dir.mktmpdir }

        before do
          # We overwrite default temporary path
          allow(Dir).to receive(:tmpdir).and_return(generated_dir)
        end

        it "raises an error" do
          expect { subject }.to raise_error(UploadedFile::InvalidPathError, /insecure path used/)
        end
      end
    end
  end
end
@@ -950,12 +950,53 @@ describe API::Runner do
   describe 'POST /api/v4/jobs/:id/artifacts/authorize' do
     context 'when using token as parameter' do
-      it 'authorizes posting artifacts to running job' do
-        authorize_artifacts_with_token_in_params
+      context 'posting artifacts to running job' do
+        subject do
+          authorize_artifacts_with_token_in_params
+        end

-        expect(response).to have_gitlab_http_status(200)
-        expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
-        expect(json_response['TempPath']).not_to be_nil
+        shared_examples 'authorizes local file' do
+          it 'succeeds' do
+            subject
+
+            expect(response).to have_gitlab_http_status(200)
+            expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+            expect(json_response['TempPath']).to eq(JobArtifactUploader.workhorse_local_upload_path)
+            expect(json_response['RemoteObject']).to be_nil
+          end
+        end
+
+        context 'when using local storage' do
+          it_behaves_like 'authorizes local file'
+        end
+
+        context 'when using remote storage' do
+          context 'when direct upload is enabled' do
+            before do
+              stub_artifacts_object_storage(enabled: true, direct_upload: true)
+            end
+
+            it 'succeeds' do
+              subject
+
+              expect(response).to have_gitlab_http_status(200)
+              expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+              expect(json_response['TempPath']).to eq(JobArtifactUploader.workhorse_local_upload_path)
+              expect(json_response['RemoteObject']).to have_key('ID')
+              expect(json_response['RemoteObject']).to have_key('GetURL')
+              expect(json_response['RemoteObject']).to have_key('StoreURL')
+              expect(json_response['RemoteObject']).to have_key('DeleteURL')
+            end
+          end
+
+          context 'when direct upload is disabled' do
+            before do
+              stub_artifacts_object_storage(enabled: true, direct_upload: false)
+            end
+
+            it_behaves_like 'authorizes local file'
+          end
+        end
       end

       it 'fails to post too large artifact' do
@@ -1051,20 +1092,45 @@ describe API::Runner do
         end
       end

-      context 'when uses regular file post' do
-        before do
-          upload_artifacts(file_upload, headers_with_token, false)
-        end
+      context 'when uses accelerated file post' do
+        context 'for file stored locally' do
+          before do
+            upload_artifacts(file_upload, headers_with_token)
+          end

-        it_behaves_like 'successful artifacts upload'
-      end
+          it_behaves_like 'successful artifacts upload'
+        end

-      context 'when uses accelerated file post' do
-        before do
-          upload_artifacts(file_upload, headers_with_token, true)
-        end
+        context 'for file stored remotelly' do
+          let!(:fog_connection) do
+            stub_artifacts_object_storage(direct_upload: true)
+          end

-        it_behaves_like 'successful artifacts upload'
+          before do
+            fog_connection.directories.get('artifacts').files.create(
+              key: 'tmp/upload/12312300',
+              body: 'content'
+            )
+
+            upload_artifacts(file_upload, headers_with_token,
+                             { 'file.remote_id' => remote_id })
+          end
+
+          context 'when valid remote_id is used' do
+            let(:remote_id) { '12312300' }
+
+            it_behaves_like 'successful artifacts upload'
+          end
+
+          context 'when invalid remote_id is used' do
+            let(:remote_id) { 'invalid id' }
+
+            it 'responds with bad request' do
+              expect(response).to have_gitlab_http_status(500)
+              expect(json_response['message']).to eq("Missing file")
+            end
+          end
+        end
       end

       context 'when using runners token' do
@@ -1208,15 +1274,19 @@ describe API::Runner do
         end

         context 'when artifacts are being stored outside of tmp path' do
+          let(:new_tmpdir) { Dir.mktmpdir }
+
           before do
+            # init before overwriting tmp dir
+            file_upload
+
             # by configuring this path we allow to pass file from @tmpdir only
             # but all temporary files are stored in system tmp directory
-            @tmpdir = Dir.mktmpdir
-            allow(JobArtifactUploader).to receive(:workhorse_upload_path).and_return(@tmpdir)
+            allow(Dir).to receive(:tmpdir).and_return(new_tmpdir)
           end

           after do
-            FileUtils.remove_entry @tmpdir
+            FileUtils.remove_entry(new_tmpdir)
           end

           it' "fails to post artifacts for outside of tmp path"' do
@@ -1226,12 +1296,11 @@ describe API::Runner do
           end
         end

-        def upload_artifacts(file, headers = {}, accelerated = true)
-          params = if accelerated
-                     { 'file.path' => file.path, 'file.name' => file.original_filename }
-                   else
-                     { 'file' => file }
-                   end
+        def upload_artifacts(file, headers = {}, params = {})
+          params = params.merge({
+            'file.path' => file.path,
+            'file.name' => file.original_filename
+          })

           post api("/jobs/#{job.id}/artifacts"), params, headers
         end
...
@@ -1016,7 +1016,7 @@ describe 'Git LFS API and storage' do
       it_behaves_like 'a valid response' do
         it 'responds with status 200, location of lfs remote store and object details' do
-          expect(json_response['TempPath']).to be_nil
+          expect(json_response['TempPath']).to eq(LfsObjectUploader.workhorse_local_upload_path)
           expect(json_response['RemoteObject']).to have_key('ID')
           expect(json_response['RemoteObject']).to have_key('GetURL')
           expect(json_response['RemoteObject']).to have_key('StoreURL')
@@ -1073,7 +1073,9 @@ describe 'Git LFS API and storage' do
       ['123123', '../../123123'].each do |remote_id|
         context "with invalid remote_id: #{remote_id}" do
           subject do
-            put_finalize_with_args('file.remote_id' => remote_id)
+            put_finalize(with_tempfile: true, args: {
+              'file.remote_id' => remote_id
+            })
           end

           it 'responds with status 403' do
@@ -1093,9 +1095,10 @@ describe 'Git LFS API and storage' do
       end

       subject do
-        put_finalize_with_args(
+        put_finalize(with_tempfile: true, args: {
           'file.remote_id' => '12312300',
-          'file.name' => 'name')
+          'file.name' => 'name'
+        })
       end

       it 'responds with status 200' do
@@ -1331,7 +1334,7 @@ describe 'Git LFS API and storage' do
      put "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}/authorize", nil, authorize_headers
    end

-   def put_finalize(lfs_tmp = lfs_tmp_file, with_tempfile: false)
+   def put_finalize(lfs_tmp = lfs_tmp_file, with_tempfile: false, args: {})
      upload_path = LfsObjectUploader.workhorse_local_upload_path
      file_path = upload_path + '/' + lfs_tmp if lfs_tmp
@@ -1340,12 +1343,12 @@ describe 'Git LFS API and storage' do
        FileUtils.touch(file_path)
      end

-     args = {
+     extra_args = {
        'file.path' => file_path,
        'file.name' => File.basename(file_path)
-     }.compact
+     }

-     put_finalize_with_args(args)
+     put_finalize_with_args(args.merge(extra_args).compact)
    end

    def put_finalize_with_args(args)
...
@@ -45,6 +45,10 @@ module StubConfiguration
     allow(Gitlab.config.lfs).to receive_messages(to_settings(messages))
   end

+  def stub_artifacts_setting(messages)
+    allow(Gitlab.config.artifacts).to receive_messages(to_settings(messages))
+  end
+
   def stub_storage_settings(messages)
     messages.deep_stringify_keys!
...
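The new helper is what the initializer spec above relies on to exercise the setting; usage looks roughly like this (values illustrative, mirroring that spec):

    stub_artifacts_setting(
      object_store: {
        enabled: true,
        direct_upload: true,
        connection: { provider: 'Google' }
      })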
@@ -516,108 +516,46 @@ describe ObjectStorage do
     end
   end

-  describe '#store_workhorse_file!' do
+  describe '#cache!' do
     subject do
-      uploader.store_workhorse_file!(params, :file)
+      uploader.cache!(uploaded_file)
     end

     context 'when local file is used' do
       context 'when valid file is used' do
-        let(:target_path) do
-          File.join(uploader_class.root, uploader_class::TMP_UPLOAD_PATH)
-        end
-
-        before do
-          FileUtils.mkdir_p(target_path)
-        end
-
-        context 'when no filename is specified' do
-          let(:params) do
-            { "file.path" => "test/file" }
-          end
-
-          it 'raises an error' do
-            expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Missing filename/)
-          end
-        end
-
-        context 'when invalid file is specified' do
-          let(:file_path) do
-            File.join(target_path, "..", "test.file")
-          end
-
-          before do
-            FileUtils.touch(file_path)
-          end
-
-          let(:params) do
-            { "file.path" => file_path,
-              "file.name" => "my_file.txt" }
-          end
-
-          it 'raises an error' do
-            expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Bad file path/)
-          end
-        end
-
-        context 'when filename is specified' do
-          let(:params) do
-            { "file.path" => tmp_file,
-              "file.name" => "my_file.txt" }
-          end
-
-          let(:tmp_file) { Tempfile.new('filename', target_path) }
-
-          before do
-            FileUtils.touch(tmp_file)
-          end
-
-          after do
-            FileUtils.rm_f(tmp_file)
-          end
-
-          it 'succeeds' do
-            expect { subject }.not_to raise_error
-
-            expect(uploader).to be_exists
-          end
-
-          it 'proper path is being used' do
-            subject
-
-            expect(uploader.path).to start_with(uploader_class.root)
-            expect(uploader.path).to end_with("my_file.txt")
-          end
-
-          it 'source file to not exist' do
-            subject
-
-            expect(File.exist?(tmp_file.path)).to be_falsey
-          end
+        let(:uploaded_file) do
+          fixture_file_upload(Rails.root + 'spec/fixtures/rails_sample.jpg', 'image/jpg')
+        end
+
+        it "properly caches the file" do
+          subject
+
+          expect(uploader).to be_exists
+          expect(uploader.path).to start_with(uploader_class.root)
+          expect(uploader.filename).to eq('rails_sample.jpg')
         end
       end
     end

     context 'when remote file is used' do
+      let(:temp_file) { Tempfile.new("test") }
+
       let!(:fog_connection) do
         stub_uploads_object_storage(uploader_class)
       end

-      context 'when valid file is used' do
-        context 'when no filename is specified' do
-          let(:params) do
-            { "file.remote_id" => "test/123123" }
-          end
-
-          it 'raises an error' do
-            expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Missing filename/)
-          end
-        end
-
+      before do
+        FileUtils.touch(temp_file)
+      end
+
+      after do
+        FileUtils.rm_f(temp_file)
+      end
+
+      context 'when valid file is used' do
         context 'when invalid file is specified' do
-          let(:params) do
-            { "file.remote_id" => "../test/123123",
-              "file.name" => "my_file.txt" }
+          let(:uploaded_file) do
+            UploadedFile.new(temp_file.path, remote_id: "../test/123123")
           end

           it 'raises an error' do
@@ -626,9 +564,8 @@ describe ObjectStorage do
           end

           context 'when non existing file is specified' do
-            let(:params) do
-              { "file.remote_id" => "test/12312300",
-                "file.name" => "my_file.txt" }
+            let(:uploaded_file) do
+              UploadedFile.new(temp_file.path, remote_id: "test/123123")
             end

             it 'raises an error' do
@@ -636,10 +573,9 @@ describe ObjectStorage do
            end
          end

-         context 'when filename is specified' do
-           let(:params) do
-             { "file.remote_id" => "test/123123",
-               "file.name" => "my_file.txt" }
+         context 'when valid file is specified' do
+           let(:uploaded_file) do
+             UploadedFile.new(temp_file.path, filename: "my_file.txt", remote_id: "test/123123")
            end

            let!(:fog_file) do
@@ -649,36 +585,37 @@ describe ObjectStorage do
             )
           end

-          it 'succeeds' do
+          it 'file to be cached and remote stored' do
             expect { subject }.not_to raise_error

             expect(uploader).to be_exists
-          end
-
-          it 'path to not be temporary' do
-            subject
-
+            expect(uploader).to be_cached
             expect(uploader.path).not_to be_nil
-            expect(uploader.path).not_to include('tmp/upload')
-            expect(uploader.url).to include('/my_file.txt')
-          end
-
-          it 'url is used' do
-            subject
-
-            expect(uploader.url).not_to be_nil
-            expect(uploader.url).to include('/my_file.txt')
+            expect(uploader.path).not_to include('tmp/cache')
+            expect(uploader.url).not_to be_nil
+            expect(uploader.path).not_to include('tmp/cache')
+            expect(uploader.object_store).to eq(described_class::Store::REMOTE)
           end
+
+          context 'when file is stored' do
+            subject do
+              uploader.store!(uploaded_file)
+            end
+
+            it 'file to be remotely stored in permament location' do
+              subject
+
+              expect(uploader).to be_exists
+              expect(uploader).not_to be_cached
+              expect(uploader.path).not_to be_nil
+              expect(uploader.path).not_to include('tmp/upload')
+              expect(uploader.path).not_to include('tmp/cache')
+              expect(uploader.url).to include('/my_file.txt')
+              expect(uploader.object_store).to eq(described_class::Store::REMOTE)
+            end
+          end
         end
       end
     end
-
-    context 'when no file is used' do
-      let(:params) { {} }
-
-      it 'raises an error' do
-        expect { subject }.to raise_error(uploader_class::RemoteStoreError, /Bad file/)
-      end
-    end
   end
 end