Commit 6b3ef8d7 authored by Kamil Trzciński's avatar Kamil Trzciński

Enable the use of direct_upload for LFS objects

parent a8a8ad13
......@@ -17,20 +17,23 @@ class Projects::LfsStorageController < Projects::GitHttpClientController
def upload_authorize
set_workhorse_internal_api_content_type
render json: Gitlab::Workhorse.lfs_upload_ok(oid, size)
end
def upload_finalize
unless tmp_filename
render_lfs_forbidden
return
authorized = LfsObjectUploader.workhorse_authorize
authorized.merge!(LfsOid: oid, LfsSize: size)
render json: authorized
end
if store_file(oid, size, tmp_filename)
# Called by gitlab-workhorse once the LFS payload has been uploaded
# (either to a local temp path or directly to object storage).
# Persists the LfsObject and links it to the project's storage project.
#
# Responses:
#   200 - object stored and linked
#   422 - store_file! returned a falsy value
#   400 - model validation failed (ActiveRecord::RecordInvalid from save!)
#   403 - bad/missing upload parameters (ArgumentError raised by the
#         uploader's store_workhorse_file! path checks)
def upload_finalize
  if store_file!(oid, size)
    head 200
  else
    render plain: 'Unprocessable entity', status: 422
  end
rescue ActiveRecord::RecordInvalid
  render_400
rescue ArgumentError
  render_lfs_forbidden
end
private
......@@ -51,38 +54,28 @@ class Projects::LfsStorageController < Projects::GitHttpClientController
params[:size].to_i
end
def tmp_filename
name = request.headers['X-Gitlab-Lfs-Tmp']
return if name.include?('/')
return unless oid.present? && name.start_with?(oid)
name
def store_file!(oid, size)
object = LfsObject.find_by(oid: oid, size: size)
unless object&.file&.exists?
object = create_file!(oid, size)
end
def store_file(oid, size, tmp_file)
# Define tmp_file_path early because we use it in "ensure"
tmp_file_path = File.join(LfsObjectUploader.workhorse_upload_path, tmp_file)
return unless object
object = LfsObject.find_or_create_by(oid: oid, size: size)
file_exists = object.file.exists? || move_tmp_file_to_storage(object, tmp_file_path)
file_exists && link_to_project(object)
ensure
FileUtils.rm_f(tmp_file_path)
link_to_project!(object)
end
def move_tmp_file_to_storage(object, path)
File.open(path) do |f|
object.file = f
def create_file!(oid, size)
LfsObject.new(oid: oid, size: size).tap do |object|
object.file.store_workhorse_file!(params, :file)
object.save!
end
object.file.store!
object.save
end
def link_to_project(object)
def link_to_project!(object)
if object && !object.projects.exists?(storage_project.id)
object.projects << storage_project
object.save
object.save!
end
end
end
......@@ -9,6 +9,12 @@ class LfsObject < ActiveRecord::Base
mount_uploader :file, LfsObjectUploader
before_save :update_file_store
# before_save hook: mirror the uploader's current store
# (ObjectStorage::Store::LOCAL or ::REMOTE) into the `file_store`
# column so records can be queried by storage location.
def update_file_store
  self.file_store = file.object_store
end
# Whether +project+ may access this LFS object: true when the object is
# linked to the project's storage project.
# NOTE(review): assumes Project#lfs_storage_project resolves shared
# storage (e.g. fork networks) to a single project — confirm in the
# Project model.
def project_allowed_access?(project)
  projects.exists?(project.lfs_storage_project.id)
end
......
......@@ -2,11 +2,6 @@ class LfsObjectUploader < GitlabUploader
extend Workhorse::UploadPath
include ObjectStorage::Concern
# LfsObject are in `tmp/upload` instead of `tmp/uploads`
def self.workhorse_upload_path
File.join(root, 'tmp/upload')
end
storage_options Gitlab.config.lfs
def filename
......
......@@ -63,7 +63,7 @@ For source installations the following settings are nested under `lfs:` and then
|---------|-------------|---------|
| `enabled` | Enable/disable object storage | `false` |
| `remote_directory` | The bucket name where LFS objects will be stored| |
| `direct_upload` | Set to true to enable direct upload of LFS without the need of local shared storage. Option may be removed once we decide to support only single storage for all files. | `false` |
| `direct_upload` | Set to true to enable direct upload of LFS without the need of local shared storage. Option may be removed once we decide to support only single storage for all files. | `false` |
| `background_upload` | Set to false to disable automatic upload. Option may be removed once upload is direct to S3 | `true` |
| `connection` | Various connection options described below | |
......
......@@ -10,6 +10,9 @@ module ObjectStorage
UnknownStoreError = Class.new(StandardError)
ObjectStorageUnavailable = Class.new(StandardError)
DIRECT_UPLOAD_TIMEOUT = 4.hours
TMP_UPLOAD_PATH = 'tmp/upload'.freeze
module Store
LOCAL = 1
REMOTE = 2
......@@ -124,6 +127,10 @@ module ObjectStorage
object_store_options.enabled
end
# True when workhorse should upload straight to object storage using
# presigned URLs (`direct_upload` option in the object store config).
def direct_upload_enabled?
  object_store_options.direct_upload
end
# True when locally stored files should be uploaded to object storage
# automatically (`background_upload` option; defaults to true per the
# documentation table in this commit).
def background_upload_enabled?
  object_store_options.background_upload
end
......@@ -143,6 +150,38 @@ module ObjectStorage
# Name of the column/attribute used to serialize the uploader state for
# +mount_point+ on +model_class+: the configured :mount_on override when
# present, otherwise the mount point itself.
def serialization_column(model_class, mount_point)
  mounted_on = model_class.uploader_options.dig(mount_point, :mount_on)
  mounted_on || mount_point
end
# Payload returned to gitlab-workhorse when authorizing an upload:
# presigned object-store URLs when direct upload is available, otherwise
# a local temp path workhorse may write to.
def workhorse_authorize
  remote = workhorse_remote_upload_options
  return { ObjectStore: remote } if remote

  { TempPath: workhorse_local_upload_path }
end
# Local directory gitlab-workhorse writes temporary uploads into,
# scoped under the uploader's root (root + "tmp/upload").
def workhorse_local_upload_path
  File.join(self.root, TMP_UPLOAD_PATH)
end
private
# Presigned URLs for a direct-to-object-storage upload, or nil when the
# object store is disabled or direct upload is not configured. The
# returned hash is embedded under ObjectStore in workhorse_authorize.
def workhorse_remote_upload_options
  return unless self.object_store_enabled?
  return unless self.direct_upload_enabled?

  # Random, unguessable object key under the temporary upload prefix.
  id = [CarrierWave.generate_cache_id, SecureRandom.hex].join('-')
  upload_path = File.join(TMP_UPLOAD_PATH, id)
  connection = ::Fog::Storage.new(self.object_store_credentials)
  # Presigned URLs expire after DIRECT_UPLOAD_TIMEOUT (4 hours).
  expire_at = Time.now + DIRECT_UPLOAD_TIMEOUT
  options = { 'Content-Type' => 'application/octet-stream' }

  {
    ObjectID: id,
    GetURL: connection.get_object_https_url(remote_store_path, upload_path, expire_at),
    DeleteURL: connection.delete_object_url(remote_store_path, upload_path, expire_at),
    StoreURL: connection.put_object_url(remote_store_path, upload_path, expire_at, options)
  }
end
end
def file_storage?
......@@ -255,6 +294,18 @@ module ObjectStorage
}
end
# Persist a file that workhorse finished uploading. The request params
# carry either an object-store id ("<identifier>.object_id") or a local
# temp path ("<identifier>.path"); raises ArgumentError when neither is
# present.
def store_workhorse_file!(params, identifier)
  filename = params["#{identifier}.name"]
  remote_id = params["#{identifier}.object_id"]
  local_path = params["#{identifier}.path"]

  if remote_id
    store_remote_file!(remote_id, filename)
  elsif local_path
    store_local_file!(local_path, filename)
  else
    raise ArgumentError, 'Bad file'
  end
end
private
def schedule_background_upload?
......@@ -264,6 +315,33 @@ module ObjectStorage
self.file_storage?
end
# Attach a file workhorse already uploaded directly to object storage.
# +remote_object_id+ is the random id issued by workhorse_authorize; the
# object is looked up under the temporary upload prefix.
#
# Raises ArgumentError for path-traversal attempts or when the remote
# object does not exist.
def store_remote_file!(remote_object_id, filename)
  file_path = File.join(TMP_UPLOAD_PATH, remote_object_id)
  # Normalize before the prefix check: a bare start_with? would accept
  # "tmp/upload/../../x" since File.join does not collapse "..".
  # expand_path resolves ".." lexically; both sides are expanded so the
  # comparison stays consistent. Legitimate ids contain no "..", so their
  # behavior is unchanged and the un-normalized key is still used below.
  normalized = File.expand_path(file_path)
  allowed_prefix = File.expand_path(TMP_UPLOAD_PATH) + '/'
  raise ArgumentError, 'Bad file path' unless normalized.start_with?(allowed_prefix)

  self.object_store = Store::REMOTE
  self.original_filename = filename

  CarrierWave::Storage::Fog::File.new(self, storage, file_path).tap do |file|
    raise ArgumentError, 'Missing file' unless file.exists?

    storage.store!(file)
  end
end
# Attach a file workhorse wrote into the local temporary upload
# directory. realpath resolves symlinks and "..", and raises
# Errno::ENOENT when the path does not exist.
#
# Raises ArgumentError when the resolved path escapes the upload root.
def store_local_file!(local_path, filename)
  root_path = File.realpath(self.class.workhorse_local_upload_path)
  file_path = File.realpath(local_path)
  # Require the trailing separator so a sibling directory such as
  # "tmp/upload-evil" cannot satisfy a bare prefix match on "tmp/upload".
  raise ArgumentError, 'Bad file path' unless file_path.start_with?(root_path + File::SEPARATOR)

  self.object_store = Store::LOCAL
  self.original_filename = filename

  File.open(local_path) do |file|
    self.store!(file)
  end
end
# this is a hack around CarrierWave. The #migrate method needs to be
# able to force the current file to the migrated file upon success.
def file=(file)
......
module StubConfiguration
def stub_object_storage_uploader(config:, uploader:, remote_directory:, enabled: true, licensed: true, background_upload: false)
def stub_object_storage_uploader(
config:, uploader:, remote_directory:, enabled: true, licensed: true,
background_upload: false, direct_upload: false
)
Fog.mock!
allow(config).to receive(:enabled) { enabled }
allow(config).to receive(:background_upload) { background_upload }
allow(config).to receive(:direct_upload) { direct_upload }
stub_licensed_features(object_storage: licensed) unless licensed == :skip
......
......@@ -53,14 +53,6 @@ module Gitlab
params
end
# Authorize payload for LFS uploads: tells workhorse which local path to
# store the temp file under, plus the object's oid and size.
def lfs_upload_ok(oid, size)
  payload = { StoreLFSPath: LfsObjectUploader.workhorse_upload_path }
  payload.merge(LfsOid: oid, LfsSize: size)
end
# Authorize payload for CI job artifact uploads: local temp path only.
def artifact_upload_ok
  { TempPath: JobArtifactUploader.workhorse_upload_path }
end
......
......@@ -990,6 +990,7 @@ describe 'Git LFS API and storage' do
end
context 'and request is sent by gitlab-workhorse to authorize the request' do
shared_examples 'a valid response' do
before do
put_authorize
end
......@@ -1001,13 +1002,51 @@ describe 'Git LFS API and storage' do
it 'uses the gitlab-workhorse content type' do
expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
end
end
shared_examples 'a local file' do
it_behaves_like 'a valid response' do
it 'responds with status 200, location of lfs store and object details' do
expect(json_response['StoreLFSPath']).to eq(LfsObjectUploader.workhorse_upload_path)
expect(json_response['TempPath']).to eq(LfsObjectUploader.workhorse_local_upload_path)
expect(json_response['ObjectStore']).to be_nil
expect(json_response['LfsOid']).to eq(sample_oid)
expect(json_response['LfsSize']).to eq(sample_size)
end
end
end
context 'when using local storage' do
it_behaves_like 'a local file'
end
context 'when using remote storage' do
context 'when direct upload is enabled' do
before do
stub_lfs_object_storage(enabled: true, direct_upload: true)
end
it_behaves_like 'a valid response' do
it 'responds with status 200, location of lfs remote store and object details' do
expect(json_response['TempPath']).to be_nil
expect(json_response['ObjectStore']).to have_key('ObjectID')
expect(json_response['ObjectStore']).to have_key('GetURL')
expect(json_response['ObjectStore']).to have_key('StoreURL')
expect(json_response['ObjectStore']).to have_key('DeleteURL')
expect(json_response['LfsOid']).to eq(sample_oid)
expect(json_response['LfsSize']).to eq(sample_size)
end
end
end
context 'when direct upload is disabled' do
before do
stub_lfs_object_storage(enabled: true, direct_upload: false)
end
it_behaves_like 'a local file'
end
end
end
context 'and request is sent by gitlab-workhorse to finalize the upload' do
before do
......@@ -1037,6 +1076,59 @@ describe 'Git LFS API and storage' do
end
context 'with object storage enabled' do
context 'and direct upload enabled' do
let!(:fog_connection) do
stub_lfs_object_storage(direct_upload: true)
end
['123123', '../../123123'].each do |object_id|
context "with invalid object_id: #{object_id}" do
subject do
put_finalize_with_args('file.object_id' => object_id)
end
it 'responds with status 403' do
subject
expect(response).to have_gitlab_http_status(403)
end
end
end
context 'with valid object_id' do
before do
fog_connection.directories.get('lfs-objects').files.create(
key: 'tmp/upload/12312300',
body: 'content'
)
end
subject do
put_finalize_with_args('file.object_id' => '12312300')
end
it 'responds with status 200' do
subject
expect(response).to have_gitlab_http_status(200)
end
it 'schedules migration of file to object storage' do
subject
expect(LfsObject.last.projects).to include(project)
end
it 'have valid file' do
subject
expect(LfsObject.last.file_store).to eq(ObjectStorage::Store::REMOTE)
expect(LfsObject.last.file).to be_exists
end
end
end
context 'and background upload enabled' do
before do
stub_lfs_object_storage(background_upload: true)
end
......@@ -1048,6 +1140,7 @@ describe 'Git LFS API and storage' do
end
end
end
end
context 'and project has limit enabled but will stay under the limit' do
before do
......@@ -1064,13 +1157,12 @@ describe 'Git LFS API and storage' do
end
context 'invalid tempfiles' do
it 'rejects slashes in the tempfile name (path traversal' do
put_finalize('foo/bar')
expect(response).to have_gitlab_http_status(403)
before do
lfs_object.destroy
end
it 'rejects tempfile names that do not start with the oid' do
put_finalize("foo#{sample_oid}")
it 'rejects slashes in the tempfile name (path traversal)' do
put_finalize('../bar', with_tempfile: true)
expect(response).to have_gitlab_http_status(403)
end
end
......@@ -1160,7 +1252,7 @@ describe 'Git LFS API and storage' do
end
it 'with location of lfs store and object details' do
expect(json_response['StoreLFSPath']).to eq(LfsObjectUploader.workhorse_upload_path)
expect(json_response['TempPath']).to eq(LfsObjectUploader.workhorse_local_upload_path)
expect(json_response['LfsOid']).to eq(sample_oid)
expect(json_response['LfsSize']).to eq(sample_size)
end
......@@ -1263,10 +1355,23 @@ describe 'Git LFS API and storage' do
end
def put_finalize(lfs_tmp = lfs_tmp_file, with_tempfile: false)
setup_tempfile(lfs_tmp) if with_tempfile
upload_path = LfsObjectUploader.workhorse_local_upload_path
file_path = upload_path + '/' + lfs_tmp if lfs_tmp
put "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}", nil,
headers.merge('X-Gitlab-Lfs-Tmp' => lfs_tmp).compact
if with_tempfile
FileUtils.mkdir_p(upload_path)
FileUtils.touch(file_path)
end
args = {
'file.path' => file_path
}.compact
put_finalize_with_args(args)
end
# PUT the finalize request for the sample LFS object with the given
# workhorse upload params (e.g. "file.path" or "file.object_id").
def put_finalize_with_args(args)
  put "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}", args, headers
end
def lfs_tmp_file
......@@ -1274,10 +1379,6 @@ describe 'Git LFS API and storage' do
end
def setup_tempfile(lfs_tmp)
upload_path = LfsObjectUploader.workhorse_upload_path
FileUtils.mkdir_p(upload_path)
FileUtils.touch(File.join(upload_path, lfs_tmp))
end
end
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment