Commit bc760627 authored by Douwe Maan, committed by Kamil Trzciński

Merge branch 'jej/lfs-object-storage' into 'master'

Can migrate LFS objects to S3 style object storage

Closes #2841

See merge request !2760
parent 5a69b51b
module SendFileUpload
def send_upload(file_upload, send_params: {}, redirect_params: {}, attachment: nil)
if attachment
redirect_params[:query] = { "response-content-disposition" => "attachment;filename=#{attachment.inspect}" }
send_params.merge!(filename: attachment, disposition: 'attachment')
end
if file_upload.file_storage?
send_file file_upload.path, send_params
else
redirect_to file_upload.url(**redirect_params)
end
end
end
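
For orientation, a minimal usage sketch (hypothetical controller, not part of this merge request): send_upload streams the file from disk when the uploader reports file_storage?, and otherwise 302-redirects to the object-store URL, folding the attachment filename into the response-content-disposition query parameter.

# Hypothetical example; some_upload stands in for any uploader-backed
# attribute such as artifacts_file or an LfsObject's file.
class Projects::ExampleController < Projects::ApplicationController
  include SendFileUpload

  def download
    upload = @project.some_upload
    send_upload(upload, attachment: upload.filename)
  end
end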
class Projects::ArtifactsController < Projects::ApplicationController
include ExtractsPath
include RendersBlob
include SendFileUpload
layout 'project'
before_action :authorize_read_build!
......@@ -10,11 +11,7 @@ class Projects::ArtifactsController < Projects::ApplicationController
before_action :entry, only: [:file]
def download
if artifacts_file.file_storage?
send_file artifacts_file.path, disposition: 'attachment'
else
redirect_to artifacts_file.url
end
send_upload(artifacts_file, attachment: artifacts_file.filename)
end
def browse
......
class Projects::LfsStorageController < Projects::GitHttpClientController
include LfsRequest
include WorkhorseRequest
include SendFileUpload
skip_before_action :verify_workhorse_api!, only: [:download, :upload_finalize]
......@@ -11,7 +12,7 @@ class Projects::LfsStorageController < Projects::GitHttpClientController
return
end
send_file lfs_object.file.path, content_type: "application/octet-stream"
send_upload(lfs_object.file, send_params: { content_type: "application/octet-stream" })
end
def upload_authorize
......
......@@ -2,6 +2,7 @@
class Projects::RawController < Projects::ApplicationController
include ExtractsPath
include BlobHelper
include SendFileUpload
before_action :require_non_empty_project
before_action :assign_ref_vars
......@@ -31,7 +32,7 @@ class Projects::RawController < Projects::ApplicationController
lfs_object = find_lfs_object
if lfs_object && lfs_object.project_allowed_access?(@project)
send_file lfs_object.file.path, filename: @blob.name, disposition: 'attachment'
send_upload(lfs_object.file, attachment: @blob.name)
else
render_404
end
......
......@@ -33,6 +33,7 @@ module Ci
scope :with_artifacts, ->() { where.not(artifacts_file: [nil, '']) }
scope :with_artifacts_not_expired, ->() { with_artifacts.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.now) }
scope :with_expired_artifacts, ->() { with_artifacts.where('artifacts_expire_at < ?', Time.now) }
scope :with_artifacts_stored_locally, ->() { with_artifacts.where(artifacts_file_store: [nil, ArtifactUploader::LOCAL_STORE]) }
scope :last_month, ->() { where('created_at > ?', Date.today - 1.month) }
scope :manual_actions, ->() { where(when: :manual, status: COMPLETED_STATUSES + [:manual]) }
scope :ref_protected, -> { where(protected: true) }
......
......@@ -4,6 +4,8 @@ class LfsObject < ActiveRecord::Base
validates :oid, presence: true, uniqueness: true
scope :with_files_stored_locally, ->() { where(file_store: [nil, LfsObjectUploader::LOCAL_STORE]) }
mount_uploader :file, LfsObjectUploader
def storage_project(project)
......
class ArtifactUploader < ObjectStoreUploader
storage_options Gitlab.config.artifacts
def self.local_artifacts_store
def self.local_store_path
Gitlab.config.artifacts.path
end
def self.artifacts_upload_path
File.join(self.local_artifacts_store, 'tmp/uploads/')
end
def store_dir
if file_storage?
default_local_path
else
default_path
end
end
def cache_dir
File.join(self.class.local_artifacts_store, 'tmp/cache')
end
def work_dir
File.join(self.class.local_artifacts_store, 'tmp/work')
File.join(self.local_store_path, 'tmp/uploads/')
end
private
def default_local_path
File.join(self.class.local_artifacts_store, default_path)
end
def default_path
File.join(subject.created_at.utc.strftime('%Y_%m'), subject.project_id.to_s, subject.id.to_s)
end
......
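A worked example of the path scheme above (hypothetical ids): a build created in September 2017 under project 42 with id 1337 gives default_path "2017_09/42/1337"; locally that is joined under Gitlab.config.artifacts.path by default_local_path, while on object storage the same relative path serves as the object key.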
class LfsObjectUploader < GitlabUploader
storage :file
class LfsObjectUploader < ObjectStoreUploader
storage_options Gitlab.config.lfs
after :store, :schedule_migration_to_object_storage
def store_dir
"#{Gitlab.config.lfs.storage_path}/#{model.oid[0, 2]}/#{model.oid[2, 2]}"
end
def cache_dir
"#{Gitlab.config.lfs.storage_path}/tmp/cache"
def self.local_store_path
Gitlab.config.lfs.storage_path
end
def filename
model.oid[4..-1]
subject.oid[4..-1]
end
def work_dir
File.join(Gitlab.config.lfs.storage_path, 'tmp', 'work')
private
def default_path
"#{subject.oid[0, 2]}/#{subject.oid[2, 2]}"
end
end
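A worked example for the LFS layout (oid shortened for illustration): an object with an oid beginning "91eff75a..." gets default_path "91/ef" and filename "f75a..." (the oid minus its first four characters), matching the on-disk path asserted in the Projects::RawController spec further down ("lfs-objects/91/ef/f75a...").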
......@@ -20,6 +20,22 @@ class ObjectStoreUploader < GitlabUploader
def object_store_enabled?
object_store_options&.enabled
end
def background_upload_enabled?
object_store_options&.background_upload
end
def object_store_credentials
@object_store_credentials ||= object_store_options&.connection&.to_hash&.deep_symbolize_keys
end
def object_store_directory
object_store_options&.remote_directory
end
def local_store_path
raise NotImplementedError
end
end
attr_reader :subject, :field
......@@ -38,6 +54,14 @@ class ObjectStoreUploader < GitlabUploader
subject.public_send(:"#{field}_store=", value)
end
def store_dir
if file_storage?
default_local_path
else
default_path
end
end
def use_file
if file_storage?
return yield path
......@@ -85,6 +109,12 @@ class ObjectStoreUploader < GitlabUploader
end
end
def schedule_migration_to_object_storage(new_file)
if self.class.object_store_enabled? && licensed? && file_storage?
ObjectStorageUploadWorker.perform_async(self.class.name, subject.class.name, field, subject.id)
end
end
def fog_directory
self.class.object_store_options.remote_directory
end
......@@ -109,7 +139,27 @@ class ObjectStoreUploader < GitlabUploader
def verify_license!(new_file)
return if file_storage?
raise 'Object Storage feature is missing' unless subject.project.feature_available?(:object_storage)
raise 'Object Storage feature is missing' unless licensed?
end
def exists?
file.try(:exists?)
end
def cache_dir
File.join(self.class.local_store_path, 'tmp/cache')
end
# Override this if you don't want to save local files by default to the Rails.root directory
def work_dir
# Default path set by CarrierWave:
# https://github.com/carrierwaveuploader/carrierwave/blob/v1.1.0/lib/carrierwave/uploader/cache.rb#L182
# CarrierWave.tmp_path
File.join(self.class.local_store_path, 'tmp/work')
end
def licensed?
License.feature_available?(:object_storage)
end
private
......@@ -118,6 +168,14 @@ class ObjectStoreUploader < GitlabUploader
self.object_store = LOCAL_STORE unless self.object_store
end
def default_local_path
File.join(self.class.local_store_path, default_path)
end
def default_path
raise NotImplementedError
end
def storage
@storage ||=
if object_store == REMOTE_STORE
......
class ObjectStorageUploadWorker
include Sidekiq::Worker
include DedicatedSidekiqQueue
def perform(uploader_class_name, subject_class_name, file_field, subject_id)
uploader_class = uploader_class_name.constantize
subject_class = subject_class_name.constantize
return unless uploader_class.object_store_enabled?
return unless uploader_class.background_upload_enabled?
subject = subject_class.find(subject_id)
file = subject.public_send(file_field) # rubocop:disable GitlabSecurity/PublicSend
return unless file.licensed?
file.migrate!(uploader_class::REMOTE_STORE)
end
end
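Taken together with the uploader callback, a sketch of the full round trip for an LFS object (the id is hypothetical; the argument order mirrors schedule_migration_to_object_storage above):

# Enqueued by the after-store callback:
ObjectStorageUploadWorker.perform_async('LfsObjectUploader', 'LfsObject', :file, 123)
# perform then re-checks object_store_enabled? and background_upload_enabled?,
# reloads the LfsObject, confirms licensed?, and finally calls
# file.migrate!(LfsObjectUploader::REMOTE_STORE).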
---
title: LFS files can be stored in remote object storage such as S3
merge_request: 2760
author:
type: added
......@@ -147,7 +147,8 @@ production: &base
# path: shared/artifacts
# object_store:
# enabled: false
# remote_directory: artifacts
# remote_directory: artifacts # The bucket name
# background_upload: false # Temporary option to limit automatic upload (Default: true)
# connection:
# provider: AWS # Only AWS supported at the moment
# aws_access_key_id: AWS_ACCESS_KEY_ID
......@@ -159,6 +160,19 @@ production: &base
enabled: true
# The location where LFS objects are stored (default: shared/lfs-objects).
# storage_path: shared/lfs-objects
object_store:
enabled: false
remote_directory: lfs-objects # Bucket name
# background_upload: false # Temporary option to limit automatic upload (Default: true)
connection:
provider: AWS
aws_access_key_id: AWS_ACCESS_KEY_ID
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
region: eu-central-1
# Use the following options to configure an AWS compatible host
# host: 'localhost' # default: s3.amazonaws.com
# endpoint: 'http://127.0.0.1:9000' # default: nil
# path_style: true # Use 'host/bucket_name/object' instead of 'bucket_name.host/object'
## GitLab Pages
pages:
......@@ -655,6 +669,28 @@ test:
enabled: true
lfs:
enabled: false
# The location where LFS objects are stored (default: shared/lfs-objects).
# storage_path: shared/lfs-objects
object_store:
enabled: false
remote_directory: lfs-objects # The bucket name
connection:
provider: AWS # Only AWS supported at the moment
aws_access_key_id: AWS_ACCESS_KEY_ID
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
region: eu-central-1
artifacts:
enabled: true
# The location where build artifacts are stored (default: shared/artifacts).
# path: shared/artifacts
object_store:
enabled: false
remote_directory: artifacts # The bucket name
connection:
provider: AWS # Only AWS supported at the moment
aws_access_key_id: AWS_ACCESS_KEY_ID
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
region: eu-central-1
gitlab:
host: localhost
port: 80
......
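For illustration, the object_store connection settings above are what the uploader's object_store_credentials returns and what the spec helpers later in this diff pass straight to Fog; a rough sketch using the placeholder credentials from the example config:

# Sketch only; in GitLab the hash comes from gitlab.yml via Settings.
connection = ::Fog::Storage.new(
  provider: 'AWS',
  aws_access_key_id: 'AWS_ACCESS_KEY_ID',
  aws_secret_access_key: 'AWS_SECRET_ACCESS_KEY',
  region: 'eu-central-1'
)
connection.directories.get('lfs-objects') # remote_directory names the bucket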
......@@ -302,8 +302,9 @@ Settings.artifacts['max_size'] ||= 100 # in megabytes
Settings.artifacts['object_store'] ||= Settingslogic.new({})
Settings.artifacts['object_store']['enabled'] = false if Settings.artifacts['object_store']['enabled'].nil?
Settings.artifacts['object_store']['remote_directory'] ||= nil
# Convert upload connection settings to use symbol keys, to make Fog happy
Settings.artifacts['object_store']['connection']&.deep_symbolize_keys!
Settings.artifacts['object_store']['background_upload'] = true if Settings.artifacts['object_store']['background_upload'].nil?
# Convert upload connection settings to use string keys, to make Fog happy
Settings.artifacts['object_store']['connection']&.deep_stringify_keys!
#
# Registry
......@@ -339,6 +340,13 @@ Settings['lfs'] ||= Settingslogic.new({})
Settings.lfs['enabled'] = true if Settings.lfs['enabled'].nil?
Settings.lfs['storage_path'] = Settings.absolute(Settings.lfs['storage_path'] || File.join(Settings.shared['path'], "lfs-objects"))
Settings.lfs['object_store'] ||= Settingslogic.new({})
Settings.lfs['object_store']['enabled'] = false if Settings.lfs['object_store']['enabled'].nil?
Settings.lfs['object_store']['remote_directory'] ||= nil
Settings.lfs['object_store']['background_upload'] = true if Settings.lfs['object_store']['background_upload'].nil?
# Convert upload connection settings to use string keys, to make Fog happy
Settings.lfs['object_store']['connection']&.deep_stringify_keys!
#
# Mattermost
#
......
......@@ -63,3 +63,4 @@
- [update_user_activity, 1]
- [propagate_service_template, 1]
- [background_migration, 1]
- [object_storage_upload, 1]
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class AddFileStoreToLfsObjects < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
# When a migration requires downtime you **must** uncomment the following
# constant and define a short and easy to understand explanation as to why the
# migration requires downtime.
# DOWNTIME_REASON = ''
# When using the methods "add_concurrent_index", "remove_concurrent_index" or
# "add_column_with_default" you must disable the use of transactions
# as these methods can not run in an existing transaction.
# When using "add_concurrent_index" or "remove_concurrent_index" methods make sure
# that either of them is the _only_ method called in the migration,
# any other changes should go in a separate migration.
# This ensures that upon failure _only_ the index creation or removing fails
# and can be retried or reverted easily.
#
# To disable transactions uncomment the following line and remove these
# comments:
disable_ddl_transaction!
def up
add_column(:lfs_objects, :file_store, :integer)
end
def down
remove_column(:lfs_objects, :file_store)
end
end
......@@ -741,6 +741,7 @@ ActiveRecord::Schema.define(version: 20170905112933) do
t.datetime "created_at"
t.datetime "updated_at"
t.string "file"
t.integer "file_store"
end
add_index "lfs_objects", ["oid"], name: "index_lfs_objects_on_oid", unique: true, using: :btree
......
......@@ -3,7 +3,7 @@ require 'backup/files'
module Backup
class Artifacts < Files
def initialize
super('artifacts', ArtifactUploader.local_artifacts_store)
super('artifacts', ArtifactUploader.local_store_path)
end
def create_files_dir
......
......@@ -2,10 +2,12 @@ desc "GitLab | Migrate files for artifacts to comply with new storage format"
namespace :gitlab do
namespace :artifacts do
task migrate: :environment do
puts 'Artifacts'.color(:yellow)
Ci::Build.joins(:project).with_artifacts
.where(artifacts_file_store: ArtifactUploader::LOCAL_STORE)
.find_each(batch_size: 100) do |issue|
logger = Logger.new(STDOUT)
logger.info('Starting transfer of artifacts')
Ci::Build.joins(:project)
.with_artifacts_stored_locally
.find_each(batch_size: 10) do |build|
begin
build.artifacts_file.migrate!(ArtifactUploader::REMOTE_STORE)
build.artifacts_metadata.migrate!(ArtifactUploader::REMOTE_STORE)
......
require 'logger'
desc "GitLab | Migrate LFS objects to remote storage"
namespace :gitlab do
namespace :lfs do
task migrate: :environment do
logger = Logger.new(STDOUT)
logger.info('Starting transfer of LFS files to object storage')
LfsObject.with_files_stored_locally
.find_each(batch_size: 10) do |lfs_object|
begin
lfs_object.file.migrate!(LfsObjectUploader::REMOTE_STORE)
logger.info("Transferred LFS object #{lfs_object.oid} of size #{lfs_object.size.to_i.bytes} to object storage")
rescue => e
logger.error("Failed to transfer LFS object #{lfs_object.oid} with error: #{e.message}")
end
end
end
end
end
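A usage note for the two migration tasks above: once the matching object_store section in gitlab.yml is enabled, they can be invoked from the GitLab root, e.g.

bundle exec rake gitlab:artifacts:migrate
bundle exec rake gitlab:lfs:migrate

Objects are moved in small batches, and a failed transfer is logged and skipped so a single bad object does not abort the run.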
......@@ -22,7 +22,7 @@ describe Projects::ArtifactsController do
describe 'GET download' do
it 'sends the artifacts file' do
expect(controller).to receive(:send_file).with(job.artifacts_file.path, disposition: 'attachment').and_call_original
expect(controller).to receive(:send_file).with(job.artifacts_file.path, hash_including(disposition: 'attachment')).and_call_original
get :download, namespace_id: project.namespace, project_id: project, job_id: job
end
......@@ -66,19 +66,52 @@ describe Projects::ArtifactsController do
describe 'GET raw' do
context 'when the file exists' do
it 'serves the file using workhorse' do
get :raw, namespace_id: project.namespace, project_id: project, job_id: job, path: 'ci_artifacts.txt'
let(:path) { 'ci_artifacts.txt' }
let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline, artifacts_file_store: store, artifacts_metadata_store: store) }
send_data = response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]
shared_examples 'a valid file' do
it 'serves the file using workhorse' do
subject
expect(send_data).to start_with('artifacts-entry:')
expect(send_data).to start_with('artifacts-entry:')
base64_params = send_data.sub(/\Aartifacts\-entry:/, '')
params = JSON.parse(Base64.urlsafe_decode64(base64_params))
expect(params.keys).to eq(%w(Archive Entry))
expect(params['Archive']).to start_with(archive_path)
# On object storage, the URL can end with a query string
expect(params['Archive']).to match(/build_artifacts.zip(\?[^?]+)?$/)
expect(params['Entry']).to eq(Base64.encode64('ci_artifacts.txt'))
end
def send_data
response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]
end
expect(params.keys).to eq(%w(Archive Entry))
expect(params['Archive']).to end_with('build_artifacts.zip')
expect(params['Entry']).to eq(Base64.encode64('ci_artifacts.txt'))
def params
@params ||= begin
base64_params = send_data.sub(/\Aartifacts\-entry:/, '')
JSON.parse(Base64.urlsafe_decode64(base64_params))
end
end
end
context 'when using local file storage' do
it_behaves_like 'a valid file' do
let(:store) { ObjectStoreUploader::LOCAL_STORE }
let(:archive_path) { ArtifactUploader.local_store_path }
end
end
context 'when using remote file storage' do
before do
stub_artifacts_object_storage
end
it_behaves_like 'a valid file' do
let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
let!(:job) { create(:ci_build, :success, pipeline: pipeline) }
let(:store) { ObjectStorage::Store::REMOTE }
let(:archive_path) { 'https://' }
end
end
end
end
......
......@@ -8,10 +8,7 @@ describe Projects::RawController do
let(:id) { 'master/README.md' }
it 'delivers ASCII file' do
get(:show,
namespace_id: public_project.namespace.to_param,
project_id: public_project,
id: id)
get_show(public_project, id)
expect(response).to have_http_status(200)
expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8')
......@@ -25,10 +22,7 @@ describe Projects::RawController do
let(:id) { 'master/files/images/6049019_460s.jpg' }
it 'sets image content type header' do
get(:show,
namespace_id: public_project.namespace.to_param,
project_id: public_project,
id: id)
get_show(public_project, id)
expect(response).to have_http_status(200)
expect(response.header['Content-Type']).to eq('image/jpeg')
......@@ -54,21 +48,40 @@ describe Projects::RawController do
it 'serves the file' do
expect(controller).to receive(:send_file).with("#{Gitlab.config.shared.path}/lfs-objects/91/ef/f75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897", filename: 'lfs_object.iso', disposition: 'attachment')
get(:show,
namespace_id: public_project.namespace.to_param,
project_id: public_project,
id: id)
get_show(public_project, id)
expect(response).to have_http_status(200)
end
context 'and lfs uses object storage' do
before do
lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png")
lfs_object.save!
stub_lfs_object_storage
lfs_object.file.migrate!(LfsObjectUploader::REMOTE_STORE)
end
it 'responds with redirect to file' do
get_show(public_project, id)
expect(response).to have_gitlab_http_status(302)
expect(response.location).to include(lfs_object.reload.file.path)
end
it 'sets content disposition' do
get_show(public_project, id)
file_uri = URI.parse(response.location)
params = CGI.parse(file_uri.query)
expect(params["response-content-disposition"].first).to eq 'attachment;filename="lfs_object.iso"'
end
end
end
context 'when project does not have access' do
it 'does not serve the file' do
get(:show,
namespace_id: public_project.namespace.to_param,
project_id: public_project,
id: id)
get_show(public_project, id)
expect(response).to have_http_status(404)
end
......@@ -81,10 +94,7 @@ describe Projects::RawController do
end
it 'delivers ASCII file' do
get(:show,
namespace_id: public_project.namespace.to_param,
project_id: public_project,
id: id)
get_show(public_project, id)
expect(response).to have_http_status(200)
expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8')
......@@ -95,4 +105,10 @@ describe Projects::RawController do
end
end
end
def get_show(project, id)
get(:show, namespace_id: project.namespace.to_param,
project_id: project,
id: id)
end
end
......@@ -17,8 +17,12 @@ describe API::Jobs do
let(:api_user) { user }
let(:reporter) { create(:project_member, :reporter, project: project).user }
let(:guest) { create(:project_member, :guest, project: project).user }
let(:cross_project_pipeline_enabled) { true }
let(:object_storage_enabled) { true }
before do
stub_licensed_features(cross_project_pipelines: cross_project_pipeline_enabled,
object_storage: object_storage_enabled)
project.add_developer(user)
end
......@@ -319,7 +323,7 @@ describe API::Jobs do
let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) }
before do
stub_artifacts_object_storage
stub_artifacts_object_storage(licensed: :skip)
job.success
end
......
......@@ -190,10 +190,12 @@ describe 'Git LFS API and storage' do
describe 'when fetching lfs object' do
let(:project) { create(:project) }
let(:update_permissions) { }
let(:before_get) { }
before do
enable_lfs
update_permissions
before_get
get "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}", nil, headers
end
......@@ -238,6 +240,21 @@ describe 'Git LFS API and storage' do
end
it_behaves_like 'responds with a file'
context 'when LFS uses object storage' do
let(:before_get) do
stub_lfs_object_storage
lfs_object.file.migrate!(LfsObjectUploader::REMOTE_STORE)
end
it 'responds with redirect' do
expect(response).to have_gitlab_http_status(302)
end
it 'responds with the file location' do
expect(response.location).to include(lfs_object.reload.file.path)
end
end
end
end
......@@ -944,6 +961,46 @@ describe 'Git LFS API and storage' do
end
end
context 'and workhorse requests upload finalize for a new lfs object' do
before do
allow_any_instance_of(LfsObjectUploader).to receive(:exists?) { false }
end
context 'with object storage disabled' do
it "doesn't attempt to migrate file to object storage" do
expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
put_finalize(with_tempfile: true)
end
end
context 'with object storage enabled' do
before do
stub_lfs_object_storage
end
it 'schedules migration of file to object storage' do
expect(ObjectStorageUploadWorker).to receive(:perform_async).with('LfsObjectUploader', 'LfsObject', :file, kind_of(Numeric))
put_finalize(with_tempfile: true)
end
end
end
context 'and project has limit enabled but will stay under the limit' do
before do
allow_any_instance_of(EE::Project).to receive_messages(
actual_size_limit: 200,
size_limit_enabled?: true)
put_finalize
end
it 'responds with status 200' do
expect(response).to have_gitlab_http_status(200)
end
end
context 'invalid tempfiles' do
it 'rejects slashes in the tempfile name (path traversal)' do
put_finalize('foo/bar')
......@@ -1143,7 +1200,9 @@ describe 'Git LFS API and storage' do
put "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}/authorize", nil, authorize_headers
end
def put_finalize(lfs_tmp = lfs_tmp_file)
def put_finalize(lfs_tmp = lfs_tmp_file, with_tempfile: false)
setup_tempfile(lfs_tmp) if with_tempfile
put "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}", nil,
headers.merge('X-Gitlab-Lfs-Tmp' => lfs_tmp).compact
end
......@@ -1151,6 +1210,13 @@ describe 'Git LFS API and storage' do
def lfs_tmp_file
"#{sample_oid}012345678"
end
def setup_tempfile(lfs_tmp)
upload_path = "#{Gitlab.config.lfs.storage_path}/tmp/upload"
FileUtils.mkdir_p(upload_path)
FileUtils.touch(File.join(upload_path, lfs_tmp))
end
end
def enable_lfs
......
module StubConfiguration
def stub_artifacts_object_storage(enabled: true)
Fog.mock!
allow(Gitlab.config.artifacts.object_store).to receive_messages(
enabled: enabled,
remote_directory: 'artifacts',
connection: {
provider: 'AWS',
aws_access_key_id: 'AWS_ACCESS_KEY_ID',
aws_secret_access_key: 'AWS_SECRET_ACCESS_KEY',
region: 'eu-central-1'
}
)
allow_any_instance_of(ArtifactUploader).to receive(:verify_license!) { true }
return unless enabled
::Fog::Storage.new(Gitlab.config.artifacts.object_store.connection).tap do |connection|
begin
connection.directories.create(key: 'artifacts')
rescue Excon::Error::Conflict
end
end
end
end
module StubConfiguration
def stub_object_storage_uploader(config:, uploader:, remote_directory:, enabled: true, licensed: true)
Fog.mock!
allow(config).to receive(:enabled) { enabled }
stub_licensed_features(object_storage: licensed) unless licensed == :skip
return unless enabled
::Fog::Storage.new(uploader.object_store_credentials).tap do |connection|
begin
connection.directories.create(key: remote_directory)
rescue Excon::Error::Conflict
end
end
end
def stub_artifacts_object_storage(**params)
stub_object_storage_uploader(config: Gitlab.config.artifacts.object_store,
uploader: ArtifactUploader,
remote_directory: 'artifacts',
**params)
end
def stub_lfs_object_storage(**params)
stub_object_storage_uploader(config: Gitlab.config.lfs.object_store,
uploader: LfsObjectUploader,
remote_directory: 'lfs-objects',
**params)
end
end
require 'rake_helper'
describe 'gitlab:lfs namespace rake task' do
before :all do
Rake.application.rake_require 'tasks/gitlab/lfs'
end
describe 'migrate' do
let(:local) { ObjectStoreUploader::LOCAL_STORE }
let(:remote) { ObjectStoreUploader::REMOTE_STORE }
let!(:lfs_object) { create(:lfs_object, :with_file, file_store: local) }
def lfs_migrate
run_rake_task('gitlab:lfs:migrate')
end
context 'object storage disabled' do
before do
stub_lfs_object_storage(enabled: false)
end
it "doesn't migrate files" do
expect { lfs_migrate }.not_to change { lfs_object.reload.file_store }
end
end
context 'object storage enabled' do
before do
stub_lfs_object_storage
end
it 'migrates local file to object storage' do
expect { lfs_migrate }.to change { lfs_object.reload.file_store }.from(local).to(remote)
end
end
end
end
......@@ -6,8 +6,8 @@ describe ArtifactUploader do
let(:uploader) { described_class.new(job, :artifacts_file) }
let(:local_path) { Gitlab.config.artifacts.path }
describe '.local_artifacts_store' do
subject { described_class.local_artifacts_store }
describe '.local_store_path' do
subject { described_class.local_store_path }
it "delegate to artifacts path" do
expect(Gitlab.config.artifacts).to receive(:path)
......
......@@ -2,7 +2,7 @@ require 'spec_helper'
describe LfsObjectUploader do
let(:lfs_object) { create(:lfs_object, :with_file) }
let(:uploader) { described_class.new(lfs_object) }
let(:uploader) { described_class.new(lfs_object, :file) }
let(:path) { Gitlab.config.lfs.storage_path }
describe '#move_to_cache' do
......@@ -37,4 +37,73 @@ describe LfsObjectUploader do
it { is_expected.to start_with(path) }
it { is_expected.to end_with('/tmp/work') }
end
describe 'migration to object storage' do
context 'with object storage disabled' do
it "is skipped" do
expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
lfs_object
end
end
context 'with object storage enabled' do
before do
stub_lfs_object_storage
end
it 'is scheduled to run after creation' do
expect(ObjectStorageUploadWorker).to receive(:perform_async).with(described_class.name, 'LfsObject', :file, kind_of(Numeric))
lfs_object
end
end
context 'with object storage unlicensed' do
before do
stub_lfs_object_storage(licensed: false)
end
it 'is skipped' do
expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
lfs_object
end
end
end
describe 'remote file' do
let(:remote) { described_class::REMOTE_STORE }
let(:lfs_object) { create(:lfs_object, file_store: remote) }
context 'with object storage enabled' do
before do
stub_lfs_object_storage
end
it 'can store file remotely' do
allow(ObjectStorageUploadWorker).to receive(:perform_async)
store_file(lfs_object)
expect(lfs_object.file_store).to eq remote
expect(lfs_object.file.path).not_to be_blank
end
end
context 'with object storage unlicensed' do
before do
stub_lfs_object_storage(licensed: false)
end
it 'cannot store file remotely' do
expect { store_file(lfs_object) }.to raise_error('Object Storage feature is missing')
end
end
end
def store_file(lfs_object)
lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png")
lfs_object.save!
end
end
......@@ -198,18 +198,15 @@ describe ObjectStoreUploader do
end
context 'when using remote storage' do
let(:project) { double }
before do
uploader_class.storage_options double(
object_store: double(enabled: true))
expect(object).to receive(:artifacts_file_store) { described_class::REMOTE_STORE }
expect(object).to receive(:project) { project }
end
context 'feature is not available' do
before do
expect(project).to receive(:feature_available?).with(:object_storage) { false }
expect(License).to receive(:feature_available?).with(:object_storage) { false }
end
it "does raise an error" do
......@@ -219,7 +216,7 @@ describe ObjectStoreUploader do
context 'feature is available' do
before do
expect(project).to receive(:feature_available?).with(:object_storage) { true }
expect(License).to receive(:feature_available?).with(:object_storage) { true }
end
it "does not raise an error" do
......
require 'spec_helper'
describe ObjectStorageUploadWorker do
let(:local) { ObjectStoreUploader::LOCAL_STORE }
let(:remote) { ObjectStoreUploader::REMOTE_STORE }
def perform
described_class.perform_async(uploader_class.name, subject_class, file_field, subject_id)
end
context 'for LFS' do
let!(:lfs_object) { create(:lfs_object, :with_file, file_store: local) }
let(:uploader_class) { LfsObjectUploader }
let(:subject_class) { LfsObject }
let(:file_field) { :file }
let(:subject_id) { lfs_object.id }
context 'when object storage is enabled' do
before do
stub_lfs_object_storage
end
it 'uploads object to storage' do
expect { perform }.to change { lfs_object.reload.file_store }.from(local).to(remote)
end
context 'when background upload is disabled' do
before do
allow(Gitlab.config.lfs.object_store).to receive(:background_upload) { false }
end
it 'is skipped' do
expect { perform }.not_to change { lfs_object.reload.file_store }
end
end
end
context 'when object storage is disabled' do
before do
stub_lfs_object_storage(enabled: false)
end
it "doesn't migrate files" do
perform
expect(lfs_object.reload.file_store).to eq(local)
end
end
end
context 'for artifacts' do
let(:job) { create(:ci_build, :artifacts, artifacts_file_store: store, artifacts_metadata_store: store) }
let(:uploader_class) { ArtifactUploader }
let(:subject_class) { Ci::Build }
let(:file_field) { :artifacts_file }
let(:subject_id) { job.id }
context 'when local storage is used' do
let(:store) { local }
context 'and remote storage is defined' do
before do
stub_artifacts_object_storage
job
end
it "migrates file to remote storage" do
perform
expect(job.reload.artifacts_file_store).to eq(remote)
end
context 'for artifacts_metadata' do
let(:file_field) { :artifacts_metadata }
it 'migrates metadata to remote storage' do
perform
expect(job.reload.artifacts_metadata_store).to eq(remote)
end
end
end
end
end
end