Commit d1c3d4b5 authored by Micaël Bergeron

enabled the background upload for all uploaders - closes #4704

parent 53bef165
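
This commit replaces the per-model after_save callbacks that scheduled migration to object storage with a shared ObjectStorage::BackgroundUpload concern, so any model with a mounted uploader only needs to include the concern. A minimal sketch of what opting in looks like after this change (model and uploader names hypothetical, pieced together from the diffs below):

class ExampleModel < ActiveRecord::Base
  # The concern's after_save hook inspects every mounted uploader and, when the
  # mounted column changed, schedules a background upload after commit.
  include ObjectStorage::BackgroundUpload

  mount_uploader :file, ExampleUploader
end
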
class Appearance < ActiveRecord::Base
include CacheMarkdownField
include AfterCommitQueue
include ObjectStorage::BackgroundUpload
cache_markdown_field :description
cache_markdown_field :new_project_guidelines
......
......@@ -11,14 +11,6 @@ module Ci
mount_uploader :file, JobArtifactUploader
after_save if: :file_changed?, on: [:create, :update] do
run_after_commit do
file.schedule_migration_to_object_storage
end
end
delegate :open, :exists?, to: :file
enum file_type: {
archive: 1,
metadata: 2,
......
......@@ -3,6 +3,7 @@ module Avatarable
included do
prepend ShadowMethods
include ObjectStorage::BackgroundUpload
validate :avatar_type, if: ->(user) { user.avatar.present? && user.avatar_changed? }
validates :avatar, file_size: { maximum: 200.kilobytes.to_i }
......
......@@ -7,16 +7,8 @@ class LfsObject < ActiveRecord::Base
validates :oid, presence: true, uniqueness: true
scope :with_files_stored_locally, -> { where(file_store: [nil, LfsObjectUploader::Store::LOCAL]) }
mount_uploader :file, LfsObjectUploader
after_save if: :file_changed?, on: [:create, :update] do
run_after_commit do
file.schedule_migration_to_object_storage
end
end
def project_allowed_access?(project)
projects.exists?(project.lfs_storage_project.id)
end
......
......@@ -17,6 +17,10 @@ class FileUploader < GitlabUploader
after :remove, :prune_store_dir
# FileUploader does not run in a model transaction, so we can simply
# enqueue a job after the :store hook.
after :store, :schedule_background_upload
def self.root
File.join(options.storage_path, 'uploads')
end
......
......@@ -121,6 +121,11 @@
- geo:geo_repositories_clean_up
- geo:geo_repository_destroy
- object_storage:object_storage_background_upload
- object_storage:object_storage_migrate_for_artifacts
- object_storage:object_storage_migrate_for_lfs_objects
- object_storage:object_storage_migrate_for_uploads
- admin_emails
- elastic_batch_project_indexer
- elastic_commit_indexer
......@@ -131,7 +136,6 @@
- geo_project_sync
- geo_repository_shard_sync
- ldap_group_sync
- object_storage_upload
- project_update_repository_storage
- rebase
- repository_update_mirror
......
......@@ -84,4 +84,4 @@
- [elastic_indexer, 1]
- [elastic_commit_indexer, 1]
- [export_csv, 1]
- [object_storage_upload, 1]
- [object_storage, 1]
......@@ -14,6 +14,8 @@ module EE
DAST_FILE = 'gl-dast-report.json'.freeze
included do
include ObjectStorage::BackgroundUpload
scope :codequality, -> { where(name: %w[codequality codeclimate]) }
scope :performance, -> { where(name: %w[performance deploy]) }
scope :sast, -> { where(name: 'sast') }
......
......@@ -7,6 +7,8 @@ module EE
extend ActiveSupport::Concern
prepended do
include ObjectStorage::BackgroundUpload
after_destroy :log_geo_event
scope :with_files_stored_locally, -> { where(file_store: [nil, ::JobArtifactUploader::Store::LOCAL]) }
......
......@@ -7,7 +7,11 @@ module EE
extend ActiveSupport::Concern
prepended do
include ObjectStorage::BackgroundUpload
after_destroy :log_geo_event
scope :with_files_stored_locally, -> { where(file_store: [nil, LfsObjectUploader::Store::LOCAL]) }
end
def local_store?
......
......@@ -2,6 +2,10 @@ module EE
module Note
extend ActiveSupport::Concern
prepended do
include ObjectStorage::BackgroundUpload
end
def for_epic?
noteable.is_a?(Epic)
end
......
......@@ -50,6 +50,15 @@ module ObjectStorage
super
end
def schedule_background_upload(*args)
return unless schedule_background_upload?
ObjectStorage::BackgroundUploadWorker.perform_async(self.class.name,
upload.class.to_s,
mounted_as,
upload.id)
end
private
def current_upload_satisfies?(paths, model)
......@@ -63,6 +72,23 @@ module ObjectStorage
end
end
module BackgroundUpload
extend ActiveSupport::Concern
def background_upload(mount_point)
run_after_commit { send(mount_point).schedule_background_upload }
end
included do
after_save on: [:create, :update] do
self.class.uploaders.each do |mount, uploader_class|
mounted_as = uploader_class.serialization_column(self.class, mount)
background_upload(mount) if send(:"#{mounted_as}_changed?")
end
end
end
end
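
The concern wires the callback once for every mounted uploader instead of per model. A hedged sketch of the resulting flow, mirroring the expectations in the specs further down (assumes object storage, background upload and the license check all pass):

artifact = create(:ci_job_artifact, :archive)
# after_save sees that the :file column changed and, once the transaction
# commits, enqueues:
#   ObjectStorage::BackgroundUploadWorker
#     .perform_async('JobArtifactUploader', 'Ci::JobArtifact', :file, artifact.id)
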
module Concern
extend ActiveSupport::Concern
......@@ -97,6 +123,10 @@ module ObjectStorage
def licensed?
License.feature_available?(:object_storage)
end
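# Maps a mount point to the column CarrierWave serializes it to, honouring a
# :mount_on override (e.g. mount_uploader :avatar, SomeUploader, mount_on: :avatar_file).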
def serialization_column(model_class, mount_point)
model_class.uploader_options.dig(mount_point, :mount_on) || mount_point
end
end
def file_storage?
......@@ -183,13 +213,13 @@ module ObjectStorage
raise e
end
def schedule_migration_to_object_storage(*args)
return unless self.class.object_store_enabled?
return unless self.class.background_upload_enabled?
return unless self.class.licensed?
return unless self.file_storage?
def schedule_background_upload(*args)
return unless schedule_background_upload?
ObjectStorageUploadWorker.perform_async(self.class.name, model.class.name, mounted_as, model.id)
ObjectStorage::BackgroundUploadWorker.perform_async(self.class.name,
model.class.name,
mounted_as,
model.id)
end
def fog_directory
......@@ -231,6 +261,13 @@ module ObjectStorage
private
def schedule_background_upload?
self.class.object_store_enabled? &&
self.class.background_upload_enabled? &&
self.class.licensed? &&
self.file_storage?
end
# this is a hack around CarrierWave. The #migrate method needs to be
# able to force the current file to the migrated file upon success.
def file=(file)
......@@ -238,7 +275,7 @@ module ObjectStorage
end
def serialization_column
model.class.uploader_options.dig(mounted_as, :mount_on) || mounted_as
self.class.serialization_column(model.class, mounted_as)
end
# Returns the column where the 'store' is saved
......
# Concern for setting Sidekiq settings for the various GitLab ObjectStorage workers.
module ObjectStorageQueue
extend ActiveSupport::Concern
included do
queue_namespace :object_storage
end
end
module ObjectStorage
class BackgroundUploadWorker
include ApplicationWorker
include ObjectStorageQueue
sidekiq_options retry: 5
def perform(uploader_class_name, subject_class_name, file_field, subject_id)
uploader_class = uploader_class_name.constantize
subject_class = subject_class_name.constantize
return unless uploader_class < ObjectStorage::Concern
subject = subject_class.find(subject_id)
uploader = build_uploader(subject, file_field&.to_sym)
uploader.migrate!(ObjectStorage::Store::REMOTE)
rescue RecordNotFound
# does not retry when the record does not exist
Rails.logger.warn("Cannot find subject #{subject_class} with id=#{subject_id}.")
end
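# Uploads know how to build their own uploader; any other model exposes the
# uploader directly through its mount point (e.g. lfs_object.file).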
def build_uploader(subject, mount_point)
case subject
when Upload then subject.build_uploader(mount_point)
else
subject.send(mount_point) # rubocop:disable GitlabSecurity/PublicSend
end
end
end
end
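
Invoked directly, the worker takes the uploader class name, the model class name, the mount point and the record id, matching the spec expectations below (the record variable here is illustrative):

ObjectStorage::BackgroundUploadWorker
  .perform_async('LfsObjectUploader', 'LfsObject', :file, lfs_object.id)
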
class ObjectStorageUploadWorker
include ApplicationWorker
sidekiq_options retry: 5
def perform(uploader_class_name, subject_class_name, file_field, subject_id)
uploader_class = uploader_class_name.constantize
subject_class = subject_class_name.constantize
return unless uploader_class < ObjectStorage::Concern
return unless uploader_class.object_store_enabled?
return unless uploader_class.licensed?
return unless uploader_class.background_upload_enabled?
subject = subject_class.find(subject_id)
uploader = subject.public_send(file_field) # rubocop:disable GitlabSecurity/PublicSend
uploader.migrate!(ObjectStorage::Store::REMOTE)
rescue RecordNotFound
# does not retry when the record does not exist
Rails.logger.warn("Cannot find subject #{subject_class} with id=#{subject_id}.")
end
end
......@@ -34,7 +34,7 @@ describe LfsObject do
end
end
describe '#schedule_migration_to_object_storage' do
describe '#schedule_background_upload' do
before do
stub_lfs_setting(enabled: true)
end
......@@ -47,7 +47,7 @@ describe LfsObject do
end
it 'does not schedule the migration' do
expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
expect(ObjectStorage::BackgroundUploadWorker).not_to receive(:perform_async)
subject
end
......@@ -61,7 +61,7 @@ describe LfsObject do
end
it 'schedules the model for migration' do
expect(ObjectStorageUploadWorker).to receive(:perform_async).with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric))
expect(ObjectStorage::BackgroundUploadWorker).to receive(:perform_async).with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric))
subject
end
......@@ -73,7 +73,7 @@ describe LfsObject do
end
it 'does not schedule the migration' do
expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
expect(ObjectStorage::BackgroundUploadWorker).not_to receive(:perform_async)
subject
end
......@@ -86,7 +86,7 @@ describe LfsObject do
end
it 'schedules the model for migration' do
expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
expect(ObjectStorage::BackgroundUploadWorker).not_to receive(:perform_async)
subject
end
......
require 'spec_helper'
describe ObjectStorageUploadWorker do
describe ObjectStorage::BackgroundUploadWorker do
let(:local) { ObjectStorage::Store::LOCAL }
let(:remote) { ObjectStorage::Store::REMOTE }
......
......@@ -6,4 +6,17 @@ FactoryBot.define do
description "Open source software to collaborate on code"
new_project_guidelines "Custom project guidelines"
end
trait :with_logo do
logo { fixture_file_upload('spec/fixtures/dk.png') }
end
trait :with_header_logo do
header_logo { fixture_file_upload('spec/fixtures/dk.png') }
end
trait :with_logos do
with_logo
with_header_logo
end
end
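
A small usage sketch for the new traits (hypothetical spec code):

appearance = create(:appearance, :with_logos)
expect(appearance.logo.file).not_to be_nil        # dk.png fixture mounted as the logo
expect(appearance.header_logo.file).not_to be_nil # same fixture as the header logo
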
......@@ -18,14 +18,14 @@ describe Ci::JobArtifact do
describe 'callbacks' do
subject { create(:ci_job_artifact, :archive) }
describe '#schedule_migration_to_object_storage' do
describe '#schedule_background_upload' do
context 'when object storage is disabled' do
before do
stub_artifacts_object_storage(enabled: false)
end
it 'does not schedule the migration' do
expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
expect(ObjectStorage::BackgroundUploadWorker).not_to receive(:perform_async)
subject
end
......@@ -39,7 +39,7 @@ describe Ci::JobArtifact do
end
it 'schedules the model for migration' do
expect(ObjectStorageUploadWorker).to receive(:perform_async).with('JobArtifactUploader', described_class.name, :file, kind_of(Numeric))
expect(ObjectStorage::BackgroundUploadWorker).to receive(:perform_async).with('JobArtifactUploader', described_class.name, :file, kind_of(Numeric))
subject
end
......@@ -51,7 +51,7 @@ describe Ci::JobArtifact do
end
it 'does not schedule the migration' do
expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
expect(ObjectStorage::BackgroundUploadWorker).not_to receive(:perform_async)
subject
end
......@@ -64,7 +64,7 @@ describe Ci::JobArtifact do
end
it 'schedules the model for migration' do
expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
expect(ObjectStorage::BackgroundUploadWorker).not_to receive(:perform_async)
subject
end
......
......@@ -1030,7 +1030,7 @@ describe 'Git LFS API and storage' do
context 'with object storage disabled' do
it "doesn't attempt to migrate file to object storage" do
expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
expect(ObjectStorage::BackgroundUploadWorker).not_to receive(:perform_async)
put_finalize(with_tempfile: true)
end
......@@ -1042,7 +1042,7 @@ describe 'Git LFS API and storage' do
end
it 'schedules migration of file to object storage' do
expect(ObjectStorageUploadWorker).to receive(:perform_async).with('LfsObjectUploader', 'LfsObject', :file, kind_of(Numeric))
expect(ObjectStorage::BackgroundUploadWorker).to receive(:perform_async).with('LfsObjectUploader', 'LfsObject', :file, kind_of(Numeric))
put_finalize(with_tempfile: true)
end
......
......@@ -26,7 +26,7 @@ describe LfsObjectUploader do
describe 'migration to object storage' do
context 'with object storage disabled' do
it "is skipped" do
expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
expect(ObjectStorage::BackgroundUploadWorker).not_to receive(:perform_async)
lfs_object
end
......@@ -38,7 +38,7 @@ describe LfsObjectUploader do
end
it 'is scheduled to run after creation' do
expect(ObjectStorageUploadWorker).to receive(:perform_async).with(described_class.name, 'LfsObject', :file, kind_of(Numeric))
expect(ObjectStorage::BackgroundUploadWorker).to receive(:perform_async).with(described_class.name, 'LfsObject', :file, kind_of(Numeric))
lfs_object
end
......@@ -50,7 +50,7 @@ describe LfsObjectUploader do
end
it 'is skipped' do
expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
expect(ObjectStorage::BackgroundUploadWorker).not_to receive(:perform_async)
lfs_object
end
......@@ -67,7 +67,7 @@ describe LfsObjectUploader do
end
it 'can store file remotely' do
allow(ObjectStorageUploadWorker).to receive(:perform_async)
allow(ObjectStorage::BackgroundUploadWorker).to receive(:perform_async)
store_file(lfs_object)
......