Commit cbd09793 authored by Micaël Bergeron's avatar Micaël Bergeron

apply feedback

mainly renaming `object_storage:object_storage_background_upload` to `*_move`
parent 2aec9d32
class Appearance < ActiveRecord::Base
include CacheMarkdownField
include AfterCommitQueue
include ObjectStorage::BackgroundUpload
include ObjectStorage::BackgroundMove
cache_markdown_field :description
cache_markdown_field :new_project_guidelines
......
......@@ -3,7 +3,7 @@ module Avatarable
included do
prepend ShadowMethods
include ObjectStorage::BackgroundUpload
include ObjectStorage::BackgroundMove
validate :avatar_type, if: ->(user) { user.avatar.present? && user.avatar_changed? }
validates :avatar, file_size: { maximum: 200.kilobytes.to_i }
......
......@@ -122,7 +122,7 @@
- geo:geo_repository_destroy
- object_storage_upload
- object_storage:object_storage_background_upload
- object_storage:object_storage_background_move
- object_storage:object_storage_migrate_uploads
- admin_emails
......
......@@ -14,7 +14,7 @@ module EE
DAST_FILE = 'gl-dast-report.json'.freeze
included do
include ObjectStorage::BackgroundUpload
include ObjectStorage::BackgroundMove
scope :codequality, -> { where(name: %w[codequality codeclimate]) }
scope :performance, -> { where(name: %w[performance deploy]) }
......
......@@ -7,7 +7,7 @@ module EE
extend ActiveSupport::Concern
prepended do
include ObjectStorage::BackgroundUpload
include ObjectStorage::BackgroundMove
after_destroy :log_geo_event
......
......@@ -7,7 +7,7 @@ module EE
extend ActiveSupport::Concern
prepended do
include ObjectStorage::BackgroundUpload
include ObjectStorage::BackgroundMove
after_destroy :log_geo_event
......
......@@ -3,7 +3,7 @@ module EE
extend ActiveSupport::Concern
prepended do
include ObjectStorage::BackgroundUpload
include ObjectStorage::BackgroundMove
end
def for_epic?
......
......@@ -53,7 +53,7 @@ module ObjectStorage
def schedule_background_upload(*args)
return unless schedule_background_upload?
ObjectStorage::BackgroundUploadWorker.perform_async(self.class.name,
ObjectStorage::BackgroundMoveWorker.perform_async(self.class.name,
upload.class.to_s,
mounted_as,
upload.id)
......@@ -74,7 +74,7 @@ module ObjectStorage
# Add support for automatic background uploading after the file is stored.
#
module BackgroundUpload
module BackgroundMove
extend ActiveSupport::Concern
def background_upload(mount_points = [])
......@@ -226,7 +226,7 @@ module ObjectStorage
def schedule_background_upload(*args)
return unless schedule_background_upload?
ObjectStorage::BackgroundUploadWorker.perform_async(self.class.name,
ObjectStorage::BackgroundMoveWorker.perform_async(self.class.name,
model.class.name,
mounted_as,
model.id)
......
module ObjectStorage
class BackgroundUploadWorker
class BackgroundMoveWorker
include ApplicationWorker
include ObjectStorageQueue
......@@ -10,13 +10,13 @@ module ObjectStorage
subject_class = subject_class_name.constantize
return unless uploader_class < ObjectStorage::Concern
return unless uploader_class.object_store_enabled?
return unless uploader_class.licensed?
return unless uploader_class.background_upload_enabled?
subject = subject_class.find(subject_id)
uploader = build_uploader(subject, file_field&.to_sym)
uploader.migrate!(ObjectStorage::Store::REMOTE)
rescue RecordNotFound
# does not retry when the record does not exist
Rails.logger.warn("Cannot find subject #{subject_class} with id=#{subject_id}.")
end
def build_uploader(subject, mount_point)
......
......@@ -99,10 +99,6 @@ module ObjectStorage
def success?
error.nil?
end
def to_s
success? ? "Migration sucessful." : "Error while migrating #{upload.id}: #{error.message}"
end
end
module Report
......@@ -176,7 +172,7 @@ module ObjectStorage
report!(results)
rescue SanityCheckError => e
# do not retry: the job is insane
Rails.logger.warn "UploadsToObjectStorage sanity check error: #{e.message}"
Rails.logger.warn "#{self.class}: Sanity check error (#{e.message})"
end
def sanity_check!(uploads)
......@@ -192,13 +188,12 @@ module ObjectStorage
end
def process_uploader(uploader)
result = MigrationResult.new(uploader.upload)
MigrationResult.new(uploader.upload).tap do |result|
begin
uploader.migrate!(@to_store)
result
rescue => e
result.error = e
result
end
end
end
end
......
......@@ -18,8 +18,5 @@ class ObjectStorageUploadWorker
subject = subject_class.find(subject_id)
uploader = subject.public_send(file_field) # rubocop:disable GitlabSecurity/PublicSend
uploader.migrate!(ObjectStorage::Store::REMOTE)
rescue RecordNotFound
# does not retry when the record does not exist
Rails.logger.warn("Cannot find subject #{subject_class} with id=#{subject_id}.")
end
end
......@@ -5,24 +5,29 @@ namespace :gitlab do
batch_size = ENV.fetch('BATCH', 200).to_i
@to_store = ObjectStorage::Store::REMOTE
@mounted_as = args.mounted_as&.gsub(':', '')&.to_sym
uploader_class = args.uploader_class.constantize
model_class = args.model_class.constantize
@uploader_class = args.uploader_class.constantize
@model_class = args.model_class.constantize
Upload
.where.not(store: @to_store)
.where(uploader: uploader_class.to_s,
model_type: model_class.base_class.sti_name)
.in_batches(of: batch_size, &method(:enqueue_batch)) # rubocop: disable Cop/InBatches
uploads.each_batch(of: batch_size, &method(:enqueue_batch)) # rubocop: disable Cop/InBatches
end
def enqueue_batch(batch)
def enqueue_batch(batch, index)
job = ObjectStorage::MigrateUploadsWorker.enqueue!(batch,
@mounted_as,
@to_store)
puts "Enqueued job: #{job}"
puts "Enqueued job ##{index}: #{job}"
rescue ObjectStorage::MigrateUploadsWorker::SanityCheckError => e
# continue for the next batch
puts "Could not enqueue batch (#{batch.ids}) #{e.message}".color(:red)
end
def uploads
Upload.class_eval { include EachBatch } unless Upload < EachBatch
Upload
.where.not(store: @to_store)
.where(uploader: @uploader_class.to_s,
model_type: @model_class.base_class.sti_name)
end
end
end
......@@ -47,7 +47,7 @@ describe LfsObject do
end
it 'does not schedule the migration' do
expect(ObjectStorage::BackgroundUploadWorker).not_to receive(:perform_async)
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
subject
end
......@@ -61,7 +61,7 @@ describe LfsObject do
end
it 'schedules the model for migration' do
expect(ObjectStorage::BackgroundUploadWorker).to receive(:perform_async).with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric))
expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric))
subject
end
......@@ -73,7 +73,7 @@ describe LfsObject do
end
it 'does not schedule the migration' do
expect(ObjectStorage::BackgroundUploadWorker).not_to receive(:perform_async)
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
subject
end
......@@ -86,7 +86,7 @@ describe LfsObject do
end
it 'schedules the model for migration' do
expect(ObjectStorage::BackgroundUploadWorker).not_to receive(:perform_async)
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
subject
end
......
require 'spec_helper'
describe ObjectStorage::BackgroundMoveWorker do
let(:local) { ObjectStorage::Store::LOCAL }
let(:remote) { ObjectStorage::Store::REMOTE }
def perform
described_class.perform_async(uploader_class.name, subject_class, file_field, subject_id)
end
context 'for LFS' do
let!(:lfs_object) { create(:lfs_object, :with_file, file_store: local) }
let(:uploader_class) { LfsObjectUploader }
let(:subject_class) { LfsObject }
let(:file_field) { :file }
let(:subject_id) { lfs_object.id }
context 'when object storage is enabled' do
before do
stub_lfs_object_storage(background_upload: true)
end
it 'uploads object to storage' do
expect { perform }.to change { lfs_object.reload.file_store }.from(local).to(remote)
end
context 'when background upload is disabled' do
before do
allow(Gitlab.config.lfs.object_store).to receive(:background_upload) { false }
end
it 'is skipped' do
expect { perform }.not_to change { lfs_object.reload.file_store }
end
end
end
context 'when object storage is disabled' do
before do
stub_lfs_object_storage(enabled: false)
end
it "doesn't migrate files" do
perform
expect(lfs_object.reload.file_store).to eq(local)
end
end
end
context 'for legacy artifacts' do
let(:build) { create(:ci_build, :legacy_artifacts) }
let(:uploader_class) { LegacyArtifactUploader }
let(:subject_class) { Ci::Build }
let(:file_field) { :artifacts_file }
let(:subject_id) { build.id }
context 'when local storage is used' do
let(:store) { local }
context 'and remote storage is defined' do
before do
stub_artifacts_object_storage(background_upload: true)
end
it "migrates file to remote storage" do
perform
expect(build.reload.artifacts_file_store).to eq(remote)
end
context 'for artifacts_metadata' do
let(:file_field) { :artifacts_metadata }
it 'migrates metadata to remote storage' do
perform
expect(build.reload.artifacts_metadata_store).to eq(remote)
end
end
end
end
end
context 'for job artifacts' do
let(:artifact) { create(:ci_job_artifact, :archive) }
let(:uploader_class) { JobArtifactUploader }
let(:subject_class) { Ci::JobArtifact }
let(:file_field) { :file }
let(:subject_id) { artifact.id }
context 'when local storage is used' do
let(:store) { local }
context 'and remote storage is defined' do
before do
stub_artifacts_object_storage(background_upload: true)
end
it "migrates file to remote storage" do
perform
expect(artifact.reload.file_store).to eq(remote)
end
end
end
end
context 'for uploads' do
let!(:project) { create(:project, :with_avatar) }
let(:uploader_class) { AvatarUploader }
let(:file_field) { :avatar }
context 'when local storage is used' do
let(:store) { local }
context 'and remote storage is defined' do
before do
stub_uploads_object_storage(uploader_class, background_upload: true)
end
describe 'supports using the model' do
let(:subject_class) { project.class }
let(:subject_id) { project.id }
it "migrates file to remote storage" do
perform
expect(project.reload.avatar.file_storage?).to be_falsey
end
end
describe 'supports using the Upload' do
let(:subject_class) { Upload }
let(:subject_id) { project.avatar.upload.id }
it "migrates file to remote storage" do
perform
expect(project.reload.avatar.file_storage?).to be_falsey
end
end
end
end
end
end
......@@ -75,23 +75,25 @@ describe ObjectStorage::MigrateUploadsWorker, :sidekiq do
# swallow
end
# rubocop:disable Style/MultilineIfModifier
shared_examples 'outputs correctly' do |success: 0, failures: 0|
total = success + failures
if success
it 'outputs the reports' do
expect(Rails.logger).to receive(:info).with(%r{Migrated #{success}/#{total} files})
perform
end if success > 0
end
end
if failures
it 'outputs upload failures' do
expect(Rails.logger).to receive(:warn).with(/Error .* I am a teapot/)
perform
end if failures > 0
end
# rubocop:enable Style/MultilineIfModifier
end
end
it_behaves_like 'outputs correctly', success: 10
......
......@@ -25,7 +25,7 @@ describe Ci::JobArtifact do
end
it 'does not schedule the migration' do
expect(ObjectStorage::BackgroundUploadWorker).not_to receive(:perform_async)
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
subject
end
......@@ -39,7 +39,7 @@ describe Ci::JobArtifact do
end
it 'schedules the model for migration' do
expect(ObjectStorage::BackgroundUploadWorker).to receive(:perform_async).with('JobArtifactUploader', described_class.name, :file, kind_of(Numeric))
expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with('JobArtifactUploader', described_class.name, :file, kind_of(Numeric))
subject
end
......@@ -51,7 +51,7 @@ describe Ci::JobArtifact do
end
it 'does not schedule the migration' do
expect(ObjectStorage::BackgroundUploadWorker).not_to receive(:perform_async)
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
subject
end
......@@ -64,7 +64,7 @@ describe Ci::JobArtifact do
end
it 'schedules the model for migration' do
expect(ObjectStorage::BackgroundUploadWorker).not_to receive(:perform_async)
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
subject
end
......
......@@ -1030,7 +1030,7 @@ describe 'Git LFS API and storage' do
context 'with object storage disabled' do
it "doesn't attempt to migrate file to object storage" do
expect(ObjectStorage::BackgroundUploadWorker).not_to receive(:perform_async)
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
put_finalize(with_tempfile: true)
end
......@@ -1042,7 +1042,7 @@ describe 'Git LFS API and storage' do
end
it 'schedules migration of file to object storage' do
expect(ObjectStorage::BackgroundUploadWorker).to receive(:perform_async).with('LfsObjectUploader', 'LfsObject', :file, kind_of(Numeric))
expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with('LfsObjectUploader', 'LfsObject', :file, kind_of(Numeric))
put_finalize(with_tempfile: true)
end
......
......@@ -26,7 +26,7 @@ describe LfsObjectUploader do
describe 'migration to object storage' do
context 'with object storage disabled' do
it "is skipped" do
expect(ObjectStorage::BackgroundUploadWorker).not_to receive(:perform_async)
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
lfs_object
end
......@@ -38,7 +38,7 @@ describe LfsObjectUploader do
end
it 'is scheduled to run after creation' do
expect(ObjectStorage::BackgroundUploadWorker).to receive(:perform_async).with(described_class.name, 'LfsObject', :file, kind_of(Numeric))
expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with(described_class.name, 'LfsObject', :file, kind_of(Numeric))
lfs_object
end
......@@ -50,7 +50,7 @@ describe LfsObjectUploader do
end
it 'is skipped' do
expect(ObjectStorage::BackgroundUploadWorker).not_to receive(:perform_async)
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
lfs_object
end
......@@ -67,7 +67,7 @@ describe LfsObjectUploader do
end
it 'can store file remotely' do
allow(ObjectStorage::BackgroundUploadWorker).to receive(:perform_async)
allow(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async)
store_file(lfs_object)
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment