Commit 92794ed0 authored by George Koltsov

Import LFS Objects as part of GitLab Migration

 - Add LFS objects to GitLab project migration to achieve
   closer feature parity with file-based Import/Export
parent 966448af
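For context on the data format: the exported lfs_objects relation includes an lfs_objects.json file mapping each LFS object OID to the repository types that reference it. Based on the spec fixture further below, a payload might look like this (the integer values correspond to the LfsObjectsProject.repository_types enum, with null for objects not tied to a typed repository):

    {
      "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855": [0, 1, 2, null]
    }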
@@ -26,6 +26,7 @@ RSpec.describe BulkImports::Projects::Stage do
        [4, BulkImports::Projects::Pipelines::ServiceDeskSettingPipeline],
        [5, BulkImports::Common::Pipelines::WikiPipeline],
        [5, BulkImports::Common::Pipelines::UploadsPipeline],
        [5, BulkImports::Common::Pipelines::LfsObjectsPipeline],
        [5, BulkImports::Projects::Pipelines::AutoDevopsPipeline],
        [5, BulkImports::Projects::Pipelines::PipelineSchedulesPipeline],
        [6, BulkImports::Common::Pipelines::EntityFinisher]
......
# frozen_string_literal: true

module BulkImports
  module Common
    module Pipelines
      class LfsObjectsPipeline
        include Pipeline

        def extract(_context)
          download_service.execute
          decompression_service.execute
          extraction_service.execute

          file_paths = Dir.glob(File.join(tmpdir, '*'))

          BulkImports::Pipeline::ExtractedData.new(data: file_paths)
        end

        # rubocop: disable CodeReuse/ActiveRecord
        def load(_context, file_path)
          Gitlab::Utils.check_path_traversal!(file_path)
          Gitlab::Utils.check_allowed_absolute_path!(file_path, [Dir.tmpdir])

          # Skip the archive, the metadata JSON, directories and symlinks:
          # only plain LFS object files are imported.
          return if tar_filepath?(file_path)
          return if lfs_json_filepath?(file_path)
          return if File.directory?(file_path)
          return if File.lstat(file_path).symlink?

          size = File.size(file_path)
          oid = LfsObject.calculate_oid(file_path)

          lfs_object = LfsObject.find_or_initialize_by(oid: oid, size: size)
          lfs_object.file = File.open(file_path) unless lfs_object.file&.exists?
          lfs_object.save! if lfs_object.changed?

          repository_types(oid)&.each do |type|
            create_lfs_objects_project(lfs_object, type)
          end
        end
        # rubocop: enable CodeReuse/ActiveRecord

        def after_run(_)
          FileUtils.remove_entry(tmpdir) if Dir.exist?(tmpdir)
        end

        private

        def download_service
          BulkImports::FileDownloadService.new(
            configuration: context.configuration,
            relative_url: context.entity.relation_download_url_path(relation),
            tmpdir: tmpdir,
            filename: targz_filename
          )
        end

        def decompression_service
          BulkImports::FileDecompressionService.new(tmpdir: tmpdir, filename: targz_filename)
        end

        def extraction_service
          BulkImports::ArchiveExtractionService.new(tmpdir: tmpdir, filename: tar_filename)
        end

        # Maps LFS object OIDs to the repository types they belong to.
        def lfs_json
          @lfs_json ||= Gitlab::Json.parse(File.read(lfs_json_filepath))
        rescue StandardError
          raise BulkImports::Error, 'LFS Objects JSON read failed'
        end

        def tmpdir
          @tmpdir ||= Dir.mktmpdir('bulk_imports')
        end

        def relation
          BulkImports::FileTransfer::ProjectConfig::LFS_OBJECTS_RELATION
        end

        def tar_filename
          "#{relation}.tar"
        end

        def targz_filename
          "#{tar_filename}.gz"
        end

        def lfs_json_filepath?(file_path)
          file_path == lfs_json_filepath
        end

        def tar_filepath?(file_path)
          File.join(tmpdir, tar_filename) == file_path
        end

        def lfs_json_filepath
          File.join(tmpdir, "#{relation}.json")
        end

        def create_lfs_objects_project(lfs_object, repository_type)
          return unless allowed_repository_types.include?(repository_type)

          lfs_objects_project = LfsObjectsProject.create(
            project: portable,
            lfs_object: lfs_object,
            repository_type: repository_type
          )

          return if lfs_objects_project.persisted?

          logger.warn(
            project_id: portable.id,
            message: 'Failed to save lfs objects project',
            errors: lfs_objects_project.errors.full_messages.to_sentence,
            **Gitlab::ApplicationContext.current
          )
        end

        def repository_types(oid)
          types = lfs_json[oid]

          return [] unless types
          return [] unless types.is_a?(Array)

          # Only return allowed, deduplicated repository types
          types.uniq & allowed_repository_types
        end

        def allowed_repository_types
          @allowed_repository_types ||= LfsObjectsProject.repository_types.values.push(nil)
        end
      end
    end
  end
end
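For orientation, include Pipeline hooks the class into the BulkImports runner, which drives extract, load, and after_run in that order. A minimal sketch of that flow, assuming a duck-typed pipeline object (the real BulkImports::Pipeline::Runner additionally records tracker state and handles failures):

    # Simplified, illustrative driver for a BulkImports pipeline.
    def run_pipeline(pipeline, context)
      extracted = pipeline.extract(context) # download, decompress, unpack archive

      extracted.data.each do |file_path|
        pipeline.load(context, file_path)   # validate path, create LfsObject records
      end
    ensure
      pipeline.after_run(nil)               # always remove the tmpdir
    end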
@@ -87,6 +87,10 @@ module BulkImports
        pipeline: BulkImports::Common::Pipelines::UploadsPipeline,
        stage: 5
      },
      lfs_objects: {
        pipeline: BulkImports::Common::Pipelines::LfsObjectsPipeline,
        stage: 5
      },
      auto_devops: {
        pipeline: BulkImports::Projects::Pipelines::AutoDevopsPipeline,
        stage: 5
......
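The stage: 5 value controls ordering: pipelines that share a stage number belong to the same stage, and every stage must complete before the next one begins (the EntityFinisher closes the import at stage 6). A rough illustration with hypothetical entries:

    # Illustrative only: group pipeline configs by stage, so all stage-N
    # pipelines finish before stage N + 1 starts.
    config = {
      uploads:     { pipeline: 'UploadsPipeline',    stage: 5 },
      lfs_objects: { pipeline: 'LfsObjectsPipeline', stage: 5 },
      finisher:    { pipeline: 'EntityFinisher',     stage: 6 }
    }

    config.values.group_by { |entry| entry[:stage] }.sort.each do |stage, entries|
      puts "stage #{stage}: #{entries.map { |e| e[:pipeline] }.join(', ')}"
    end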
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe BulkImports::Common::Pipelines::LfsObjectsPipeline do
  let_it_be(:portable) { create(:project) }
  let_it_be(:oid) { 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855' }

  let(:tmpdir) { Dir.mktmpdir }
  let(:entity) { create(:bulk_import_entity, :project_entity, project: portable, source_full_path: 'test') }
  let(:tracker) { create(:bulk_import_tracker, entity: entity) }
  let(:context) { BulkImports::Pipeline::Context.new(tracker) }
  let(:lfs_dir_path) { tmpdir }
  let(:lfs_json_file_path) { File.join(lfs_dir_path, 'lfs_objects.json') }
  let(:lfs_file_path) { File.join(lfs_dir_path, oid) }

  subject(:pipeline) { described_class.new(context) }

  before do
    FileUtils.mkdir_p(lfs_dir_path)
    FileUtils.touch(lfs_json_file_path)
    FileUtils.touch(lfs_file_path)
    File.write(lfs_json_file_path, { oid => [0, 1, 2, nil] }.to_json)

    allow(Dir).to receive(:mktmpdir).with('bulk_imports').and_return(tmpdir)
  end

  after do
    FileUtils.remove_entry(tmpdir) if Dir.exist?(tmpdir)
  end
  describe '#run' do
    it 'imports lfs objects into destination project and removes tmpdir' do
      allow(pipeline)
        .to receive(:extract)
        .and_return(BulkImports::Pipeline::ExtractedData.new(data: [lfs_json_file_path, lfs_file_path]))

      pipeline.run

      expect(portable.lfs_objects.count).to eq(1)
      expect(portable.lfs_objects_projects.count).to eq(4)
      expect(Dir.exist?(tmpdir)).to eq(false)
    end
  end

  describe '#extract' do
    it 'downloads & extracts lfs objects filepaths' do
      download_service = instance_double("BulkImports::FileDownloadService")
      decompression_service = instance_double("BulkImports::FileDecompressionService")
      extraction_service = instance_double("BulkImports::ArchiveExtractionService")

      expect(BulkImports::FileDownloadService)
        .to receive(:new)
        .with(
          configuration: context.configuration,
          relative_url: "/#{entity.pluralized_name}/test/export_relations/download?relation=lfs_objects",
          tmpdir: tmpdir,
          filename: 'lfs_objects.tar.gz')
        .and_return(download_service)
      expect(BulkImports::FileDecompressionService)
        .to receive(:new)
        .with(tmpdir: tmpdir, filename: 'lfs_objects.tar.gz')
        .and_return(decompression_service)
      expect(BulkImports::ArchiveExtractionService)
        .to receive(:new)
        .with(tmpdir: tmpdir, filename: 'lfs_objects.tar')
        .and_return(extraction_service)

      expect(download_service).to receive(:execute)
      expect(decompression_service).to receive(:execute)
      expect(extraction_service).to receive(:execute)

      extracted_data = pipeline.extract(context)

      expect(extracted_data.data).to contain_exactly(lfs_json_file_path, lfs_file_path)
    end
  end
  describe '#load' do
    before do
      allow(pipeline)
        .to receive(:extract)
        .and_return(BulkImports::Pipeline::ExtractedData.new(data: [lfs_json_file_path, lfs_file_path]))
    end

    context 'when file path is lfs json' do
      it 'returns' do
        filepath = File.join(tmpdir, 'lfs_objects.json')

        allow(Gitlab::Json).to receive(:parse).with(filepath).and_return({})

        expect { pipeline.load(context, filepath) }.not_to change { portable.lfs_objects.count }
      end
    end

    context 'when file path is tar file' do
      it 'returns' do
        filepath = File.join(tmpdir, 'lfs_objects.tar')

        expect { pipeline.load(context, filepath) }.not_to change { portable.lfs_objects.count }
      end
    end

    context 'when lfs json read failed' do
      it 'raises an error' do
        File.write(lfs_json_file_path, 'invalid json')

        expect { pipeline.load(context, lfs_file_path) }.to raise_error(BulkImports::Error, 'LFS Objects JSON read failed')
      end
    end

    context 'when file path is being traversed' do
      it 'raises an error' do
        expect { pipeline.load(context, File.join(tmpdir, '..')) }.to raise_error(Gitlab::Utils::PathTraversalAttackError, 'Invalid path')
      end
    end

    context 'when file path is not under tmpdir' do
      it 'raises an error' do
        expect { pipeline.load(context, '/home/test.txt') }.to raise_error(StandardError, 'path /home/test.txt is not allowed')
      end
    end

    context 'when file path is symlink' do
      it 'returns' do
        symlink = File.join(tmpdir, 'symlink')

        FileUtils.ln_s(lfs_file_path, symlink)

        expect { pipeline.load(context, symlink) }.not_to change { portable.lfs_objects.count }
      end
    end

    context 'when path is a directory' do
      it 'returns' do
        expect { pipeline.load(context, Dir.tmpdir) }.not_to change { portable.lfs_objects.count }
      end
    end
    context 'lfs objects project' do
      context 'when lfs objects json is invalid' do
        context 'when oid value is not Array' do
          it 'does not create lfs objects project' do
            File.write(lfs_json_file_path, { oid => 'test' }.to_json)

            expect { pipeline.load(context, lfs_file_path) }.not_to change { portable.lfs_objects_projects.count }
          end
        end

        context 'when oid value is nil' do
          it 'does not create lfs objects project' do
            File.write(lfs_json_file_path, { oid => nil }.to_json)

            expect { pipeline.load(context, lfs_file_path) }.not_to change { portable.lfs_objects_projects.count }
          end
        end

        context 'when oid value is not allowed' do
          it 'does not create lfs objects project' do
            File.write(lfs_json_file_path, { oid => ['invalid'] }.to_json)

            expect { pipeline.load(context, lfs_file_path) }.not_to change { portable.lfs_objects_projects.count }
          end
        end

        context 'when repository type is duplicated' do
          it 'creates only one lfs objects project per repository type' do
            File.write(lfs_json_file_path, { oid => [0, 0, 1, 1, 2, 2] }.to_json)

            expect { pipeline.load(context, lfs_file_path) }.to change { portable.lfs_objects_projects.count }.by(3)
          end
        end
      end

      context 'when lfs objects project fails to be created' do
        it 'logs the failure' do
          allow_next_instance_of(LfsObjectsProject) do |object|
            allow(object).to receive(:persisted?).and_return(false)
          end

          expect_next_instance_of(Gitlab::Import::Logger) do |logger|
            expect(logger)
              .to receive(:warn)
              .with(
                project_id: portable.id,
                message: 'Failed to save lfs objects project',
                errors: '',
                **Gitlab::ApplicationContext.current)
              .exactly(4).times
          end

          pipeline.load(context, lfs_file_path)
        end
      end
    end
  end
  describe '#after_run' do
    it 'removes tmpdir' do
      allow(FileUtils).to receive(:remove_entry).and_call_original
      expect(FileUtils).to receive(:remove_entry).with(tmpdir).and_call_original

      pipeline.after_run(nil)

      expect(Dir.exist?(tmpdir)).to eq(false)
    end

    context 'when tmpdir does not exist' do
      it 'does not attempt to remove tmpdir' do
        FileUtils.remove_entry(tmpdir)

        expect(FileUtils).not_to receive(:remove_entry).with(tmpdir)

        pipeline.after_run(nil)
      end
    end
  end
end
@@ -26,6 +26,7 @@ RSpec.describe BulkImports::Projects::Stage do
        [4, BulkImports::Projects::Pipelines::ServiceDeskSettingPipeline],
        [5, BulkImports::Common::Pipelines::WikiPipeline],
        [5, BulkImports::Common::Pipelines::UploadsPipeline],
        [5, BulkImports::Common::Pipelines::LfsObjectsPipeline],
        [5, BulkImports::Projects::Pipelines::AutoDevopsPipeline],
        [5, BulkImports::Projects::Pipelines::PipelineSchedulesPipeline],
        [6, BulkImports::Common::Pipelines::EntityFinisher]