Commit c88b81ff authored by Stan Hu

Merge branch 'georgekoltsov/migrate-project-attributes' into 'master'

Add project attributes migration to GitLab Migration

See merge request gitlab-org/gitlab!75898
parents 17781208 24b21418
......@@ -6,6 +6,7 @@ module BulkImports
include Gitlab::Utils::StrongMemoize
UPLOADS_RELATION = 'uploads'
SELF_RELATION = 'self'
def initialize(portable)
@portable = portable
......@@ -28,7 +29,11 @@ module BulkImports
end
# All relations to export for this portable: tree relations, file
# relations and the top-level 'self' relation, minus any explicitly
# skipped relations.
#
# Fix: the stale pre-change expression (diff residue) that preceded the
# updated return line was dead code and has been removed.
def portable_relations
  tree_relations + file_relations + self_relation - skipped_relations
end
# True when +relation+ names the special 'self' relation, i.e. the
# portable object's own attributes rather than a child relation.
def self_relation?(relation)
  SELF_RELATION == relation
end
def tree_relation?(relation)
......@@ -45,6 +50,10 @@ module BulkImports
portable_tree[:include].find { |include| include[relation.to_sym] }
end
# Memoized relations tree for the portable class, with keys stringified
# so they can be compared against relation names supplied as strings.
# NOTE(review): relies on attributes_finder and portable_class_sym
# defined elsewhere in this config class.
def portable_relations_tree
  @portable_relations_tree ||= attributes_finder.find_relations_tree(portable_class_sym).deep_stringify_keys
end
private
attr_reader :portable
......@@ -67,10 +76,6 @@ module BulkImports
@portable_class_sym ||= portable_class.to_s.demodulize.underscore.to_sym
end
# Relations tree for the portable class, memoized and deep-stringified.
# NOTE(review): this appears to be the pre-move (private) copy of the
# method shown in the diff above — confirm only one definition survives
# in the merged file.
def portable_relations_tree
  @portable_relations_tree ||= attributes_finder.find_relations_tree(portable_class_sym).deep_stringify_keys
end
# Abstract hook: subclasses must return the path of their import/export
# YAML configuration. Raises if the subclass forgot to implement it.
def import_export_yaml
  raise NotImplementedError
end
......@@ -86,6 +91,10 @@ module BulkImports
# Relations excluded from export. The base implementation skips nothing;
# subclasses override this to filter the result of portable_relations.
def skipped_relations
  []
end
# The 'self' relation wrapped in an array so it can be concatenated with
# the other relation lists inside portable_relations.
def self_relation
  [SELF_RELATION]
end
end
end
end
......@@ -59,7 +59,7 @@ module BulkImports
end
def export_service
@export_service ||= if config.tree_relation?(relation)
@export_service ||= if config.tree_relation?(relation) || config.self_relation?(relation)
TreeExportService.new(portable, config.export_path, relation)
elsif config.file_relation?(relation)
FileExportService.new(portable, config.export_path, relation)
......
......@@ -10,6 +10,8 @@ module BulkImports
end
def execute
return serializer.serialize_root(config.class::SELF_RELATION) if self_relation?
relation_definition = config.tree_relation_definition_for(relation)
raise BulkImports::Error, 'Unsupported relation export type' unless relation_definition
......@@ -18,6 +20,8 @@ module BulkImports
end
# Filename of the export artifact: the 'self' relation is serialized as
# a single JSON document, every other relation as newline-delimited JSON.
def exported_filename
  extension = self_relation? ? 'json' : 'ndjson'

  "#{relation}.#{extension}"
end
......@@ -39,5 +43,9 @@ module BulkImports
# Writer handed to the streaming serializer; emits newline-delimited
# JSON documents into the configured export path.
def json_writer
  ::Gitlab::ImportExport::Json::NdjsonWriter.new(export_path)
end
# Whether this export targets the portable's own attributes ('self')
# instead of one of its child relations.
def self_relation?
  config.class::SELF_RELATION == relation
end
end
end
......@@ -7,6 +7,7 @@ RSpec.describe BulkImports::Projects::Stage do
[
[0, BulkImports::Projects::Pipelines::ProjectPipeline],
[1, BulkImports::Projects::Pipelines::RepositoryPipeline],
[1, BulkImports::Projects::Pipelines::ProjectAttributesPipeline],
[2, BulkImports::Common::Pipelines::LabelsPipeline],
[2, BulkImports::Common::Pipelines::MilestonesPipeline],
[2, BulkImports::Common::Pipelines::BadgesPipeline],
......
# frozen_string_literal: true
module BulkImports
  module Projects
    module Pipelines
      # Imports a project's own attributes (description, settings flags,
      # merge-request options, etc.) by downloading the compressed
      # `self.json` relation file exported by the source instance,
      # cleaning it, and assigning it to the project.
      class ProjectAttributesPipeline
        include Pipeline

        transformer ::BulkImports::Common::Transformers::ProhibitedAttributesTransformer

        # Downloads and decompresses the exported attributes archive into a
        # temporary directory, then parses the JSON payload.
        def extract(context)
          download_service(tmp_dir, context).execute
          decompression_service(tmp_dir).execute

          project_attributes = json_decode(json_attributes)

          BulkImports::Pipeline::ExtractedData.new(data: project_attributes)
        end

        # Strips prohibited/excluded attributes and removes keys that are
        # sub-relations — those are imported by their own pipelines.
        def transform(_, data)
          subrelations = config.portable_relations_tree.keys.map(&:to_s)

          Gitlab::ImportExport::AttributeCleaner.clean(
            relation_hash: data,
            relation_class: Project,
            excluded_keys: config.relation_excluded_keys(:project)
          ).except(*subrelations)
        end

        # Assigns the cleaned attributes and persists the project, after
        # normalizing settings that cannot be imported verbatim.
        def load(_, data)
          portable.assign_attributes(data)
          portable.reconcile_shared_runners_setting!
          portable.drop_visibility_level!
          portable.save!
        end

        # Removes the temporary working directory once the pipeline finishes.
        def after_run(_)
          FileUtils.remove_entry(tmp_dir)
        end

        # Raw JSON string read from the downloaded (decompressed) file.
        def json_attributes
          @json_attributes ||= File.read(File.join(tmp_dir, filename))
        end

        private

        def tmp_dir
          @tmp_dir ||= Dir.mktmpdir
        end

        def config
          @config ||= BulkImports::FileTransfer.config_for(portable)
        end

        def download_service(tmp_dir, context)
          @download_service ||= BulkImports::FileDownloadService.new(
            configuration: context.configuration,
            relative_url: context.entity.relation_download_url_path(BulkImports::FileTransfer::BaseConfig::SELF_RELATION),
            dir: tmp_dir,
            filename: compressed_filename
          )
        end

        def decompression_service(tmp_dir)
          @decompression_service ||= BulkImports::FileDecompressionService.new(dir: tmp_dir, filename: compressed_filename)
        end

        # Gzipped variant of the attributes filename, e.g. 'self.json.gz'.
        # Fix: the interpolation was garbled in the source ("#(unknown).gz");
        # the spec expects filename: 'self.json.gz'.
        def compressed_filename
          "#{filename}.gz"
        end

        def filename
          "#{BulkImports::FileTransfer::BaseConfig::SELF_RELATION}.json"
        end

        # Parses the JSON payload, converting parser errors into a
        # BulkImports::Error so callers get a consistent failure type.
        def json_decode(string)
          Gitlab::Json.parse(string)
        rescue JSON::ParserError => e
          Gitlab::ErrorTracking.log_exception(e)
          raise BulkImports::Error, 'Incorrect JSON format'
        end
      end
    end
  end
end
......@@ -15,6 +15,10 @@ module BulkImports
pipeline: BulkImports::Projects::Pipelines::RepositoryPipeline,
stage: 1
},
project_attributes: {
pipeline: BulkImports::Projects::Pipelines::ProjectAttributesPipeline,
stage: 1
},
labels: {
pipeline: BulkImports::Common::Pipelines::LabelsPipeline,
stage: 2
......
......@@ -40,6 +40,13 @@ module Gitlab
end
end
# Serializes the exportable's own attributes — excluding nested relations
# and preloads — and writes them under +exportable_path+. The parameter
# defaults to the serializer's configured path, so existing callers are
# unaffected; BulkImports passes the 'self' relation name explicitly.
def serialize_root(exportable_path = @exportable_path)
  attributes = exportable.as_json(
    relations_schema.merge(include: nil, preloads: nil))
  json_writer.write_attributes(exportable_path, attributes)
end
def serialize_relation(definition)
raise ArgumentError, 'definition needs to be Hash' unless definition.is_a?(Hash)
raise ArgumentError, 'definition needs to have exactly one Hash element' unless definition.one?
......@@ -60,12 +67,6 @@ module Gitlab
attr_reader :json_writer, :relations_schema, :exportable
# Writes the exportable's own attributes (no relations/preloads) to the
# configured path.
# NOTE(review): this is the pre-move private copy shown as removed in the
# diff above — confirm only the public, parameterized version survives in
# the merged file.
def serialize_root
  attributes = exportable.as_json(
    relations_schema.merge(include: nil, preloads: nil))
  json_writer.write_attributes(@exportable_path, attributes)
end
def serialize_many_relations(key, records, options)
enumerator = Enumerator.new do |items|
key_preloads = preloads&.dig(key)
......
# frozen_string_literal: true
# Spec for ProjectAttributesPipeline: verifies the project's own
# attributes are downloaded, decompressed, cleaned and assigned.
require 'spec_helper'
RSpec.describe BulkImports::Projects::Pipelines::ProjectAttributesPipeline do
let_it_be(:project) { create(:project) }
let_it_be(:bulk_import) { create(:bulk_import) }
let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project, bulk_import: bulk_import) }
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
let(:tmpdir) { Dir.mktmpdir }
# `extra` lets individual contexts merge overrides into the fixture hash.
let(:extra) { {} }
let(:project_attributes) do
{
'description' => 'description',
'visibility_level' => 0,
'archived' => false,
'merge_requests_template' => 'test',
'merge_requests_rebase_enabled' => true,
'approvals_before_merge' => 0,
'reset_approvals_on_push' => true,
'merge_requests_ff_only_enabled' => true,
'issues_template' => 'test',
'shared_runners_enabled' => true,
'build_coverage_regex' => 'build_coverage_regex',
'build_allow_git_fetch' => true,
'build_timeout' => 3600,
'pending_delete' => false,
'public_builds' => true,
'last_repository_check_failed' => nil,
'only_allow_merge_if_pipeline_succeeds' => true,
'has_external_issue_tracker' => false,
'request_access_enabled' => true,
'has_external_wiki' => false,
'ci_config_path' => nil,
'only_allow_merge_if_all_discussions_are_resolved' => true,
'printing_merge_request_link_enabled' => true,
'auto_cancel_pending_pipelines' => 'enabled',
'service_desk_enabled' => false,
'delete_error' => nil,
'disable_overriding_approvers_per_merge_request' => true,
'resolve_outdated_diff_discussions' => true,
'jobs_cache_index' => nil,
'external_authorization_classification_label' => nil,
'pages_https_only' => false,
'merge_requests_author_approval' => false,
'merge_requests_disable_committers_approval' => true,
'require_password_to_approve' => true,
'remove_source_branch_after_merge' => true,
'autoclose_referenced_issues' => true,
'suggestion_commit_message' => 'Test!'
}.merge(extra)
end
subject(:pipeline) { described_class.new(context) }
# Pin mktmpdir so the pipeline works inside the spec-managed tmpdir.
before do
allow(Dir).to receive(:mktmpdir).and_return(tmpdir)
end
after do
FileUtils.remove_entry(tmpdir) if Dir.exist?(tmpdir)
end
describe '#run' do
before do
allow(pipeline).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: project_attributes))
pipeline.run
end
it 'imports project attributes', :aggregate_failures do
project_attributes.each_pair do |key, value|
expect(project.public_send(key)).to eq(value)
end
end
context 'when project is archived' do
let(:extra) { { 'archived' => true } }
it 'sets project as archived' do
expect(project.archived).to eq(true)
end
end
end
# Verifies download + decompression services are built with the expected
# 'self' relation URL and 'self.json.gz' filename, then executed.
describe '#extract' do
before do
file_download_service = instance_double("BulkImports::FileDownloadService")
file_decompression_service = instance_double("BulkImports::FileDecompressionService")
expect(BulkImports::FileDownloadService)
.to receive(:new)
.with(
configuration: context.configuration,
relative_url: "/#{entity.pluralized_name}/#{entity.source_full_path}/export_relations/download?relation=self",
dir: tmpdir,
filename: 'self.json.gz')
.and_return(file_download_service)
expect(BulkImports::FileDecompressionService)
.to receive(:new)
.with(dir: tmpdir, filename: 'self.json.gz')
.and_return(file_decompression_service)
expect(file_download_service).to receive(:execute)
expect(file_decompression_service).to receive(:execute)
end
it 'downloads, decompresses & decodes json' do
allow(pipeline).to receive(:json_attributes).and_return("{\"test\":\"test\"}")
extracted_data = pipeline.extract(context)
expect(extracted_data.data).to match_array([{ 'test' => 'test' }])
end
context 'when json parsing error occurs' do
it 'raises an error' do
allow(pipeline).to receive(:json_attributes).and_return("invalid")
expect { pipeline.extract(context) }.to raise_error(BulkImports::Error)
end
end
end
describe '#transform' do
it 'removes prohibited attributes from hash' do
input = { 'description' => 'description', 'issues' => [], 'milestones' => [], 'id' => 5 }
expect(Gitlab::ImportExport::AttributeCleaner).to receive(:clean).and_call_original
expect(pipeline.transform(context, input)).to eq({ 'description' => 'description' })
end
end
describe '#load' do
it 'assigns attributes, drops visibility and reconciles shared runner setting' do
expect(project).to receive(:assign_attributes).with(project_attributes)
expect(project).to receive(:reconcile_shared_runners_setting!)
expect(project).to receive(:drop_visibility_level!)
expect(project).to receive(:save!)
pipeline.load(context, project_attributes)
end
end
describe '#json_attributes' do
it 'reads raw json from file' do
filepath = File.join(tmpdir, 'self.json')
FileUtils.touch(filepath)
expect_file_read(filepath)
pipeline.json_attributes
end
end
end
......@@ -9,6 +9,7 @@ RSpec.describe BulkImports::Projects::Stage do
[
[0, BulkImports::Projects::Pipelines::ProjectPipeline],
[1, BulkImports::Projects::Pipelines::RepositoryPipeline],
[1, BulkImports::Projects::Pipelines::ProjectAttributesPipeline],
[2, BulkImports::Common::Pipelines::LabelsPipeline],
[2, BulkImports::Common::Pipelines::MilestonesPipeline],
[2, BulkImports::Common::Pipelines::BadgesPipeline],
......
......@@ -5,7 +5,8 @@ require 'spec_helper'
RSpec.describe BulkImports::TreeExportService do
let_it_be(:project) { create(:project) }
let_it_be(:export_path) { Dir.mktmpdir }
let_it_be(:relation) { 'issues' }
let(:relation) { 'issues' }
subject(:service) { described_class.new(project, export_path, relation) }
......@@ -25,11 +26,31 @@ RSpec.describe BulkImports::TreeExportService do
expect { service.execute }.to raise_error(BulkImports::Error, 'Unsupported relation export type')
end
end
context 'when relation is self' do
let(:relation) { 'self' }
it 'executes export on portable itself' do
expect_next_instance_of(Gitlab::ImportExport::Json::StreamingSerializer) do |serializer|
expect(serializer).to receive(:serialize_root)
end
subject.execute
end
end
end
describe '#exported_filename' do
it 'returns filename of the exported file' do
expect(subject.exported_filename).to eq('issues.ndjson')
end
context 'when relation is self' do
let(:relation) { 'self' }
it 'returns filename of the exported file' do
expect(subject.exported_filename).to eq('self.json')
end
end
end
end
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment