Commit 2915c8be authored by Kamil Trzciński

Merge branch 'refactor-modules-for-import-export' into 'master'

Refactor import_export

Closes #207846

See merge request gitlab-org/gitlab!25623
parents 0b29e5a9 06801192
@@ -599,7 +599,7 @@ module Ci
   # Manually set the notes for a Ci::Pipeline
   # There is no ActiveRecord relation between Ci::Pipeline and notes
   # as they are related to a commit sha. This method helps importing
-  # them using the +Gitlab::ImportExport::ProjectRelationFactory+ class.
+  # them using the +Gitlab::ImportExport::Project::RelationFactory+ class.
   def notes=(notes)
     notes.each do |note|
       note[:id] = nil
...
@@ -37,7 +37,7 @@ module Groups
     end

     def tree_exporter
-      Gitlab::ImportExport::GroupTreeSaver.new(group: @group, current_user: @current_user, shared: @shared, params: @params)
+      Gitlab::ImportExport::Group::TreeSaver.new(group: @group, current_user: @current_user, shared: @shared, params: @params)
     end

     def file_saver
...
@@ -34,7 +34,7 @@ module Groups
     end

     def restorer
-      @restorer ||= Gitlab::ImportExport::GroupTreeRestorer.new(user: @current_user,
+      @restorer ||= Gitlab::ImportExport::Group::TreeRestorer.new(user: @current_user,
                                                                  shared: @shared,
                                                                  group: @group,
                                                                  group_hash: nil)
...
@@ -54,7 +54,7 @@ module Projects
     end

     def project_tree_saver
-      Gitlab::ImportExport::ProjectTreeSaver.new(project: project, current_user: current_user, shared: shared, params: params)
+      Gitlab::ImportExport::Project::TreeSaver.new(project: project, current_user: current_user, shared: shared, params: params)
     end

     def uploads_saver
...
@@ -81,7 +81,7 @@ The last option is to import a project using a Rails console:
    sudo -u git -H bundle exec rails console RAILS_ENV=production
    ```

-1. Create a project and run `ProjectTreeRestorer`:
+1. Create a project and run `Project::TreeRestorer`:

    ```ruby
    shared_class = Struct.new(:export_path) do
@@ -98,7 +98,7 @@ The last option is to import a project using a Rails console:
    begin
      #Enable Request store
      RequestStore.begin!
-     Gitlab::ImportExport::ProjectTreeRestorer.new(user: user, shared: shared, project: project).restore
+     Gitlab::ImportExport::Project::TreeRestorer.new(user: user, shared: shared, project: project).restore
    ensure
      RequestStore.end!
      RequestStore.clear!
@@ -128,11 +128,11 @@ The last option is to import a project using a Rails console:
 For Performance testing, we should:

 - Import a quite large project, [`gitlabhq`](https://gitlab.com/gitlab-org/quality/performance-data#gitlab-performance-test-framework-data) should be a good example.
-- Measure the execution time of `ProjectTreeRestorer`.
+- Measure the execution time of `Project::TreeRestorer`.
 - Count the number of executed SQL queries during the restore.
 - Observe the number of GC cycles happening.

-You can use this [snippet](https://gitlab.com/gitlab-org/gitlab/snippets/1924954), which will restore the project, and measure the execution time of `ProjectTreeRestorer`, number of SQL queries and number of GC cycles happening.
+You can use this [snippet](https://gitlab.com/gitlab-org/gitlab/snippets/1924954), which will restore the project, and measure the execution time of `Project::TreeRestorer`, number of SQL queries and number of GC cycles happening.

 You can execute the script from the `gdk/gitlab` directory like this:
...
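The three measurements called out above can also be scripted directly in the same console session. A minimal sketch, assuming `user`, `shared` and `project` are already set up as in the console steps above (the linked snippet is the more complete version):

```ruby
require 'benchmark'

# Count SQL queries via ActiveSupport instrumentation while the restore runs.
sql_count = 0
subscription = ActiveSupport::Notifications.subscribe('sql.active_record') { sql_count += 1 }

begin
  gc_cycles_before = GC.stat(:count)

  elapsed = Benchmark.realtime do
    Gitlab::ImportExport::Project::TreeRestorer.new(user: user, shared: shared, project: project).restore
  end

  puts "restore: #{elapsed.round(2)}s, #{sql_count} SQL queries, #{GC.stat(:count) - gc_cycles_before} GC cycles"
ensure
  ActiveSupport::Notifications.unsubscribe(subscription)
end
```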
@@ -89,7 +89,7 @@ The following items will NOT be exported:

 NOTE: **Note:**
 For more details on the specific data persisted in a project export, see the
-[`import_export.yml`](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/import_export/import_export.yml) file.
+[`import_export.yml`](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/import_export/project/import_export.yml) file.

 ## Exporting a project and its data
...
# frozen_string_literal: true
module EE
module Gitlab
module ImportExport
module GroupProjectObjectBuilder
extend ::Gitlab::Utils::Override
extend ActiveSupport::Concern
private
override :where_clause_for_klass
def where_clause_for_klass
return attrs_to_arel(attributes.slice('filename')).and(table[:issue_id].eq(nil)) if design?
super
end
def design?
klass == DesignManagement::Design
end
end
end
end
end
# frozen_string_literal: true
module EE
module Gitlab
module ImportExport
module Project
module ObjectBuilder
extend ::Gitlab::Utils::Override
extend ActiveSupport::Concern
private
override :where_clause_for_klass
def where_clause_for_klass
return attrs_to_arel(attributes.slice('filename')).and(table[:issue_id].eq(nil)) if design?
super
end
def design?
klass == DesignManagement::Design
end
end
end
end
end
end
# frozen_string_literal: true
module EE
module Gitlab
module ImportExport
module Project
module RelationFactory
extend ActiveSupport::Concern
EE_OVERRIDES = {
design: 'DesignManagement::Design',
designs: 'DesignManagement::Design',
design_versions: 'DesignManagement::Version',
actions: 'DesignManagement::Action',
deploy_access_levels: 'ProtectedEnvironment::DeployAccessLevel',
unprotect_access_levels: 'ProtectedBranch::UnprotectAccessLevel'
}.freeze
EE_EXISTING_OBJECT_RELATIONS = %i[DesignManagement::Design].freeze
class_methods do
extend ::Gitlab::Utils::Override
override :overrides
def overrides
super.merge(EE_OVERRIDES)
end
override :existing_object_relations
def existing_object_relations
super + EE_EXISTING_OBJECT_RELATIONS
end
end
end
end
end
end
end
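Because this EE module is prepended onto the CE class (see the `prepend_if_ee` call inside `Gitlab::ImportExport::Project::RelationFactory` further down in this diff), the class-level lookup tables simply gain the EE-only relations on top of the CE table. A hedged console check, assuming an EE instance:

```ruby
# On an EE instance the merged overrides resolve the design relations:
Gitlab::ImportExport::Project::RelationFactory.overrides[:designs]
# => 'DesignManagement::Design'

Gitlab::ImportExport::Project::RelationFactory.existing_object_relations.include?(:'DesignManagement::Design')
# => true
```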
# frozen_string_literal: true
module EE
module Gitlab
module ImportExport
module ProjectRelationFactory
extend ActiveSupport::Concern
EE_OVERRIDES = {
design: 'DesignManagement::Design',
designs: 'DesignManagement::Design',
design_versions: 'DesignManagement::Version',
actions: 'DesignManagement::Action',
deploy_access_levels: 'ProtectedEnvironment::DeployAccessLevel',
unprotect_access_levels: 'ProtectedBranch::UnprotectAccessLevel'
}.freeze
EE_EXISTING_OBJECT_RELATIONS = %i[DesignManagement::Design].freeze
class_methods do
extend ::Gitlab::Utils::Override
override :overrides
def overrides
super.merge(EE_OVERRIDES)
end
override :existing_object_relations
def existing_object_relations
super + EE_EXISTING_OBJECT_RELATIONS
end
end
end
end
end
end
@@ -2,7 +2,7 @@

 require 'spec_helper'

-describe Gitlab::ImportExport::GroupTreeRestorer do
+describe Gitlab::ImportExport::Group::TreeRestorer do
   include ImportExport::CommonUtil

   let(:user) { create(:user) }
...
@@ -2,7 +2,7 @@

 require 'spec_helper'

-describe Gitlab::ImportExport::GroupTreeSaver do
+describe Gitlab::ImportExport::Group::TreeSaver do
   describe 'saves the group tree into a json object' do
     let_it_be(:user) { create(:user) }
     let_it_be(:group) { create(:group) }
...
 # frozen_string_literal: true

 require 'spec_helper'

-describe Gitlab::ImportExport::ProjectTreeRestorer do
+describe Gitlab::ImportExport::Project::TreeRestorer do
   include ImportExport::CommonUtil
   using RSpec::Parameterized::TableSyntax
...
@@ -2,7 +2,7 @@

 require 'spec_helper'

-describe Gitlab::ImportExport::ProjectTreeSaver do
+describe Gitlab::ImportExport::Project::TreeSaver do
   describe 'saves the project tree into a json object' do
     let_it_be(:user) { create(:user) }
     let_it_be(:group) { create(:group) }
...
@@ -2,7 +2,7 @@

 require 'spec_helper'

-describe Gitlab::ImportExport::GroupRelationFactory do
+describe Gitlab::ImportExport::Group::RelationFactory do
   let(:group) { create(:group) }
   let(:members_mapper) { double('members_mapper').as_null_object }
   let(:user) { create(:admin) }
@@ -11,7 +11,7 @@ describe Gitlab::ImportExport::GroupRelationFactory do
     described_class.create(relation_sym: relation_sym,
                            relation_hash: relation_hash,
                            members_mapper: members_mapper,
-                           object_builder: Gitlab::ImportExport::GroupObjectBuilder,
+                           object_builder: Gitlab::ImportExport::Group::ObjectBuilder,
                            user: user,
                            importable: group,
                            excluded_keys: excluded_keys)
...
@@ -2,7 +2,7 @@

 require 'spec_helper'

-describe Gitlab::ImportExport::GroupProjectObjectBuilder do
+describe Gitlab::ImportExport::Project::ObjectBuilder do
   let!(:group) { create(:group, :private) }
   let!(:subgroup) { create(:group, :private, parent: group) }
   let!(:project) do
...
@@ -43,7 +43,7 @@ module Gitlab
     end

     def config_file
-      Rails.root.join('lib/gitlab/import_export/import_export.yml')
+      Rails.root.join('lib/gitlab/import_export/project/import_export.yml')
     end

     def version_filename
@@ -77,7 +77,7 @@ module Gitlab
     end

     def group_config_file
-      Rails.root.join('lib/gitlab/import_export/group_import_export.yml')
+      Rails.root.join('lib/gitlab/import_export/group/import_export.yml')
     end
   end
 end
...
@@ -4,8 +4,8 @@ module Gitlab
   module ImportExport
     class AttributeCleaner
       ALLOWED_REFERENCES = [
-        *ProjectRelationFactory::PROJECT_REFERENCES,
-        *ProjectRelationFactory::USER_REFERENCES,
+        *Gitlab::ImportExport::Project::RelationFactory::PROJECT_REFERENCES,
+        *Gitlab::ImportExport::Project::RelationFactory::USER_REFERENCES,
         'group_id',
         'commit_id',
         'discussion_id',
...
# frozen_string_literal: true
module Gitlab
module ImportExport
module Base
# Base class for Group & Project Object Builders.
# This class is not intended to be used on its own but
# rather inherited from.
#
# Cache keeps 1000 entries at most, 1000 is chosen based on:
# - one cache entry uses around 0.5K memory, 1000 items uses around 500K.
# (leave some buffer, it should be less than 1M). It is an affordable cost for project import.
# - for projects in Gitlab.com, it seems 1000 entries for labels/milestones is enough.
# For example, gitlab has ~970 labels and 26 milestones.
LRU_CACHE_SIZE = 1000
class ObjectBuilder
def self.build(*args)
new(*args).find
end
def initialize(klass, attributes)
@klass = klass.ancestors.include?(Label) ? Label : klass
@attributes = attributes
if Gitlab::SafeRequestStore.active?
@lru_cache = cache_from_request_store
@cache_key = [klass, attributes]
end
end
def find
find_with_cache do
find_object || klass.create(prepare_attributes)
end
end
protected
def where_clauses
raise NotImplementedError
end
# attributes wrapped in a method to be
# adjusted in sub-class if needed
def prepare_attributes
attributes
end
private
attr_reader :klass, :attributes, :lru_cache, :cache_key
def find_with_cache
return yield unless lru_cache && cache_key
lru_cache[cache_key] ||= yield
end
def cache_from_request_store
Gitlab::SafeRequestStore[:lru_cache] ||= LruRedux::Cache.new(LRU_CACHE_SIZE)
end
def find_object
klass.where(where_clause).first
end
def where_clause
where_clauses.reduce(:and)
end
def table
@table ||= klass.arel_table
end
# Returns Arel clause:
# `"{table_name}"."{attrs.keys[0]}" = '{attrs.values[0]} AND {table_name}"."{attrs.keys[1]}" = '{attrs.values[1]}"`
# from the given Hash of attributes.
def attrs_to_arel(attrs)
attrs.map do |key, value|
table[key].eq(value)
end.reduce(:and)
end
# Returns Arel clause `"{table_name}"."title" = '{attributes['title']}'`
# if the attributes hash has a 'title' key, otherwise `nil`.
def where_clause_for_title
attrs_to_arel(attributes.slice('title'))
end
# Returns Arel clause `"{table_name}"."description" = '{attributes['description']}'`
# if the attributes hash has a 'description' key, otherwise `nil`.
def where_clause_for_description
attrs_to_arel(attributes.slice('description'))
end
# Returns Arel clause `"{table_name}"."created_at" = '{attributes['created_at']}'`
# if the attributes hash has a 'created_at' key, otherwise `nil`.
def where_clause_for_created_at
attrs_to_arel(attributes.slice('created_at'))
end
end
end
end
end
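Concrete builders only need to supply `where_clauses` (and may refine `prepare_attributes`); everything else, including the LRU cache keyed on `[klass, attributes]`, comes from this base class. A hypothetical minimal subclass, purely to illustrate the contract (not part of this change):

```ruby
# Hypothetical subclass: looks objects up by title only.
module Gitlab
  module ImportExport
    module Base
      class TitleOnlyObjectBuilder < ObjectBuilder
        protected

        # `where_clause_for_title` is inherited from Base::ObjectBuilder.
        def where_clauses
          [where_clause_for_title].compact
        end
      end
    end
  end
end

# Finds a milestone titled 'v1.0' or creates one with exactly these attributes:
# Gitlab::ImportExport::Base::TitleOnlyObjectBuilder.build(Milestone, 'title' => 'v1.0', 'project' => project)
```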
# frozen_string_literal: true
module Gitlab
module ImportExport
module Base
class RelationFactory
include Gitlab::Utils::StrongMemoize
IMPORTED_OBJECT_MAX_RETRIES = 5.freeze
OVERRIDES = {}.freeze
EXISTING_OBJECT_RELATIONS = %i[].freeze
# This represents all relations that have unique key on `project_id` or `group_id`
UNIQUE_RELATIONS = %i[].freeze
USER_REFERENCES = %w[
author_id
assignee_id
updated_by_id
merged_by_id
latest_closed_by_id
user_id
created_by_id
last_edited_by_id
merge_user_id
resolved_by_id
closed_by_id
owner_id
].freeze
TOKEN_RESET_MODELS = %i[Project Namespace Group Ci::Trigger Ci::Build Ci::Runner ProjectHook].freeze
def self.create(*args)
new(*args).create
end
def self.relation_class(relation_name)
# There are scenarios where the model is pluralized (e.g.
# MergeRequest::Metrics), and we don't want to force it to singular
# with #classify.
relation_name.to_s.classify.constantize
rescue NameError
relation_name.to_s.constantize
end
def initialize(relation_sym:, relation_hash:, members_mapper:, object_builder:, user:, importable:, excluded_keys: [])
@relation_name = self.class.overrides[relation_sym]&.to_sym || relation_sym
@relation_hash = relation_hash.except('noteable_id')
@members_mapper = members_mapper
@object_builder = object_builder
@user = user
@importable = importable
@imported_object_retries = 0
@relation_hash[importable_column_name] = @importable.id
# Remove excluded keys from relation_hash
# We don't do this in the parsed_relation_hash because of the 'transformed attributes'
# For example, MergeRequestDiffFiles exports its diff attribute as utf8_diff. Then,
# in the create method that attribute is renamed to diff. And because diff is an excluded key,
# if we clean the excluded keys in the parsed_relation_hash, it will be removed
# from the object attributes and the export will fail.
@relation_hash.except!(*excluded_keys)
end
# Creates an object from an actual model with name "relation_sym" with params from
# the relation_hash, updating references with new object IDs, mapping users using
# the "members_mapper" object, also updating notes if required.
def create
return if invalid_relation?
setup_base_models
setup_models
generate_imported_object
end
def self.overrides
self::OVERRIDES
end
def self.existing_object_relations
self::EXISTING_OBJECT_RELATIONS
end
private
def invalid_relation?
false
end
def setup_models
raise NotImplementedError
end
def unique_relations
# define in sub-class if any
self.class::UNIQUE_RELATIONS
end
def setup_base_models
update_user_references
remove_duplicate_assignees
reset_tokens!
remove_encrypted_attributes!
end
def update_user_references
self.class::USER_REFERENCES.each do |reference|
if @relation_hash[reference]
@relation_hash[reference] = @members_mapper.map[@relation_hash[reference]]
end
end
end
def remove_duplicate_assignees
return unless @relation_hash['issue_assignees']
# When an assignee did not exist in the members mapper, the importer is
# assigned. We only need to assign each user once.
@relation_hash['issue_assignees'].uniq!(&:user_id)
end
def generate_imported_object
imported_object
end
def reset_tokens!
return unless Gitlab::ImportExport.reset_tokens? && self.class::TOKEN_RESET_MODELS.include?(@relation_name)
# If we import/export to the same instance, tokens will have to be reset.
# We also have to reset them to avoid issues when the gitlab secrets file cannot be copied across.
relation_class.attribute_names.select { |name| name.include?('token') }.each do |token|
@relation_hash[token] = nil
end
end
def remove_encrypted_attributes!
return unless relation_class.respond_to?(:encrypted_attributes) && relation_class.encrypted_attributes.any?
relation_class.encrypted_attributes.each_key do |key|
@relation_hash[key.to_s] = nil
end
end
def relation_class
@relation_class ||= self.class.relation_class(@relation_name)
end
def importable_column_name
importable_class_name.concat('_id')
end
def importable_class_name
@importable.class.to_s.downcase
end
def imported_object
if existing_or_new_object.respond_to?(:importing)
existing_or_new_object.importing = true
end
existing_or_new_object
rescue ActiveRecord::RecordNotUnique
# as the operation is not atomic, retry in the unlikely scenario an INSERT is
# performed on the same object between the SELECT and the INSERT
@imported_object_retries += 1
retry if @imported_object_retries < IMPORTED_OBJECT_MAX_RETRIES
end
def parsed_relation_hash
@parsed_relation_hash ||= Gitlab::ImportExport::AttributeCleaner.clean(relation_hash: @relation_hash,
relation_class: relation_class)
end
def existing_or_new_object
# Only find existing records to avoid mapping tables such as milestones
# Otherwise always create the record, skipping the extra SELECT clause.
@existing_or_new_object ||= begin
if existing_object?
attribute_hash = attribute_hash_for(['events'])
existing_object.assign_attributes(attribute_hash) if attribute_hash.any?
existing_object
else
# Because of single-type inheritance, we need to be careful to use the `type` field
# See https://gitlab.com/gitlab-org/gitlab/issues/34860#note_235321497
inheritance_column = relation_class.try(:inheritance_column)
inheritance_attributes = parsed_relation_hash.slice(inheritance_column)
object = relation_class.new(inheritance_attributes)
object.assign_attributes(parsed_relation_hash)
object
end
end
end
def attribute_hash_for(attributes)
attributes.each_with_object({}) do |attribute, hash|
hash[attribute] = parsed_relation_hash.delete(attribute) if parsed_relation_hash[attribute]
end
end
def existing_object
@existing_object ||= find_or_create_object!
end
def unique_relation_object
unique_relation_object = relation_class.find_or_create_by(importable_column_name => @importable.id)
unique_relation_object.assign_attributes(parsed_relation_hash)
unique_relation_object
end
def find_or_create_object!
return unique_relation_object if unique_relation?
# Can't use IDs as validation exists calling `group` or `project` attributes
finder_hash = parsed_relation_hash.tap do |hash|
if relation_class.attribute_method?('group_id') && @importable.is_a?(::Project)
hash['group'] = @importable.group
end
hash[importable_class_name] = @importable if relation_class.reflect_on_association(importable_class_name.to_sym)
hash.delete(importable_column_name)
end
@object_builder.build(relation_class, finder_hash)
end
def setup_note
set_note_author
# attachment is deprecated and note uploads are handled by Markdown uploader
@relation_hash['attachment'] = nil
end
# Sets the author for a note. If the user importing the project
# has admin access, an actual mapping with new project members
# will be used. Otherwise, a note stating the original author name
# is left.
def set_note_author
old_author_id = @relation_hash['author_id']
author = @relation_hash.delete('author')
update_note_for_missing_author(author['name']) unless has_author?(old_author_id)
end
def has_author?(old_author_id)
admin_user? && @members_mapper.include?(old_author_id)
end
def missing_author_note(updated_at, author_name)
timestamp = updated_at.split('.').first
"\n\n *By #{author_name} on #{timestamp} (imported from GitLab project)*"
end
def update_note_for_missing_author(author_name)
@relation_hash['note'] = '*Blank note*' if @relation_hash['note'].blank?
@relation_hash['note'] = "#{@relation_hash['note']}#{missing_author_note(@relation_hash['updated_at'], author_name)}"
end
def admin_user?
@user.admin?
end
def existing_object?
strong_memoize(:_existing_object) do
self.class.existing_object_relations.include?(@relation_name) || unique_relation?
end
end
def unique_relation?
strong_memoize(:unique_relation) do
importable_foreign_key.present? &&
(has_unique_index_on_importable_fk? || uses_importable_fk_as_primary_key?)
end
end
def has_unique_index_on_importable_fk?
cache = cached_has_unique_index_on_importable_fk
table_name = relation_class.table_name
return cache[table_name] if cache.has_key?(table_name)
index_exists =
ActiveRecord::Base.connection.index_exists?(
relation_class.table_name,
importable_foreign_key,
unique: true)
cache[table_name] = index_exists
end
# Avoid unnecessary DB requests
def cached_has_unique_index_on_importable_fk
Thread.current[:cached_has_unique_index_on_importable_fk] ||= {}
end
def uses_importable_fk_as_primary_key?
relation_class.primary_key == importable_foreign_key
end
def importable_foreign_key
relation_class.reflect_on_association(importable_class_name.to_sym)&.foreign_key
end
end
end
end
end
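Subclasses drive this factory exclusively through `.create` with keyword arguments, exactly as the group spec later in this merge request does. A hedged sketch with a made-up label payload (`members_mapper`, `user` and `group` are assumed to already exist, e.g. in a Rails console):

```ruby
# Hypothetical, trimmed-down payload as it would appear in the exported JSON.
label_hash = { 'title' => 'security', 'color' => '#1d2da4', 'created_at' => '2020-02-20T10:00:00Z' }

Gitlab::ImportExport::Group::RelationFactory.create(
  relation_sym:   :labels,           # mapped to :group_labels via OVERRIDES
  relation_hash:  label_hash,
  members_mapper: members_mapper,
  object_builder: Gitlab::ImportExport::Group::ObjectBuilder,
  user:           user,
  importable:     group,
  excluded_keys:  []
)
```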
# frozen_string_literal: true
module Gitlab
module ImportExport
# Base class for Group & Project Object Builders.
# This class is not intended to be used on its own but
# rather inherited from.
#
# Cache keeps 1000 entries at most, 1000 is chosen based on:
# - one cache entry uses around 0.5K memory, 1000 items uses around 500K.
# (leave some buffer, it should be less than 1M). It is an affordable cost for project import.
# - for projects in Gitlab.com, it seems 1000 entries for labels/milestones is enough.
# For example, gitlab has ~970 labels and 26 milestones.
LRU_CACHE_SIZE = 1000
class BaseObjectBuilder
def self.build(*args)
new(*args).find
end
def initialize(klass, attributes)
@klass = klass.ancestors.include?(Label) ? Label : klass
@attributes = attributes
if Gitlab::SafeRequestStore.active?
@lru_cache = cache_from_request_store
@cache_key = [klass, attributes]
end
end
def find
find_with_cache do
find_object || klass.create(prepare_attributes)
end
end
protected
def where_clauses
raise NotImplementedError
end
# attributes wrapped in a method to be
# adjusted in sub-class if needed
def prepare_attributes
attributes
end
private
attr_reader :klass, :attributes, :lru_cache, :cache_key
def find_with_cache
return yield unless lru_cache && cache_key
lru_cache[cache_key] ||= yield
end
def cache_from_request_store
Gitlab::SafeRequestStore[:lru_cache] ||= LruRedux::Cache.new(LRU_CACHE_SIZE)
end
def find_object
klass.where(where_clause).first
end
def where_clause
where_clauses.reduce(:and)
end
def table
@table ||= klass.arel_table
end
# Returns Arel clause:
# `"{table_name}"."{attrs.keys[0]}" = '{attrs.values[0]} AND {table_name}"."{attrs.keys[1]}" = '{attrs.values[1]}"`
# from the given Hash of attributes.
def attrs_to_arel(attrs)
attrs.map do |key, value|
table[key].eq(value)
end.reduce(:and)
end
# Returns Arel clause `"{table_name}"."title" = '{attributes['title']}'`
# if the attributes hash has a 'title' key, otherwise `nil`.
def where_clause_for_title
attrs_to_arel(attributes.slice('title'))
end
# Returns Arel clause `"{table_name}"."description" = '{attributes['description']}'`
# if the attributes hash has a 'description' key, otherwise `nil`.
def where_clause_for_description
attrs_to_arel(attributes.slice('description'))
end
# Returns Arel clause `"{table_name}"."created_at" = '{attributes['created_at']}'`
# if the attributes hash has a 'created_at' key, otherwise `nil`.
def where_clause_for_created_at
attrs_to_arel(attributes.slice('created_at'))
end
end
end
end
# frozen_string_literal: true
module Gitlab
module ImportExport
class BaseRelationFactory
include Gitlab::Utils::StrongMemoize
IMPORTED_OBJECT_MAX_RETRIES = 5.freeze
OVERRIDES = {}.freeze
EXISTING_OBJECT_RELATIONS = %i[].freeze
# This represents all relations that have unique key on `project_id` or `group_id`
UNIQUE_RELATIONS = %i[].freeze
USER_REFERENCES = %w[
author_id
assignee_id
updated_by_id
merged_by_id
latest_closed_by_id
user_id
created_by_id
last_edited_by_id
merge_user_id
resolved_by_id
closed_by_id
owner_id
].freeze
TOKEN_RESET_MODELS = %i[Project Namespace Group Ci::Trigger Ci::Build Ci::Runner ProjectHook].freeze
def self.create(*args)
new(*args).create
end
def self.relation_class(relation_name)
# There are scenarios where the model is pluralized (e.g.
# MergeRequest::Metrics), and we don't want to force it to singular
# with #classify.
relation_name.to_s.classify.constantize
rescue NameError
relation_name.to_s.constantize
end
def initialize(relation_sym:, relation_hash:, members_mapper:, object_builder:, user:, importable:, excluded_keys: [])
@relation_name = self.class.overrides[relation_sym]&.to_sym || relation_sym
@relation_hash = relation_hash.except('noteable_id')
@members_mapper = members_mapper
@object_builder = object_builder
@user = user
@importable = importable
@imported_object_retries = 0
@relation_hash[importable_column_name] = @importable.id
# Remove excluded keys from relation_hash
# We don't do this in the parsed_relation_hash because of the 'transformed attributes'
# For example, MergeRequestDiffFiles exports its diff attribute as utf8_diff. Then,
# in the create method that attribute is renamed to diff. And because diff is an excluded key,
# if we clean the excluded keys in the parsed_relation_hash, it will be removed
# from the object attributes and the export will fail.
@relation_hash.except!(*excluded_keys)
end
# Creates an object from an actual model with name "relation_sym" with params from
# the relation_hash, updating references with new object IDs, mapping users using
# the "members_mapper" object, also updating notes if required.
def create
return if invalid_relation?
setup_base_models
setup_models
generate_imported_object
end
def self.overrides
self::OVERRIDES
end
def self.existing_object_relations
self::EXISTING_OBJECT_RELATIONS
end
private
def invalid_relation?
false
end
def setup_models
raise NotImplementedError
end
def unique_relations
# define in sub-class if any
self.class::UNIQUE_RELATIONS
end
def setup_base_models
update_user_references
remove_duplicate_assignees
reset_tokens!
remove_encrypted_attributes!
end
def update_user_references
self.class::USER_REFERENCES.each do |reference|
if @relation_hash[reference]
@relation_hash[reference] = @members_mapper.map[@relation_hash[reference]]
end
end
end
def remove_duplicate_assignees
return unless @relation_hash['issue_assignees']
# When an assignee did not exist in the members mapper, the importer is
# assigned. We only need to assign each user once.
@relation_hash['issue_assignees'].uniq!(&:user_id)
end
def generate_imported_object
imported_object
end
def reset_tokens!
return unless Gitlab::ImportExport.reset_tokens? && self.class::TOKEN_RESET_MODELS.include?(@relation_name)
# If we import/export to the same instance, tokens will have to be reset.
# We also have to reset them to avoid issues when the gitlab secrets file cannot be copied across.
relation_class.attribute_names.select { |name| name.include?('token') }.each do |token|
@relation_hash[token] = nil
end
end
def remove_encrypted_attributes!
return unless relation_class.respond_to?(:encrypted_attributes) && relation_class.encrypted_attributes.any?
relation_class.encrypted_attributes.each_key do |key|
@relation_hash[key.to_s] = nil
end
end
def relation_class
@relation_class ||= self.class.relation_class(@relation_name)
end
def importable_column_name
importable_class_name.concat('_id')
end
def importable_class_name
@importable.class.to_s.downcase
end
def imported_object
if existing_or_new_object.respond_to?(:importing)
existing_or_new_object.importing = true
end
existing_or_new_object
rescue ActiveRecord::RecordNotUnique
# as the operation is not atomic, retry in the unlikely scenario an INSERT is
# performed on the same object between the SELECT and the INSERT
@imported_object_retries += 1
retry if @imported_object_retries < IMPORTED_OBJECT_MAX_RETRIES
end
def parsed_relation_hash
@parsed_relation_hash ||= Gitlab::ImportExport::AttributeCleaner.clean(relation_hash: @relation_hash,
relation_class: relation_class)
end
def existing_or_new_object
# Only find existing records to avoid mapping tables such as milestones
# Otherwise always create the record, skipping the extra SELECT clause.
@existing_or_new_object ||= begin
if existing_object?
attribute_hash = attribute_hash_for(['events'])
existing_object.assign_attributes(attribute_hash) if attribute_hash.any?
existing_object
else
# Because of single-type inheritance, we need to be careful to use the `type` field
# See https://gitlab.com/gitlab-org/gitlab/issues/34860#note_235321497
inheritance_column = relation_class.try(:inheritance_column)
inheritance_attributes = parsed_relation_hash.slice(inheritance_column)
object = relation_class.new(inheritance_attributes)
object.assign_attributes(parsed_relation_hash)
object
end
end
end
def attribute_hash_for(attributes)
attributes.each_with_object({}) do |attribute, hash|
hash[attribute] = parsed_relation_hash.delete(attribute) if parsed_relation_hash[attribute]
end
end
def existing_object
@existing_object ||= find_or_create_object!
end
def unique_relation_object
unique_relation_object = relation_class.find_or_create_by(importable_column_name => @importable.id)
unique_relation_object.assign_attributes(parsed_relation_hash)
unique_relation_object
end
def find_or_create_object!
return unique_relation_object if unique_relation?
# Can't use IDs as validation exists calling `group` or `project` attributes
finder_hash = parsed_relation_hash.tap do |hash|
if relation_class.attribute_method?('group_id') && @importable.is_a?(Project)
hash['group'] = @importable.group
end
hash[importable_class_name] = @importable if relation_class.reflect_on_association(importable_class_name.to_sym)
hash.delete(importable_column_name)
end
@object_builder.build(relation_class, finder_hash)
end
def setup_note
set_note_author
# attachment is deprecated and note uploads are handled by Markdown uploader
@relation_hash['attachment'] = nil
end
# Sets the author for a note. If the user importing the project
# has admin access, an actual mapping with new project members
# will be used. Otherwise, a note stating the original author name
# is left.
def set_note_author
old_author_id = @relation_hash['author_id']
author = @relation_hash.delete('author')
update_note_for_missing_author(author['name']) unless has_author?(old_author_id)
end
def has_author?(old_author_id)
admin_user? && @members_mapper.include?(old_author_id)
end
def missing_author_note(updated_at, author_name)
timestamp = updated_at.split('.').first
"\n\n *By #{author_name} on #{timestamp} (imported from GitLab project)*"
end
def update_note_for_missing_author(author_name)
@relation_hash['note'] = '*Blank note*' if @relation_hash['note'].blank?
@relation_hash['note'] = "#{@relation_hash['note']}#{missing_author_note(@relation_hash['updated_at'], author_name)}"
end
def admin_user?
@user.admin?
end
def existing_object?
strong_memoize(:_existing_object) do
self.class.existing_object_relations.include?(@relation_name) || unique_relation?
end
end
def unique_relation?
strong_memoize(:unique_relation) do
importable_foreign_key.present? &&
(has_unique_index_on_importable_fk? || uses_importable_fk_as_primary_key?)
end
end
def has_unique_index_on_importable_fk?
cache = cached_has_unique_index_on_importable_fk
table_name = relation_class.table_name
return cache[table_name] if cache.has_key?(table_name)
index_exists =
ActiveRecord::Base.connection.index_exists?(
relation_class.table_name,
importable_foreign_key,
unique: true)
cache[table_name] = index_exists
end
# Avoid unnecessary DB requests
def cached_has_unique_index_on_importable_fk
Thread.current[:cached_has_unique_index_on_importable_fk] ||= {}
end
def uses_importable_fk_as_primary_key?
relation_class.primary_key == importable_foreign_key
end
def importable_foreign_key
relation_class.reflect_on_association(importable_class_name.to_sym)&.foreign_key
end
end
end
end
# frozen_string_literal: true
module Gitlab
module ImportExport
module Group
# Given a class, it finds or creates a new object at group level.
#
# Example:
# `Group::ObjectBuilder.build(Label, label_attributes)`
# finds or initializes a label with the given attributes.
class ObjectBuilder < Base::ObjectBuilder
def self.build(*args)
::Group.transaction do
super
end
end
def initialize(klass, attributes)
super
@group = @attributes['group']
update_description
end
private
attr_reader :group
# Convert an empty description string to nil,
# because the existing object was saved with description: nil,
# which makes the object lookup fail since nil != ''
def update_description
attributes['description'] = nil if attributes['description'] == ''
end
def where_clauses
[
where_clause_base,
where_clause_for_title,
where_clause_for_description,
where_clause_for_created_at
].compact
end
# Returns Arel clause `"{table_name}"."group_id" = {group.id}`
def where_clause_base
table[:group_id].in(group_and_ancestor_ids)
end
def group_and_ancestor_ids
group.ancestors.map(&:id) << group.id
end
end
end
end
end
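Used directly, the builder takes the target class plus the attributes hash; the 'group' entry is what scopes the lookup to the group and its ancestors. A hedged example with made-up attribute values (`group` is assumed to be loaded):

```ruby
# Reuses a label named 'security' defined on `group` or any ancestor,
# otherwise creates it with these attributes.
Gitlab::ImportExport::Group::ObjectBuilder.build(
  Label,
  'title'      => 'security',
  'group'      => group,
  'created_at' => '2020-02-20T10:00:00Z'
)
```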
# frozen_string_literal: true
module Gitlab
module ImportExport
module Group
class RelationFactory < Base::RelationFactory
OVERRIDES = {
labels: :group_labels,
priorities: :label_priorities,
label: :group_label,
parent: :epic
}.freeze
EXISTING_OBJECT_RELATIONS = %i[
epic
epics
milestone
milestones
label
labels
group_label
group_labels
].freeze
private
def setup_models
setup_note if @relation_name == :notes
update_group_references
end
def update_group_references
return unless self.class.existing_object_relations.include?(@relation_name)
return unless @relation_hash['group_id']
@relation_hash['group_id'] = @importable.id
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module ImportExport
module Group
class TreeRestorer
attr_reader :user
attr_reader :shared
attr_reader :group
def initialize(user:, shared:, group:, group_hash:)
@path = File.join(shared.export_path, 'group.json')
@user = user
@shared = shared
@group = group
@group_hash = group_hash
end
def restore
@tree_hash = @group_hash || read_tree_hash
@group_members = @tree_hash.delete('members')
@children = @tree_hash.delete('children')
if members_mapper.map && restorer.restore
@children&.each do |group_hash|
group = create_group(group_hash: group_hash, parent_group: @group)
shared = Gitlab::ImportExport::Shared.new(group)
self.class.new(
user: @user,
shared: shared,
group: group,
group_hash: group_hash
).restore
end
end
return false if @shared.errors.any?
true
rescue => e
@shared.error(e)
false
end
private
def read_tree_hash
json = IO.read(@path)
ActiveSupport::JSON.decode(json)
rescue => e
@shared.logger.error(
group_id: @group.id,
group_name: @group.name,
message: "Import/Export error: #{e.message}"
)
raise Gitlab::ImportExport::Error.new('Incorrect JSON format')
end
def restorer
@relation_tree_restorer ||= RelationTreeRestorer.new(
user: @user,
shared: @shared,
importable: @group,
tree_hash: @tree_hash.except('name', 'path'),
members_mapper: members_mapper,
object_builder: object_builder,
relation_factory: relation_factory,
reader: reader
)
end
def create_group(group_hash:, parent_group:)
group_params = {
name: group_hash['name'],
path: group_hash['path'],
parent_id: parent_group&.id,
visibility_level: sub_group_visibility_level(group_hash, parent_group)
}
::Groups::CreateService.new(@user, group_params).execute
end
def sub_group_visibility_level(group_hash, parent_group)
original_visibility_level = group_hash['visibility_level'] || Gitlab::VisibilityLevel::PRIVATE
if parent_group && parent_group.visibility_level < original_visibility_level
Gitlab::VisibilityLevel.closest_allowed_level(parent_group.visibility_level)
else
original_visibility_level
end
end
def members_mapper
@members_mapper ||= Gitlab::ImportExport::MembersMapper.new(exported_members: @group_members, user: @user, importable: @group)
end
def relation_factory
Gitlab::ImportExport::Group::RelationFactory
end
def object_builder
Gitlab::ImportExport::Group::ObjectBuilder
end
def reader
@reader ||= Gitlab::ImportExport::Reader.new(
shared: @shared,
config: Gitlab::ImportExport::Config.new(
config: Gitlab::ImportExport.group_config_file
).to_h
)
end
end
end
end
end
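Restoring a previously exported group from a Rails console follows the same shape as the group import service earlier in this diff. A hedged sketch, assuming `group.json` is already present under the shared export path and the group/user lookups are placeholders:

```ruby
group  = Group.find_by_full_path('my-group')   # hypothetical target group
user   = User.find_by(username: 'root')        # hypothetical user performing the import
shared = Gitlab::ImportExport::Shared.new(group)

Gitlab::ImportExport::Group::TreeRestorer.new(
  user: user,
  shared: shared,
  group: group,
  group_hash: nil
).restore
```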
# frozen_string_literal: true
module Gitlab
module ImportExport
module Group
class TreeSaver
attr_reader :full_path, :shared
def initialize(group:, current_user:, shared:, params: {})
@params = params
@current_user = current_user
@shared = shared
@group = group
@full_path = File.join(@shared.export_path, ImportExport.group_filename)
end
def save
group_tree = serialize(@group, reader.group_tree)
tree_saver.save(group_tree, @shared.export_path, ImportExport.group_filename)
true
rescue => e
@shared.error(e)
false
end
private
def serialize(group, relations_tree)
group_tree = tree_saver.serialize(group, relations_tree)
group.children.each do |child|
group_tree['children'] ||= []
group_tree['children'] << serialize(child, relations_tree)
end
group_tree
rescue => e
@shared.error(e)
end
def reader
@reader ||= Gitlab::ImportExport::Reader.new(
shared: @shared,
config: Gitlab::ImportExport::Config.new(
config: Gitlab::ImportExport.group_config_file
).to_h
)
end
def tree_saver
@tree_saver ||= RelationTreeSaver.new
end
end
end
end
end
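The saver side is symmetric: it serializes the group and all of its descendants into a single JSON file under the shared export path. A hedged console sketch (the group/user lookups are placeholders):

```ruby
# Writes <shared.export_path>/<ImportExport.group_filename> for the group and its children.
group  = Group.find_by_full_path('my-group')   # hypothetical group to export
user   = User.find_by(username: 'root')        # hypothetical user performing the export
shared = Gitlab::ImportExport::Shared.new(group)

Gitlab::ImportExport::Group::TreeSaver.new(
  group: group,
  current_user: user,
  shared: shared
).save
```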
# frozen_string_literal: true
module Gitlab
module ImportExport
# Given a class, it finds or creates a new object at group level.
#
# Example:
# `GroupObjectBuilder.build(Label, label_attributes)`
# finds or initializes a label with the given attributes.
class GroupObjectBuilder < BaseObjectBuilder
def self.build(*args)
Group.transaction do
super
end
end
def initialize(klass, attributes)
super
@group = @attributes['group']
update_description
end
private
attr_reader :group
# Convert an empty description string to nil,
# because the existing object was saved with description: nil,
# which makes the object lookup fail since nil != ''
def update_description
attributes['description'] = nil if attributes['description'] == ''
end
def where_clauses
[
where_clause_base,
where_clause_for_title,
where_clause_for_description,
where_clause_for_created_at
].compact
end
# Returns Arel clause `"{table_name}"."group_id" = {group.id}`
def where_clause_base
table[:group_id].in(group_and_ancestor_ids)
end
def group_and_ancestor_ids
group.ancestors.map(&:id) << group.id
end
end
end
end
# frozen_string_literal: true
module Gitlab
module ImportExport
# Given a class, it finds or creates a new object
# (initializes in the case of Label) at group or project level.
# If it does not exist in the group, it creates it at project level.
#
# Example:
# `GroupProjectObjectBuilder.build(Label, label_attributes)`
# finds or initializes a label with the given attributes.
#
# It also adds some logic around Group Labels/Milestones for edge cases.
class GroupProjectObjectBuilder < BaseObjectBuilder
def self.build(*args)
Project.transaction do
super
end
end
def initialize(klass, attributes)
super
@group = @attributes['group']
@project = @attributes['project']
end
def find
return if epic? && group.nil?
super
end
private
attr_reader :group, :project
def where_clauses
[
where_clause_base,
where_clause_for_title,
where_clause_for_klass
].compact
end
# Returns Arel clause `"{table_name}"."project_id" = {project.id}` if project is present
# For example: merge_request has :target_project_id, and we are searching by :iid
# or, if group is present:
# `"{table_name}"."project_id" = {project.id} OR "{table_name}"."group_id" = {group.id}`
def where_clause_base
[].tap do |clauses|
clauses << table[:project_id].eq(project.id) if project
clauses << table[:group_id].in(group.self_and_ancestors_ids) if group
end.reduce(:or)
end
# Returns Arel clause for a particular model or `nil`.
def where_clause_for_klass
attrs_to_arel(attributes.slice('iid')) if merge_request?
end
def prepare_attributes
attributes.dup.tap do |atts|
atts.delete('group') unless epic?
if label?
atts['type'] = 'ProjectLabel' # Always create project labels
elsif milestone?
if atts['group_id'] # Transform new group milestones into project ones
atts['iid'] = nil
atts.delete('group_id')
else
claim_iid
end
end
atts['importing'] = true if klass.ancestors.include?(Importable)
end
end
def label?
klass == Label
end
def milestone?
klass == Milestone
end
def merge_request?
klass == MergeRequest
end
def epic?
klass == Epic
end
# If an existing group milestone used the IID
# claim the IID back and set the group milestone to use one available
# This is necessary to fix situations like the following:
# - Importing into a user namespace project with exported group milestones
# where the IID of the Group milestone could conflict with a project one.
def claim_iid
# The milestone has to be a group milestone, as it's the only case where
# we set the IID as the maximum. The rest of them are fixed.
milestone = project.milestones.find_by(iid: attributes['iid'])
return unless milestone
milestone.iid = nil
milestone.ensure_project_iid!
milestone.save!
end
end
end
end
Gitlab::ImportExport::GroupProjectObjectBuilder.prepend_if_ee('EE::Gitlab::ImportExport::GroupProjectObjectBuilder')
# frozen_string_literal: true
module Gitlab
module ImportExport
class GroupRelationFactory < BaseRelationFactory
OVERRIDES = {
labels: :group_labels,
priorities: :label_priorities,
label: :group_label,
parent: :epic
}.freeze
EXISTING_OBJECT_RELATIONS = %i[
epic
epics
milestone
milestones
label
labels
group_label
group_labels
].freeze
private
def setup_models
setup_note if @relation_name == :notes
update_group_references
end
def update_group_references
return unless self.class.existing_object_relations.include?(@relation_name)
return unless @relation_hash['group_id']
@relation_hash['group_id'] = @importable.id
end
end
end
end
# frozen_string_literal: true
module Gitlab
module ImportExport
class GroupTreeRestorer
attr_reader :user
attr_reader :shared
attr_reader :group
def initialize(user:, shared:, group:, group_hash:)
@path = File.join(shared.export_path, 'group.json')
@user = user
@shared = shared
@group = group
@group_hash = group_hash
end
def restore
@tree_hash = @group_hash || read_tree_hash
@group_members = @tree_hash.delete('members')
@children = @tree_hash.delete('children')
if members_mapper.map && restorer.restore
@children&.each do |group_hash|
group = create_group(group_hash: group_hash, parent_group: @group)
shared = Gitlab::ImportExport::Shared.new(group)
self.class.new(
user: @user,
shared: shared,
group: group,
group_hash: group_hash
).restore
end
end
return false if @shared.errors.any?
true
rescue => e
@shared.error(e)
false
end
private
def read_tree_hash
json = IO.read(@path)
ActiveSupport::JSON.decode(json)
rescue => e
@shared.logger.error(
group_id: @group.id,
group_name: @group.name,
message: "Import/Export error: #{e.message}"
)
raise Gitlab::ImportExport::Error.new('Incorrect JSON format')
end
def restorer
@relation_tree_restorer ||= RelationTreeRestorer.new(
user: @user,
shared: @shared,
importable: @group,
tree_hash: @tree_hash.except('name', 'path'),
members_mapper: members_mapper,
object_builder: object_builder,
relation_factory: relation_factory,
reader: reader
)
end
def create_group(group_hash:, parent_group:)
group_params = {
name: group_hash['name'],
path: group_hash['path'],
parent_id: parent_group&.id,
visibility_level: sub_group_visibility_level(group_hash, parent_group)
}
::Groups::CreateService.new(@user, group_params).execute
end
def sub_group_visibility_level(group_hash, parent_group)
original_visibility_level = group_hash['visibility_level'] || Gitlab::VisibilityLevel::PRIVATE
if parent_group && parent_group.visibility_level < original_visibility_level
Gitlab::VisibilityLevel.closest_allowed_level(parent_group.visibility_level)
else
original_visibility_level
end
end
def members_mapper
@members_mapper ||= Gitlab::ImportExport::MembersMapper.new(exported_members: @group_members, user: @user, importable: @group)
end
def relation_factory
Gitlab::ImportExport::GroupRelationFactory
end
def object_builder
Gitlab::ImportExport::GroupObjectBuilder
end
def reader
@reader ||= Gitlab::ImportExport::Reader.new(
shared: @shared,
config: Gitlab::ImportExport::Config.new(
config: Gitlab::ImportExport.group_config_file
).to_h
)
end
end
end
end
# frozen_string_literal: true
module Gitlab
module ImportExport
class GroupTreeSaver
attr_reader :full_path, :shared
def initialize(group:, current_user:, shared:, params: {})
@params = params
@current_user = current_user
@shared = shared
@group = group
@full_path = File.join(@shared.export_path, ImportExport.group_filename)
end
def save
group_tree = serialize(@group, reader.group_tree)
tree_saver.save(group_tree, @shared.export_path, ImportExport.group_filename)
true
rescue => e
@shared.error(e)
false
end
private
def serialize(group, relations_tree)
group_tree = tree_saver.serialize(group, relations_tree)
group.children.each do |child|
group_tree['children'] ||= []
group_tree['children'] << serialize(child, relations_tree)
end
group_tree
rescue => e
@shared.error(e)
end
def reader
@reader ||= Gitlab::ImportExport::Reader.new(
shared: @shared,
config: Gitlab::ImportExport::Config.new(
config: Gitlab::ImportExport.group_config_file
).to_h
)
end
def tree_saver
@tree_saver ||= RelationTreeSaver.new
end
end
end
end
@@ -49,7 +49,7 @@ module Gitlab
       end

       def project_tree
-        @project_tree ||= Gitlab::ImportExport::ProjectTreeRestorer.new(user: current_user,
+        @project_tree ||= Gitlab::ImportExport::Project::TreeRestorer.new(user: current_user,
                                                                         shared: shared,
                                                                         project: project)
       end
@@ -125,7 +125,7 @@ module Gitlab

       def project_to_overwrite
         strong_memoize(:project_to_overwrite) do
-          Project.find_by_full_path("#{project.namespace.full_path}/#{original_path}")
+          ::Project.find_by_full_path("#{project.namespace.full_path}/#{original_path}")
         end
       end
     end
...
@@ -91,9 +91,9 @@ module Gitlab

       def relation_class
         case @importable
-        when Project
+        when ::Project
           ProjectMember
-        when Group
+        when ::Group
           GroupMember
         end
       end
...
# frozen_string_literal: true
module Gitlab
module ImportExport
module Project
# Given a class, it finds or creates a new object
# (initializes in the case of Label) at group or project level.
# If it does not exist in the group, it creates it at project level.
#
# Example:
# `ObjectBuilder.build(Label, label_attributes)`
# finds or initializes a label with the given attributes.
#
# It also adds some logic around Group Labels/Milestones for edge cases.
class ObjectBuilder < Base::ObjectBuilder
def self.build(*args)
::Project.transaction do
super
end
end
def initialize(klass, attributes)
super
@group = @attributes['group']
@project = @attributes['project']
end
def find
return if epic? && group.nil?
super
end
private
attr_reader :group, :project
def where_clauses
[
where_clause_base,
where_clause_for_title,
where_clause_for_klass
].compact
end
# Returns Arel clause `"{table_name}"."project_id" = {project.id}` if project is present
# For example: merge_request has :target_project_id, and we are searching by :iid
# or, if group is present:
# `"{table_name}"."project_id" = {project.id} OR "{table_name}"."group_id" = {group.id}`
def where_clause_base
[].tap do |clauses|
clauses << table[:project_id].eq(project.id) if project
clauses << table[:group_id].in(group.self_and_ancestors_ids) if group
end.reduce(:or)
end
# Returns Arel clause for a particular model or `nil`.
def where_clause_for_klass
attrs_to_arel(attributes.slice('iid')) if merge_request?
end
def prepare_attributes
attributes.dup.tap do |atts|
atts.delete('group') unless epic?
if label?
atts['type'] = 'ProjectLabel' # Always create project labels
elsif milestone?
if atts['group_id'] # Transform new group milestones into project ones
atts['iid'] = nil
atts.delete('group_id')
else
claim_iid
end
end
atts['importing'] = true if klass.ancestors.include?(Importable)
end
end
def label?
klass == Label
end
def milestone?
klass == Milestone
end
def merge_request?
klass == MergeRequest
end
def epic?
klass == Epic
end
# If an existing group milestone used the IID
# claim the IID back and set the group milestone to use one available
# This is necessary to fix situations like the following:
# - Importing into a user namespace project with exported group milestones
# where the IID of the Group milestone could conflict with a project one.
def claim_iid
# The milestone has to be a group milestone, as it's the only case where
# we set the IID as the maximum. The rest of them are fixed.
milestone = project.milestones.find_by(iid: attributes['iid'])
return unless milestone
milestone.iid = nil
milestone.ensure_project_iid!
milestone.save!
end
end
end
end
end
Gitlab::ImportExport::Project::ObjectBuilder.prepend_if_ee('EE::Gitlab::ImportExport::Project::ObjectBuilder')
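For project-level imports the builder also normalizes group-scoped attributes (group milestones lose their `group_id` and `iid`, labels are forced to `ProjectLabel`). A hedged example reusing or creating a project milestone (`project` is assumed to be loaded, and the attribute values are made up):

```ruby
# Reuses a milestone titled '12.8' on the project or one of its ancestor groups,
# otherwise creates it as a project milestone.
Gitlab::ImportExport::Project::ObjectBuilder.build(
  Milestone,
  'title'   => '12.8',
  'project' => project,
  'group'   => project.group
)
```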
# frozen_string_literal: true
module Gitlab
module ImportExport
module Project
class RelationFactory < Base::RelationFactory
prepend_if_ee('::EE::Gitlab::ImportExport::Project::RelationFactory') # rubocop: disable Cop/InjectEnterpriseEditionModule
OVERRIDES = { snippets: :project_snippets,
ci_pipelines: 'Ci::Pipeline',
pipelines: 'Ci::Pipeline',
stages: 'Ci::Stage',
statuses: 'commit_status',
triggers: 'Ci::Trigger',
pipeline_schedules: 'Ci::PipelineSchedule',
builds: 'Ci::Build',
runners: 'Ci::Runner',
hooks: 'ProjectHook',
merge_access_levels: 'ProtectedBranch::MergeAccessLevel',
push_access_levels: 'ProtectedBranch::PushAccessLevel',
create_access_levels: 'ProtectedTag::CreateAccessLevel',
labels: :project_labels,
priorities: :label_priorities,
auto_devops: :project_auto_devops,
label: :project_label,
custom_attributes: 'ProjectCustomAttribute',
project_badges: 'Badge',
metrics: 'MergeRequest::Metrics',
ci_cd_settings: 'ProjectCiCdSetting',
error_tracking_setting: 'ErrorTracking::ProjectErrorTrackingSetting',
links: 'Releases::Link',
metrics_setting: 'ProjectMetricsSetting' }.freeze
BUILD_MODELS = %i[Ci::Build commit_status].freeze
GROUP_REFERENCES = %w[group_id].freeze
PROJECT_REFERENCES = %w[project_id source_project_id target_project_id].freeze
EXISTING_OBJECT_RELATIONS = %i[
milestone
milestones
label
labels
project_label
project_labels
group_label
group_labels
project_feature
merge_request
epic
ProjectCiCdSetting
container_expiration_policy
].freeze
def create
@object = super
# We preload the project, user, and group to re-use objects
@object = preload_keys(@object, PROJECT_REFERENCES, @importable)
@object = preload_keys(@object, GROUP_REFERENCES, @importable.group)
@object = preload_keys(@object, USER_REFERENCES, @user)
end
private
def invalid_relation?
# Do not create relation if it is:
# - An unknown service
# - A legacy trigger
unknown_service? ||
(!Feature.enabled?(:use_legacy_pipeline_triggers, @importable) && legacy_trigger?)
end
def setup_models
case @relation_name
when :merge_request_diff_files then setup_diff
when :notes then setup_note
when :'Ci::Pipeline' then setup_pipeline
when *BUILD_MODELS then setup_build
end
update_project_references
update_group_references
end
def generate_imported_object
if @relation_name == :merge_requests
MergeRequestParser.new(@importable, @relation_hash.delete('diff_head_sha'), super, @relation_hash).parse!
else
super
end
end
def update_project_references
# If source and target are the same, populate them with the new project ID.
if @relation_hash['source_project_id']
@relation_hash['source_project_id'] = same_source_and_target? ? @relation_hash['project_id'] : MergeRequestParser::FORKED_PROJECT_ID
end
@relation_hash['target_project_id'] = @relation_hash['project_id'] if @relation_hash['target_project_id']
end
def same_source_and_target?
@relation_hash['target_project_id'] && @relation_hash['target_project_id'] == @relation_hash['source_project_id']
end
def update_group_references
return unless existing_object?
return unless @relation_hash['group_id']
@relation_hash['group_id'] = @importable.namespace_id
end
def setup_build
@relation_hash.delete('trace') # old export files have trace
@relation_hash.delete('token')
@relation_hash.delete('commands')
@relation_hash.delete('artifacts_file_store')
@relation_hash.delete('artifacts_metadata_store')
@relation_hash.delete('artifacts_size')
end
def setup_diff
@relation_hash['diff'] = @relation_hash.delete('utf8_diff')
end
def setup_pipeline
@relation_hash.fetch('stages', []).each do |stage|
stage.statuses.each do |status|
status.pipeline = imported_object
end
end
end
def unknown_service?
@relation_name == :services && parsed_relation_hash['type'] &&
!Object.const_defined?(parsed_relation_hash['type'])
end
def legacy_trigger?
@relation_name == :'Ci::Trigger' && @relation_hash['owner_id'].nil?
end
def preload_keys(object, references, value)
return object unless value
references.each do |key|
attribute = "#{key.delete_suffix('_id')}=".to_sym
next unless object.respond_to?(key) && object.respond_to?(attribute)
if object.read_attribute(key) == value&.id
object.public_send(attribute, value) # rubocop:disable GitlabSecurity/PublicSend
end
end
object
end
end
end
end
end
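The `OVERRIDES` table is what turns relation keys from the exported JSON into model classes; anything not listed falls back to `relation_class`'s classify/constantize logic. A console sketch relying only on the constants and class methods defined above:

```ruby
factory = Gitlab::ImportExport::Project::RelationFactory

factory.overrides[:ci_pipelines]                     # => 'Ci::Pipeline'
factory.relation_class(factory.overrides[:statuses]) # => CommitStatus ('commit_status'.classify)
factory.relation_class(:merge_requests)              # => MergeRequest (no override needed)
```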
# frozen_string_literal: true
module Gitlab
module ImportExport
module Project
class TreeLoader
def load(path, dedup_entries: false)
tree_hash = ActiveSupport::JSON.decode(IO.read(path))
if dedup_entries
dedup_tree(tree_hash)
else
tree_hash
end
end
private
# This function removes duplicate entries from the given tree recursively
# by caching nodes it encounters repeatedly. We only consider nodes for
# which there can actually be multiple equivalent instances (e.g. strings,
# hashes and arrays, but not `nil`s, numbers or booleans.)
#
# The algorithm uses a recursive depth-first descent with 3 cases, starting
# with a root node (the tree/hash itself):
# - a node has already been cached; in this case we return it from the cache
# - a node has not been cached yet but should be; descend into its children
# - a node is neither cached nor qualifies for caching; this is a no-op
def dedup_tree(node, nodes_seen = {})
if nodes_seen.key?(node) && distinguishable?(node)
yield nodes_seen[node]
elsif should_dedup?(node)
nodes_seen[node] = node
case node
when Array
node.each_index do |idx|
dedup_tree(node[idx], nodes_seen) do |cached_node|
node[idx] = cached_node
end
end
when Hash
node.each do |k, v|
dedup_tree(v, nodes_seen) do |cached_node|
node[k] = cached_node
end
end
end
else
node
end
end
# We do not need to consider nodes for which there cannot be multiple instances
def should_dedup?(node)
node && !(node.is_a?(Numeric) || node.is_a?(TrueClass) || node.is_a?(FalseClass))
end
# We can only safely de-dup values that are distinguishable. True value objects
# are always distinguishable by nature. Hashes however can represent entities,
# which are identified by ID, not value. We therefore disallow de-duping hashes
# that do not have an `id` field, since we might risk dropping entities that
# have equal attributes yet different identities.
def distinguishable?(node)
if node.is_a?(Hash)
node.key?('id')
else
true
end
end
end
end
end
end
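The effect of `dedup_entries` is easiest to see on a tiny tree where the same identified hash occurs twice; after loading, both occurrences point at a single object. A hedged sketch, standalone apart from the loader itself:

```ruby
require 'tempfile'

# Two equal hashes that carry an 'id', so they qualify for de-duplication.
json = '{"first": {"id": 1, "title": "bug"}, "second": {"id": 1, "title": "bug"}}'

file = Tempfile.new(['project', '.json'])
file.write(json)
file.flush

tree = Gitlab::ImportExport::Project::TreeLoader.new.load(file.path, dedup_entries: true)
tree['first'].equal?(tree['second']) # => true: both keys reference the same Hash instance
```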
# frozen_string_literal: true
module Gitlab
module ImportExport
module Project
class TreeRestorer
LARGE_PROJECT_FILE_SIZE_BYTES = 500.megabyte
attr_reader :user
attr_reader :shared
attr_reader :project
def initialize(user:, shared:, project:)
@user = user
@shared = shared
@project = project
@tree_loader = TreeLoader.new
end
def restore
@tree_hash = read_tree_hash
@project_members = @tree_hash.delete('project_members')
RelationRenameService.rename(@tree_hash)
if relation_tree_restorer.restore
import_failure_service.with_retry(action: 'set_latest_merge_request_diff_ids!') do
@project.merge_requests.set_latest_merge_request_diff_ids!
end
true
else
false
end
rescue => e
@shared.error(e)
false
end
private
def large_project?(path)
File.size(path) >= LARGE_PROJECT_FILE_SIZE_BYTES
end
def read_tree_hash
path = File.join(@shared.export_path, 'project.json')
dedup_entries = large_project?(path) &&
Feature.enabled?(:dedup_project_import_metadata, project.group)
@tree_loader.load(path, dedup_entries: dedup_entries)
rescue => e
Rails.logger.error("Import/Export error: #{e.message}") # rubocop:disable Gitlab/RailsLogger
raise Gitlab::ImportExport::Error.new('Incorrect JSON format')
end
def relation_tree_restorer
@relation_tree_restorer ||= RelationTreeRestorer.new(
user: @user,
shared: @shared,
importable: @project,
tree_hash: @tree_hash,
object_builder: object_builder,
members_mapper: members_mapper,
relation_factory: relation_factory,
reader: reader
)
end
def members_mapper
@members_mapper ||= Gitlab::ImportExport::MembersMapper.new(exported_members: @project_members,
user: @user,
importable: @project)
end
def object_builder
Project::ObjectBuilder
end
def relation_factory
Project::RelationFactory
end
def reader
@reader ||= Gitlab::ImportExport::Reader.new(shared: @shared)
end
def import_failure_service
@import_failure_service ||= ImportFailureService.new(@project)
end
end
end
end
end
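Note that the de-duplicated loading path is double-gated: the export file must reach the 500 MB threshold and the `:dedup_project_import_metadata` feature flag must be enabled for the project's group. A rough console sketch of that gate, assuming `project` is the import target and the usual `Feature` helper is available:

```ruby
# Hypothetical console sketch; `project` is a placeholder for the import target.
export_file = File.join(project.import_export_shared.export_path, 'project.json')

File.size(export_file) >= 500.megabyte                        # the large_project? threshold
Feature.enable(:dedup_project_import_metadata, project.group) # opt the group into dedup
```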
# frozen_string_literal: true
module Gitlab
module ImportExport
module Project
class TreeSaver
attr_reader :full_path
def initialize(project:, current_user:, shared:, params: {})
@params = params
@project = project
@current_user = current_user
@shared = shared
@full_path = File.join(@shared.export_path, ImportExport.project_filename)
end
def save
project_tree = tree_saver.serialize(@project, reader.project_tree)
fix_project_tree(project_tree)
tree_saver.save(project_tree, @shared.export_path, ImportExport.project_filename)
true
rescue => e
@shared.error(e)
false
end
private
# Note that the resulting hash needs to be a pure hash and
# must not include any AR objects anymore, only objects that respond to `.to_json`
def fix_project_tree(project_tree)
if @params[:description].present?
project_tree['description'] = @params[:description]
end
project_tree['project_members'] += group_members_array
RelationRenameService.add_new_associations(project_tree)
end
def reader
@reader ||= Gitlab::ImportExport::Reader.new(shared: @shared)
end
def group_members_array
group_members.as_json(reader.group_members_tree).each do |group_member|
group_member['source_type'] = 'Project' # Make group members project members of the future import
end
end
def group_members
return [] unless @current_user.can?(:admin_group, @project.group)
# We need `.where.not(user_id: nil)` here; otherwise, when a group has an
# invitee, the following query would return 0 rows because a NULL
# user_id would be present in the subquery
# See http://stackoverflow.com/questions/129077/not-in-clause-and-null-values
non_null_user_ids = @project.project_members.where.not(user_id: nil).select(:user_id)
GroupMembersFinder.new(@project.group).execute.where.not(user_id: non_null_user_ids)
end
def tree_saver
@tree_saver ||= RelationTreeSaver.new
end
end
end
end
end
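A minimal export sketch from a Rails console, assuming `project` and `user` placeholders; `import_export_shared` appears in the specs below, and the optional `description` override corresponds to `fix_project_tree`:

```ruby
# Hypothetical console sketch; `project` and `user` are placeholders.
shared = project.import_export_shared

saver = Gitlab::ImportExport::Project::TreeSaver.new(
  project: project,
  current_user: user,
  shared: shared,
  params: { description: 'Exported copy' } # optional description override
)

saver.save      # => true on success; failures are recorded on `shared`
saver.full_path # location of the generated JSON file
```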
# frozen_string_literal: true
module Gitlab
module ImportExport
class ProjectRelationFactory < BaseRelationFactory
prepend_if_ee('::EE::Gitlab::ImportExport::ProjectRelationFactory') # rubocop: disable Cop/InjectEnterpriseEditionModule
OVERRIDES = { snippets: :project_snippets,
ci_pipelines: 'Ci::Pipeline',
pipelines: 'Ci::Pipeline',
stages: 'Ci::Stage',
statuses: 'commit_status',
triggers: 'Ci::Trigger',
pipeline_schedules: 'Ci::PipelineSchedule',
builds: 'Ci::Build',
runners: 'Ci::Runner',
hooks: 'ProjectHook',
merge_access_levels: 'ProtectedBranch::MergeAccessLevel',
push_access_levels: 'ProtectedBranch::PushAccessLevel',
create_access_levels: 'ProtectedTag::CreateAccessLevel',
labels: :project_labels,
priorities: :label_priorities,
auto_devops: :project_auto_devops,
label: :project_label,
custom_attributes: 'ProjectCustomAttribute',
project_badges: 'Badge',
metrics: 'MergeRequest::Metrics',
ci_cd_settings: 'ProjectCiCdSetting',
error_tracking_setting: 'ErrorTracking::ProjectErrorTrackingSetting',
links: 'Releases::Link',
metrics_setting: 'ProjectMetricsSetting' }.freeze
BUILD_MODELS = %i[Ci::Build commit_status].freeze
GROUP_REFERENCES = %w[group_id].freeze
PROJECT_REFERENCES = %w[project_id source_project_id target_project_id].freeze
EXISTING_OBJECT_RELATIONS = %i[
milestone
milestones
label
labels
project_label
project_labels
group_label
group_labels
project_feature
merge_request
epic
ProjectCiCdSetting
container_expiration_policy
].freeze
def create
@object = super
# We preload the project, user, and group to re-use objects
@object = preload_keys(@object, PROJECT_REFERENCES, @importable)
@object = preload_keys(@object, GROUP_REFERENCES, @importable.group)
@object = preload_keys(@object, USER_REFERENCES, @user)
end
private
def invalid_relation?
# Do not create relation if it is:
# - An unknown service
# - A legacy trigger
unknown_service? ||
(!Feature.enabled?(:use_legacy_pipeline_triggers, @importable) && legacy_trigger?)
end
def setup_models
case @relation_name
when :merge_request_diff_files then setup_diff
when :notes then setup_note
when :'Ci::Pipeline' then setup_pipeline
when *BUILD_MODELS then setup_build
end
update_project_references
update_group_references
end
def generate_imported_object
if @relation_name == :merge_requests
MergeRequestParser.new(@importable, @relation_hash.delete('diff_head_sha'), super, @relation_hash).parse!
else
super
end
end
def update_project_references
# If source and target are the same, populate them with the new project ID.
if @relation_hash['source_project_id']
@relation_hash['source_project_id'] = same_source_and_target? ? @relation_hash['project_id'] : MergeRequestParser::FORKED_PROJECT_ID
end
@relation_hash['target_project_id'] = @relation_hash['project_id'] if @relation_hash['target_project_id']
end
def same_source_and_target?
@relation_hash['target_project_id'] && @relation_hash['target_project_id'] == @relation_hash['source_project_id']
end
def update_group_references
return unless existing_object?
return unless @relation_hash['group_id']
@relation_hash['group_id'] = @importable.namespace_id
end
def setup_build
@relation_hash.delete('trace') # old export files have trace
@relation_hash.delete('token')
@relation_hash.delete('commands')
@relation_hash.delete('artifacts_file_store')
@relation_hash.delete('artifacts_metadata_store')
@relation_hash.delete('artifacts_size')
end
def setup_diff
@relation_hash['diff'] = @relation_hash.delete('utf8_diff')
end
def setup_pipeline
@relation_hash.fetch('stages', []).each do |stage|
stage.statuses.each do |status|
status.pipeline = imported_object
end
end
end
def unknown_service?
@relation_name == :services && parsed_relation_hash['type'] &&
!Object.const_defined?(parsed_relation_hash['type'])
end
def legacy_trigger?
@relation_name == :'Ci::Trigger' && @relation_hash['owner_id'].nil?
end
def preload_keys(object, references, value)
return object unless value
references.each do |key|
attribute = "#{key.delete_suffix('_id')}=".to_sym
next unless object.respond_to?(key) && object.respond_to?(attribute)
if object.read_attribute(key) == value&.id
object.public_send(attribute, value) # rubocop:disable GitlabSecurity/PublicSend
end
end
object
end
end
end
end
# frozen_string_literal: true
module Gitlab
module ImportExport
class ProjectTreeLoader
def load(path, dedup_entries: false)
tree_hash = ActiveSupport::JSON.decode(IO.read(path))
if dedup_entries
dedup_tree(tree_hash)
else
tree_hash
end
end
private
# This function removes duplicate entries from the given tree recursively
# by caching nodes it encounters repeatedly. We only consider nodes for
# which there can actually be multiple equivalent instances (e.g. strings,
# hashes and arrays, but not `nil`s, numbers or booleans.)
#
# The algorithm uses a recursive depth-first descent with 3 cases, starting
# with a root node (the tree/hash itself):
# - a node has already been cached; in this case we return it from the cache
# - a node has not been cached yet but should be; descend into its children
# - a node is neither cached nor qualifies for caching; this is a no-op
def dedup_tree(node, nodes_seen = {})
if nodes_seen.key?(node) && distinguishable?(node)
yield nodes_seen[node]
elsif should_dedup?(node)
nodes_seen[node] = node
case node
when Array
node.each_index do |idx|
dedup_tree(node[idx], nodes_seen) do |cached_node|
node[idx] = cached_node
end
end
when Hash
node.each do |k, v|
dedup_tree(v, nodes_seen) do |cached_node|
node[k] = cached_node
end
end
end
else
node
end
end
# We do not need to consider nodes for which there cannot be multiple instances
def should_dedup?(node)
node && !(node.is_a?(Numeric) || node.is_a?(TrueClass) || node.is_a?(FalseClass))
end
# We can only safely de-dup values that are distinguishable. True value objects
# are always distinguishable by nature. Hashes however can represent entities,
# which are identified by ID, not value. We therefore disallow de-duping hashes
# that do not have an `id` field, since we might risk dropping entities that
# have equal attributes yet different identities.
def distinguishable?(node)
if node.is_a?(Hash)
node.key?('id')
else
true
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module ImportExport
class ProjectTreeRestorer
LARGE_PROJECT_FILE_SIZE_BYTES = 500.megabyte
attr_reader :user
attr_reader :shared
attr_reader :project
def initialize(user:, shared:, project:)
@user = user
@shared = shared
@project = project
@tree_loader = ProjectTreeLoader.new
end
def restore
@tree_hash = read_tree_hash
@project_members = @tree_hash.delete('project_members')
RelationRenameService.rename(@tree_hash)
if relation_tree_restorer.restore
import_failure_service.with_retry(action: 'set_latest_merge_request_diff_ids!') do
@project.merge_requests.set_latest_merge_request_diff_ids!
end
true
else
false
end
rescue => e
@shared.error(e)
false
end
private
def large_project?(path)
File.size(path) >= LARGE_PROJECT_FILE_SIZE_BYTES
end
def read_tree_hash
path = File.join(@shared.export_path, 'project.json')
dedup_entries = large_project?(path) &&
Feature.enabled?(:dedup_project_import_metadata, project.group)
@tree_loader.load(path, dedup_entries: dedup_entries)
rescue => e
Rails.logger.error("Import/Export error: #{e.message}") # rubocop:disable Gitlab/RailsLogger
raise Gitlab::ImportExport::Error.new('Incorrect JSON format')
end
def relation_tree_restorer
@relation_tree_restorer ||= RelationTreeRestorer.new(
user: @user,
shared: @shared,
importable: @project,
tree_hash: @tree_hash,
object_builder: object_builder,
members_mapper: members_mapper,
relation_factory: relation_factory,
reader: reader
)
end
def members_mapper
@members_mapper ||= Gitlab::ImportExport::MembersMapper.new(exported_members: @project_members,
user: @user,
importable: @project)
end
def object_builder
Gitlab::ImportExport::GroupProjectObjectBuilder
end
def relation_factory
Gitlab::ImportExport::ProjectRelationFactory
end
def reader
@reader ||= Gitlab::ImportExport::Reader.new(shared: @shared)
end
def import_failure_service
@import_failure_service ||= ImportFailureService.new(@project)
end
end
end
end
# frozen_string_literal: true
module Gitlab
module ImportExport
class ProjectTreeSaver
attr_reader :full_path
def initialize(project:, current_user:, shared:, params: {})
@params = params
@project = project
@current_user = current_user
@shared = shared
@full_path = File.join(@shared.export_path, ImportExport.project_filename)
end
def save
project_tree = tree_saver.serialize(@project, reader.project_tree)
fix_project_tree(project_tree)
tree_saver.save(project_tree, @shared.export_path, ImportExport.project_filename)
true
rescue => e
@shared.error(e)
false
end
private
# Aware that the resulting hash needs to be pure-hash and
# does not include any AR objects anymore, only objects that run `.to_json`
def fix_project_tree(project_tree)
if @params[:description].present?
project_tree['description'] = @params[:description]
end
project_tree['project_members'] += group_members_array
RelationRenameService.add_new_associations(project_tree)
end
def reader
@reader ||= Gitlab::ImportExport::Reader.new(shared: @shared)
end
def group_members_array
group_members.as_json(reader.group_members_tree).each do |group_member|
group_member['source_type'] = 'Project' # Make group members project members of the future import
end
end
def group_members
return [] unless @current_user.can?(:admin_group, @project.group)
# We need `.where.not(user_id: nil)` here otherwise when a group has an
# invitee, it would make the following query return 0 rows since a NULL
# user_id would be present in the subquery
# See http://stackoverflow.com/questions/129077/not-in-clause-and-null-values
non_null_user_ids = @project.project_members.where.not(user_id: nil).select(:user_id)
GroupMembersFinder.new(@project.group).execute.where.not(user_id: non_null_user_ids)
end
def tree_saver
@tree_saver ||= RelationTreeSaver.new
end
end
end
end
...@@ -69,7 +69,7 @@ module Gitlab ...@@ -69,7 +69,7 @@ module Gitlab
def process_relation_item!(relation_key, relation_definition, relation_index, data_hash) def process_relation_item!(relation_key, relation_definition, relation_index, data_hash)
relation_object = build_relation(relation_key, relation_definition, data_hash) relation_object = build_relation(relation_key, relation_definition, data_hash)
return unless relation_object return unless relation_object
return if importable_class == Project && group_model?(relation_object) return if importable_class == ::Project && group_model?(relation_object)
relation_object.assign_attributes(importable_class_sym => @importable) relation_object.assign_attributes(importable_class_sym => @importable)
...@@ -110,7 +110,7 @@ module Gitlab ...@@ -110,7 +110,7 @@ module Gitlab
excluded_keys: excluded_keys_for_relation(importable_class_sym)) excluded_keys: excluded_keys_for_relation(importable_class_sym))
@importable.assign_attributes(params) @importable.assign_attributes(params)
@importable.drop_visibility_level! if importable_class == Project @importable.drop_visibility_level! if importable_class == ::Project
Gitlab::Timeless.timeless(@importable) do Gitlab::Timeless.timeless(@importable) do
@importable.save! @importable.save!
......
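The switch to `::Project` above is not cosmetic: now that a `Gitlab::ImportExport::Project` module exists, a bare `Project` constant inside `Gitlab::ImportExport` resolves to that module rather than to the ActiveRecord model. A standalone sketch of the constant lookup (illustrative only, not GitLab code):

```ruby
class Project; end # stand-in for the ActiveRecord model

module Gitlab
  module ImportExport
    module Project; end # the namespace introduced by this refactor

    class Demo
      def importable_model
        ::Project # always the top-level model
      end

      def namespace
        Project # resolves to Gitlab::ImportExport::Project
      end
    end
  end
end

Gitlab::ImportExport::Demo.new.importable_model # => Project
Gitlab::ImportExport::Demo.new.namespace        # => Gitlab::ImportExport::Project
```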
...@@ -1442,7 +1442,7 @@ TodoService ...@@ -1442,7 +1442,7 @@ TodoService
marks a single todo id as done marks a single todo id as done
caches the number of todos of a user caches the number of todos of a user
Gitlab::ImportExport::ProjectTreeSaver Gitlab::ImportExport::Project::TreeSaver
saves the project tree into a json object saves the project tree into a json object
saves project successfully saves project successfully
JSON JSON
......
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
require 'spec_helper' require 'spec_helper'
describe Gitlab::ImportExport::BaseObjectBuilder do describe Gitlab::ImportExport::Base::ObjectBuilder do
let(:project) do let(:project) do
create(:project, :repository, create(:project, :repository,
:builds_disabled, :builds_disabled,
...@@ -11,7 +11,7 @@ describe Gitlab::ImportExport::BaseObjectBuilder do ...@@ -11,7 +11,7 @@ describe Gitlab::ImportExport::BaseObjectBuilder do
path: 'project') path: 'project')
end end
let(:klass) { Milestone } let(:klass) { Milestone }
let(:attributes) { { 'title' => 'Test BaseObjectBuilder Milestone', 'project' => project } } let(:attributes) { { 'title' => 'Test Base::ObjectBuilder Milestone', 'project' => project } }
subject { described_class.build(klass, attributes) } subject { described_class.build(klass, attributes) }
......
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
require 'spec_helper' require 'spec_helper'
describe Gitlab::ImportExport::BaseRelationFactory do describe Gitlab::ImportExport::Base::RelationFactory do
let(:user) { create(:admin) } let(:user) { create(:admin) }
let(:project) { create(:project) } let(:project) { create(:project) }
let(:members_mapper) { double('members_mapper').as_null_object } let(:members_mapper) { double('members_mapper').as_null_object }
...@@ -13,7 +13,7 @@ describe Gitlab::ImportExport::BaseRelationFactory do ...@@ -13,7 +13,7 @@ describe Gitlab::ImportExport::BaseRelationFactory do
subject do subject do
described_class.create(relation_sym: relation_sym, described_class.create(relation_sym: relation_sym,
relation_hash: relation_hash, relation_hash: relation_hash,
object_builder: Gitlab::ImportExport::GroupProjectObjectBuilder, object_builder: Gitlab::ImportExport::Project::ObjectBuilder,
members_mapper: members_mapper, members_mapper: members_mapper,
user: user, user: user,
importable: project, importable: project,
......
...@@ -24,11 +24,11 @@ describe 'forked project import' do ...@@ -24,11 +24,11 @@ describe 'forked project import' do
end end
let(:saver) do let(:saver) do
Gitlab::ImportExport::ProjectTreeSaver.new(project: project_with_repo, current_user: user, shared: shared) Gitlab::ImportExport::Project::TreeSaver.new(project: project_with_repo, current_user: user, shared: shared)
end end
let(:restorer) do let(:restorer) do
Gitlab::ImportExport::ProjectTreeRestorer.new(user: user, shared: shared, project: project) Gitlab::ImportExport::Project::TreeRestorer.new(user: user, shared: shared, project: project)
end end
before do before do
......
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
require 'spec_helper' require 'spec_helper'
describe Gitlab::ImportExport::GroupObjectBuilder do describe Gitlab::ImportExport::Group::ObjectBuilder do
let(:group) { create(:group) } let(:group) { create(:group) }
let(:base_attributes) do let(:base_attributes) do
{ {
......
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
require 'spec_helper' require 'spec_helper'
describe Gitlab::ImportExport::GroupRelationFactory do describe Gitlab::ImportExport::Group::RelationFactory do
let(:group) { create(:group) } let(:group) { create(:group) }
let(:members_mapper) { double('members_mapper').as_null_object } let(:members_mapper) { double('members_mapper').as_null_object }
let(:user) { create(:admin) } let(:user) { create(:admin) }
...@@ -11,7 +11,7 @@ describe Gitlab::ImportExport::GroupRelationFactory do ...@@ -11,7 +11,7 @@ describe Gitlab::ImportExport::GroupRelationFactory do
described_class.create(relation_sym: relation_sym, described_class.create(relation_sym: relation_sym,
relation_hash: relation_hash, relation_hash: relation_hash,
members_mapper: members_mapper, members_mapper: members_mapper,
object_builder: Gitlab::ImportExport::GroupObjectBuilder, object_builder: Gitlab::ImportExport::Group::ObjectBuilder,
user: user, user: user,
importable: group, importable: group,
excluded_keys: excluded_keys) excluded_keys: excluded_keys)
......
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
require 'spec_helper' require 'spec_helper'
describe Gitlab::ImportExport::GroupTreeRestorer do describe Gitlab::ImportExport::Group::TreeRestorer do
include ImportExport::CommonUtil include ImportExport::CommonUtil
let(:shared) { Gitlab::ImportExport::Shared.new(group) } let(:shared) { Gitlab::ImportExport::Shared.new(group) }
......
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
require 'spec_helper' require 'spec_helper'
describe Gitlab::ImportExport::GroupTreeSaver do describe Gitlab::ImportExport::Group::TreeSaver do
describe 'saves the group tree into a json object' do describe 'saves the group tree into a json object' do
let(:shared) { Gitlab::ImportExport::Shared.new(group) } let(:shared) { Gitlab::ImportExport::Shared.new(group) }
let(:group_tree_saver) { described_class.new(group: group, current_user: user, shared: shared) } let(:group_tree_saver) { described_class.new(group: group, current_user: user, shared: shared) }
...@@ -72,7 +72,7 @@ describe Gitlab::ImportExport::GroupTreeSaver do ...@@ -72,7 +72,7 @@ describe Gitlab::ImportExport::GroupTreeSaver do
# except: # except:
# context 'with description override' do # context 'with description override' do
# context 'group members' do # context 'group members' do
# ^ These are specific for the groupTreeSaver # ^ These are specific for the Group::TreeSaver
context 'JSON' do context 'JSON' do
let(:saved_group_json) do let(:saved_group_json) do
group_tree_saver.save group_tree_saver.save
......
...@@ -63,7 +63,7 @@ describe Gitlab::ImportExport::Importer do ...@@ -63,7 +63,7 @@ describe Gitlab::ImportExport::Importer do
end end
it 'restores the ProjectTree' do it 'restores the ProjectTree' do
expect(Gitlab::ImportExport::ProjectTreeRestorer).to receive(:new).and_call_original expect(Gitlab::ImportExport::Project::TreeRestorer).to receive(:new).and_call_original
importer.execute importer.execute
end end
......
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
require 'spec_helper' require 'spec_helper'
describe Gitlab::ImportExport::GroupProjectObjectBuilder do describe Gitlab::ImportExport::Project::ObjectBuilder do
let!(:group) { create(:group, :private) } let!(:group) { create(:group, :private) }
let!(:subgroup) { create(:group, :private, parent: group) } let!(:subgroup) { create(:group, :private, parent: group) }
let!(:project) do let!(:project) do
......
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
require 'spec_helper' require 'spec_helper'
describe Gitlab::ImportExport::ProjectRelationFactory do describe Gitlab::ImportExport::Project::RelationFactory do
let(:group) { create(:group) } let(:group) { create(:group) }
let(:project) { create(:project, :repository, group: group) } let(:project) { create(:project, :repository, group: group) }
let(:members_mapper) { double('members_mapper').as_null_object } let(:members_mapper) { double('members_mapper').as_null_object }
...@@ -11,7 +11,7 @@ describe Gitlab::ImportExport::ProjectRelationFactory do ...@@ -11,7 +11,7 @@ describe Gitlab::ImportExport::ProjectRelationFactory do
let(:created_object) do let(:created_object) do
described_class.create(relation_sym: relation_sym, described_class.create(relation_sym: relation_sym,
relation_hash: relation_hash, relation_hash: relation_hash,
object_builder: Gitlab::ImportExport::GroupProjectObjectBuilder, object_builder: Gitlab::ImportExport::Project::ObjectBuilder,
members_mapper: members_mapper, members_mapper: members_mapper,
user: user, user: user,
importable: project, importable: project,
...@@ -243,11 +243,11 @@ describe Gitlab::ImportExport::ProjectRelationFactory do ...@@ -243,11 +243,11 @@ describe Gitlab::ImportExport::ProjectRelationFactory do
context 'Project references' do context 'Project references' do
let(:relation_sym) { :project_foo_model } let(:relation_sym) { :project_foo_model }
let(:relation_hash) do let(:relation_hash) do
Gitlab::ImportExport::ProjectRelationFactory::PROJECT_REFERENCES.map { |ref| { ref => 99 } }.inject(:merge) Gitlab::ImportExport::Project::RelationFactory::PROJECT_REFERENCES.map { |ref| { ref => 99 } }.inject(:merge)
end end
class ProjectFooModel < FooModel class ProjectFooModel < FooModel
attr_accessor(*Gitlab::ImportExport::ProjectRelationFactory::PROJECT_REFERENCES) attr_accessor(*Gitlab::ImportExport::Project::RelationFactory::PROJECT_REFERENCES)
end end
before do before do
......
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
require 'spec_helper' require 'spec_helper'
describe Gitlab::ImportExport::ProjectTreeLoader do describe Gitlab::ImportExport::Project::TreeLoader do
let(:fixture) { 'spec/fixtures/lib/gitlab/import_export/with_duplicates.json' } let(:fixture) { 'spec/fixtures/lib/gitlab/import_export/with_duplicates.json' }
let(:project_tree) { JSON.parse(File.read(fixture)) } let(:project_tree) { JSON.parse(File.read(fixture)) }
......
# frozen_string_literal: true # frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
include ImportExport::CommonUtil
describe Gitlab::ImportExport::ProjectTreeRestorer do describe Gitlab::ImportExport::Project::TreeRestorer do
include ImportExport::CommonUtil include ImportExport::CommonUtil
let(:shared) { project.import_export_shared } let(:shared) { project.import_export_shared }
......
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
require 'spec_helper' require 'spec_helper'
describe Gitlab::ImportExport::ProjectTreeSaver do describe Gitlab::ImportExport::Project::TreeSaver do
describe 'saves the project tree into a json object' do describe 'saves the project tree into a json object' do
let(:shared) { project.import_export_shared } let(:shared) { project.import_export_shared }
let(:project_tree_saver) { described_class.new(project: project, current_user: user, shared: shared) } let(:project_tree_saver) { described_class.new(project: project, current_user: user, shared: shared) }
...@@ -75,7 +75,7 @@ describe Gitlab::ImportExport::ProjectTreeSaver do ...@@ -75,7 +75,7 @@ describe Gitlab::ImportExport::ProjectTreeSaver do
# except: # except:
# context 'with description override' do # context 'with description override' do
# context 'group members' do # context 'group members' do
# ^ These are specific for the ProjectTreeSaver # ^ These are specific for the Project::TreeSaver
context 'JSON' do context 'JSON' do
let(:saved_project_json) do let(:saved_project_json) do
project_tree_saver.save project_tree_saver.save
......
...@@ -22,7 +22,7 @@ describe Gitlab::ImportExport::RelationRenameService do ...@@ -22,7 +22,7 @@ describe Gitlab::ImportExport::RelationRenameService do
end end
context 'when importing' do context 'when importing' do
let(:project_tree_restorer) { Gitlab::ImportExport::ProjectTreeRestorer.new(user: user, shared: shared, project: project) } let(:project_tree_restorer) { Gitlab::ImportExport::Project::TreeRestorer.new(user: user, shared: shared, project: project) }
let(:file_content) { IO.read(File.join(shared.export_path, 'project.json')) } let(:file_content) { IO.read(File.join(shared.export_path, 'project.json')) }
let(:json_file) { ActiveSupport::JSON.decode(file_content) } let(:json_file) { ActiveSupport::JSON.decode(file_content) }
...@@ -99,7 +99,7 @@ describe Gitlab::ImportExport::RelationRenameService do ...@@ -99,7 +99,7 @@ describe Gitlab::ImportExport::RelationRenameService do
let(:relation_tree_saver) { Gitlab::ImportExport::RelationTreeSaver.new } let(:relation_tree_saver) { Gitlab::ImportExport::RelationTreeSaver.new }
let(:project_tree_saver) do let(:project_tree_saver) do
Gitlab::ImportExport::ProjectTreeSaver.new( Gitlab::ImportExport::Project::TreeSaver.new(
project: project, current_user: user, shared: shared) project: project, current_user: user, shared: shared)
end end
......
...@@ -39,8 +39,8 @@ describe Gitlab::ImportExport::RelationTreeRestorer do ...@@ -39,8 +39,8 @@ describe Gitlab::ImportExport::RelationTreeRestorer do
context 'when restoring a project' do context 'when restoring a project' do
let(:path) { 'spec/fixtures/lib/gitlab/import_export/complex/project.json' } let(:path) { 'spec/fixtures/lib/gitlab/import_export/complex/project.json' }
let(:importable) { create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project') } let(:importable) { create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project') }
let(:object_builder) { Gitlab::ImportExport::GroupProjectObjectBuilder } let(:object_builder) { Gitlab::ImportExport::Project::ObjectBuilder }
let(:relation_factory) { Gitlab::ImportExport::ProjectRelationFactory } let(:relation_factory) { Gitlab::ImportExport::Project::RelationFactory }
let(:reader) { Gitlab::ImportExport::Reader.new(shared: shared) } let(:reader) { Gitlab::ImportExport::Reader.new(shared: shared) }
let(:tree_hash) { importable_hash } let(:tree_hash) { importable_hash }
......
...@@ -19,7 +19,7 @@ describe Groups::ImportExport::ExportService do ...@@ -19,7 +19,7 @@ describe Groups::ImportExport::ExportService do
end end
it 'saves the models' do it 'saves the models' do
expect(Gitlab::ImportExport::GroupTreeSaver).to receive(:new).and_call_original expect(Gitlab::ImportExport::Group::TreeSaver).to receive(:new).and_call_original
service.execute service.execute
end end
......
...@@ -27,7 +27,7 @@ describe Projects::ImportExport::ExportService do ...@@ -27,7 +27,7 @@ describe Projects::ImportExport::ExportService do
end end
it 'saves the models' do it 'saves the models' do
expect(Gitlab::ImportExport::ProjectTreeSaver).to receive(:new).and_call_original expect(Gitlab::ImportExport::Project::TreeSaver).to receive(:new).and_call_original
service.execute service.execute
end end
......
...@@ -34,13 +34,13 @@ module ImportExport ...@@ -34,13 +34,13 @@ module ImportExport
end end
def get_project_restorer(project, import_path) def get_project_restorer(project, import_path)
Gitlab::ImportExport::ProjectTreeRestorer.new( Gitlab::ImportExport::Project::TreeRestorer.new(
user: project.creator, shared: get_shared_env(path: import_path), project: project user: project.creator, shared: get_shared_env(path: import_path), project: project
) )
end end
def get_project_saver(project, export_path) def get_project_saver(project, export_path)
Gitlab::ImportExport::ProjectTreeSaver.new( Gitlab::ImportExport::Project::TreeSaver.new(
project: project, current_user: project.creator, shared: get_shared_env(path: export_path) project: project, current_user: project.creator, shared: get_shared_env(path: export_path)
) )
end end
......
...@@ -36,8 +36,8 @@ module ConfigurationHelper ...@@ -36,8 +36,8 @@ module ConfigurationHelper
end end
def relation_class_for_name(relation_name) def relation_class_for_name(relation_name)
relation_name = Gitlab::ImportExport::ProjectRelationFactory.overrides[relation_name.to_sym] || relation_name relation_name = Gitlab::ImportExport::Project::RelationFactory.overrides[relation_name.to_sym] || relation_name
Gitlab::ImportExport::ProjectRelationFactory.relation_class(relation_name) Gitlab::ImportExport::Project::RelationFactory.relation_class(relation_name)
end end
def parsed_attributes(relation_name, attributes, config: Gitlab::ImportExport.config_file) def parsed_attributes(relation_name, attributes, config: Gitlab::ImportExport.config_file)
......
# frozen_string_literal: true # frozen_string_literal: true
# Shared examples for ProjectTreeRestorer (shared to allow the testing # Shared examples for Project::TreeRestorer (shared to allow the testing
# of EE-specific features) # of EE-specific features)
RSpec.shared_examples 'restores project successfully' do |**results| RSpec.shared_examples 'restores project successfully' do |**results|
it 'restores the project' do it 'restores the project' do
......