Commit 92a06b97 authored by Dmytro Zaporozhets (DZ)

Merge branch 'georgekoltsov/bulk-import-epic-parent-child-association' into 'master'

Preserve Epic parent-child association when importing using Bulk Import

See merge request gitlab-org/gitlab!52662
parents 081562d2 242cc07a
---
title: Preserve Epic parent-child association when importing using Bulk Import
merge_request: 52662
author:
type: changed
......@@ -21,6 +21,7 @@ module EE
has_next_page: hasNextPage
}
nodes {
iid
title
description
state
......@@ -33,6 +34,14 @@ module EE
due_date_is_fixed: dueDateIsFixed
relative_position: relativePosition
confidential
parent {
iid
}
children {
nodes {
iid
}
}
}
}
}
......
......@@ -5,16 +5,20 @@ module EE
module Groups
module Loaders
# Persists epic attribute hashes produced by the epics import pipeline.
class EpicsLoader
  # Raised when the importing user lacks permission to create epics
  # in the destination group.
  NotAllowedError = Class.new(StandardError)

  def initialize(options = {})
    @options = options
  end

  # Creates an epic record from the transformed +data+ hash.
  #
  # Use `Epic` directly when creating new epics instead of
  # `Epics::CreateService`, since several attributes like author_id
  # (which might not be current_user), group_id, parent and children
  # need to be custom set.
  #
  # @param context [BulkImports::Pipeline::Context] exposes current_user and entity
  # @param data [Hash] epic attributes, already transformed upstream
  # @raise [NotAllowedError] if current_user cannot create epics in the group
  def load(context, data)
    # Authorize BEFORE creating anything — the permission check must
    # never run after a record has been persisted.
    raise NotAllowedError unless context.current_user.can?(:create_epic, context.entity.group)

    ::Epic.create!(data)
  end
end
end
......
......@@ -11,6 +11,7 @@ module EE
query: EE::BulkImports::Groups::Graphql::GetEpicsQuery
transformer ::BulkImports::Common::Transformers::ProhibitedAttributesTransformer
transformer EE::BulkImports::Groups::Transformers::EpicAttributesTransformer
loader EE::BulkImports::Groups::Loaders::EpicsLoader
......
# frozen_string_literal: true
module EE
  module BulkImports
    module Groups
      module Transformers
        # Enriches a raw epic attribute hash (as extracted from the GraphQL
        # API) with destination-side values: the target group id, the
        # importing user as author, and the parent/children epic records
        # looked up by iid in the destination group.
        class EpicAttributesTransformer
          def initialize(*args); end

          # Returns a new hash: +data+ plus the resolved associations.
          # Missing parent iid yields a nil 'parent'; missing children
          # nodes yield an empty 'children' collection.
          def transform(context, data)
            data.merge(
              'group_id' => context.entity.namespace_id,
              'author_id' => context.current_user.id,
              'parent' => parent_epic(context, data),
              'children' => child_epics(context, data)
            )
          end

          private

          # Resolve the parent epic (if any) by its source iid.
          def parent_epic(context, data)
            context.entity.group.epics.find_by_iid(data.dig('parent', 'iid'))
          end

          # Resolve child epics from the GraphQL nodes list; tolerates a
          # missing/nil 'children' payload.
          def child_epics(context, data)
            iids = Array.wrap(data.dig('children', 'nodes')).filter_map { |node| node['iid'] }

            context.entity.group.epics.where(iid: iids) # rubocop: disable CodeReuse/ActiveRecord
          end
        end
      end
    end
  end
end
......@@ -6,18 +6,25 @@ RSpec.describe EE::BulkImports::Groups::Loaders::EpicsLoader do
describe '#load' do
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:entity) { create(:bulk_import_entity, group: group) }
let(:bulk_import) { create(:bulk_import, user: user) }
let(:entity) { create(:bulk_import_entity, bulk_import: bulk_import, group: group) }
let(:context) { BulkImports::Pipeline::Context.new(entity) }
let(:data) do
{
'title' => 'epic1',
'state' => 'opened',
'confidential' => false
'confidential' => false,
'iid' => 1,
'author_id' => user.id,
'group_id' => group.id
}
end
subject { described_class.new }
before do
stub_licensed_features(epics: true)
group.add_owner(user)
end
it 'creates the epic' do
expect { subject.load(context, data) }.to change(::Epic, :count).by(1)
......
......@@ -6,18 +6,25 @@ RSpec.describe EE::BulkImports::Groups::Pipelines::EpicsPipeline do
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:cursor) { 'cursor' }
let(:bulk_import) { create(:bulk_import, user: user) }
let(:entity) do
create(
:bulk_import_entity,
source_full_path: 'source/full/path',
destination_name: 'My Destination Group',
destination_namespace: group.full_path,
group: group
group: group,
bulk_import: bulk_import
)
end
let(:context) { BulkImports::Pipeline::Context.new(entity) }
before do
stub_licensed_features(epics: true)
group.add_owner(user)
end
describe '#run' do
it 'imports group epics into destination group' do
first_page = extractor_data(has_next_page: true, cursor: cursor)
......@@ -82,7 +89,8 @@ RSpec.describe EE::BulkImports::Groups::Pipelines::EpicsPipeline do
it 'has transformers' do
expect(described_class.transformers)
.to contain_exactly(
{ klass: BulkImports::Common::Transformers::ProhibitedAttributesTransformer, options: nil }
{ klass: BulkImports::Common::Transformers::ProhibitedAttributesTransformer, options: nil },
{ klass: EE::BulkImports::Groups::Transformers::EpicAttributesTransformer, options: nil }
)
end
......
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe EE::BulkImports::Groups::Transformers::EpicAttributesTransformer do
  describe '#transform' do
    let(:user) { create(:user) }
    let(:group) { create(:group, name: 'My Source Group') }

    # Verified doubles stand in for the pipeline context: the transformer
    # only reads `entity.group`, `entity.namespace_id` and `current_user`.
    let(:entity) do
      instance_double(
        BulkImports::Entity,
        group: group,
        namespace_id: group.id
      )
    end

    let(:context) do
      instance_double(
        BulkImports::Pipeline::Context,
        current_user: user,
        entity: entity
      )
    end

    # Mirrors the shape of a GraphQL epic node: parent/children associations
    # arrive as iid references, not records. parent_iid/child_iid are
    # defined per-context below.
    let(:data) do
      {
        'iid' => '7',
        'title' => 'Epic Title',
        'description' => 'Epic Description',
        'state' => 'opened',
        'parent' => {
          'iid' => parent_iid
        },
        'children' => {
          'nodes' => [
            {
              'iid' => child_iid
            }
          ]
        }
      }
    end

    context 'when parent and child iids are nil' do
      let(:parent_iid) { nil }
      let(:child_iid) { nil }

      it 'sets group_id, author_id from context' do
        transformed_data = subject.transform(context, data)

        expect(transformed_data['group_id']).to eq(group.id)
        expect(transformed_data['author_id']).to eq(user.id)
        # A nil iid must not resolve to any epic.
        expect(transformed_data['parent']).to be_nil
      end
    end

    context 'when parent and child iids are present' do
      # Epics must live in the destination group for the iid lookup to match.
      let(:parent) { create(:epic, group: group) }
      let(:child) { create(:epic, group: group) }
      let(:parent_iid) { parent.iid }
      let(:child_iid) { child.iid }

      it 'sets parent and child epics' do
        transformed_data = subject.transform(context, data)

        expect(transformed_data['parent']).to eq(parent)
        expect(transformed_data['children']).to contain_exactly(child)
      end
    end
  end
end
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.