Commit 515763c1 authored by Stan Hu


Merge branch 'kassio/bulkimports-avoid-exception-in-epics-pipelines-when-group-does-not-exist' into 'master'

BulkImports: Avoid exception in EpicsPipeline when there's no Group

See merge request gitlab-org/gitlab!58949
parents 5eccde09 b70dfdb9
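
Before the diff itself, a minimal, self-contained Ruby sketch of the pattern this change introduces: bail out of initialization when the context has no group (so `.epics` is never called on nil), and pick a skip message that distinguishes a missing group from a group without epics. The names here (`Group`, `Epic`, `Context`, `EpicPipelineSketch`, `skip!`) are illustrative stand-ins, not the real BulkImports classes, and `set_next_epic` plus the real structured logging are omitted.

# Illustrative sketch only -- simplified stand-ins for the real BulkImports pipeline classes.
Group   = Struct.new(:epics)
Epic    = Struct.new(:iid)
Context = Struct.new(:group, :extra)

class EpicPipelineSketch
  def initialize(context)
    @context = context

    # Guard added by this MR: without a group there is nothing to preload,
    # and calling `.epics` on nil would raise NoMethodError.
    return if context.group.nil?

    @epic_iids = context.group.epics.map(&:iid).sort.reverse
  end

  def run
    return skip!(skip_reason) if current_epic_iid.nil?

    puts "processing epic ##{current_epic_iid}"
  end

  private

  def current_epic_iid
    @context.extra[:epic_iid]
  end

  # Tell "bulk import has no group" apart from "group has no epics".
  def skip_reason
    if @context.group.nil?
      'Skipping because bulk import has no group'
    else
      'Skipping because group has no epics'
    end
  end

  def skip!(reason)
    puts reason
  end
end

# Usage of the sketch:
EpicPipelineSketch.new(Context.new(nil, {})).run
# => Skipping because bulk import has no group
EpicPipelineSketch.new(Context.new(Group.new([]), {})).run
# => Skipping because group has no epics
EpicPipelineSketch.new(Context.new(Group.new([Epic.new(1)]), { epic_iid: 1 })).run
# => processing epic #1
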
@@ -9,13 +9,15 @@ module EE
        def initialize(context)
          super(context)

+          return if context.group.blank?
+
          @epic_iids = context.group.epics.order(iid: :desc).pluck(:iid) # rubocop: disable CodeReuse/ActiveRecord

          set_next_epic
        end

        def run
-          return skip!('Skipping because group has no epics') if current_epic_iid.blank?
+          return skip!(skip_reason) if current_epic_iid.blank?

          super
        end
@@ -43,6 +45,14 @@ module EE
        def current_epic_iid
          context.extra[:epic_iid]
        end
+
+        def skip_reason
+          if context.group.blank?
+            'Skipping because bulk import has no group'
+          else
+            'Skipping because group has no epics'
+          end
+        end
      end
    end
  end
@@ -3,11 +3,6 @@
require 'spec_helper'

RSpec.describe EE::BulkImports::Pipeline::EpicBase do
-  let_it_be(:group) { create(:group) }
-  let_it_be(:entity) { create(:bulk_import_entity, group: group) }
-  let_it_be(:pipeline_tracker) { create(:bulk_import_tracker, entity: entity) }
-  let_it_be(:context) { BulkImports::Pipeline::Context.new(pipeline_tracker) }
-
  let(:pipeline_class) do
    Class.new(EE::BulkImports::Pipeline::EpicBase) do
      def extract(_)
@@ -30,16 +25,18 @@ RSpec.describe EE::BulkImports::Pipeline::EpicBase do
    stub_const('MyPipeline', pipeline_class)
  end

-  context 'when the group has no epics' do
+  context 'when the group was not imported' do
+    let_it_be(:pipeline_tracker) { create(:bulk_import_tracker) }
+    let_it_be(:context) { BulkImports::Pipeline::Context.new(pipeline_tracker) }
+
    it 'skips the pipeline' do
      expect_next_instance_of(Gitlab::Import::Logger) do |logger|
        expect(logger).to receive(:warn)
          .with(
-            bulk_import_id: entity.bulk_import.id,
-            bulk_import_entity_id: entity.id,
-            bulk_import_entity_type: entity.source_type,
-            context_extra: { epic_iid: nil },
-            message: 'Skipping because group has no epics',
+            bulk_import_id: pipeline_tracker.entity.bulk_import.id,
+            bulk_import_entity_id: pipeline_tracker.entity.id,
+            bulk_import_entity_type: pipeline_tracker.entity.source_type,
+            message: 'Skipping because bulk import has no group',
            pipeline_class: 'MyPipeline'
          )
      end
@@ -49,86 +46,112 @@ RSpec.describe EE::BulkImports::Pipeline::EpicBase do
    end
  end

-  context 'when the group has one epic with one page of data' do
-    it 'runs the pipeline for each epic' do
-      create(:epic, group: group)
+  context 'when the group was already imported' do
+    let_it_be(:group) { create(:group) }
+    let_it_be(:entity) { create(:bulk_import_entity, group: group) }
+    let_it_be(:pipeline_tracker) { create(:bulk_import_tracker, entity: entity) }
+    let_it_be(:context) { BulkImports::Pipeline::Context.new(pipeline_tracker) }
+
+    context 'when the group has no epics' do
+      it 'skips the pipeline' do
+        expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+          expect(logger).to receive(:warn)
+            .with(
+              bulk_import_id: entity.bulk_import.id,
+              bulk_import_entity_id: entity.id,
+              bulk_import_entity_type: entity.source_type,
+              context_extra: { epic_iid: nil },
+              message: 'Skipping because group has no epics',
+              pipeline_class: 'MyPipeline'
+            )
+        end
+
+        expect { subject.run }
+          .to change(pipeline_tracker, :status_name).to(:skipped)
+      end
+    end
+
+    context 'when the group has one epic with one page of data' do
+      it 'runs the pipeline for each epic' do
+        create(:epic, group: group)

        expect(subject)
          .to receive(:run)
          .once
          .and_call_original

        subject.run
      end
    end

    context 'when the group has one epic with multiple page of data' do
      it 'runs the pipeline for page' do
        create(:epic, group: group)

        first_page = ::BulkImports::Pipeline::ExtractedData.new(page_info: {
          'has_next_page' => true,
          'next_page' => 'page'
        })

        last_page = ::BulkImports::Pipeline::ExtractedData.new(page_info: {
          'has_next_page' => false
        })

        expect(subject)
          .to receive(:extract)
          .and_return(first_page, last_page)

        expect(subject)
          .to receive(:run)
          .twice
          .and_call_original

        subject.run
      end
    end

    context 'when the group has multiple epics with one page of data' do
      it 'runs the pipeline once for each epic' do
        create(:epic, group: group)
        create(:epic, group: group)

        expect(subject)
          .to receive(:run)
          .twice
          .and_call_original

        subject.run
      end
    end

    context 'when the group has multiple epics with multiple pages of data' do
      it 'runs the pipeline once for each page of each epic' do
        create(:epic, group: group)
        create(:epic, group: group)

        first_page = ::BulkImports::Pipeline::ExtractedData.new(page_info: {
          'has_next_page' => true,
          'next_page' => 'page'
        })

        last_page = ::BulkImports::Pipeline::ExtractedData.new(page_info: {
          'has_next_page' => false
        })

        expect(subject)
          .to receive(:extract)
          .and_return(
            first_page, # first epic
            last_page,  # first epic
            first_page, # last epic
            last_page   # last epic
          )

        expect(subject)
          .to receive(:run)
          .exactly(4).times
          .and_call_original

        subject.run
      end
    end
+  end
end