Commit d8db3d37 authored by George Koltsov

Add empty project migration to Group Migration

  - GitLab Group Migration now includes empty projects
    migrated from the source GitLab instance when the
    bulk_import_projects feature flag is enabled
parent 809fe53f
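For reference, a minimal sketch of toggling the feature flag that gates this behaviour (assuming a Rails console on the destination GitLab instance; not part of this commit):

# Illustrative only: run in a Rails console on the destination instance.
Feature.enable(:bulk_import_projects)   # migrate projects alongside the group
Feature.disable(:bulk_import_projects)  # groups-only migration (the default)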
......@@ -78,6 +78,30 @@ class BulkImports::Entity < ApplicationRecord
ERB::Util.url_encode(source_full_path)
end
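# Returns the [stage, pipeline] pairs to run for this entity, based on its source type.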
def pipelines
@pipelines ||= case source_type
when 'group_entity'
BulkImports::Groups::Stage.pipelines
when 'project_entity'
BulkImports::Projects::Stage.pipelines
end
end
def pipeline_exists?(name)
pipelines.any? { |_, pipeline| pipeline.to_s == name.to_s }
end
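# Creates one tracker record per pipeline so progress can be followed per stage.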
def create_pipeline_trackers!
self.class.transaction do
pipelines.each do |stage, pipeline|
trackers.create!(
stage: stage,
pipeline_name: pipeline
)
end
end
end
private
def validate_parent_is_a_group
......
......@@ -34,8 +34,8 @@ class BulkImports::Tracker < ApplicationRecord
end
def pipeline_class
unless BulkImports::Stage.pipeline_exists?(pipeline_name)
raise NameError, "'#{pipeline_name}' is not a valid BulkImport Pipeline"
unless entity.pipeline_exists?(pipeline_name)
raise BulkImports::Error, "'#{pipeline_name}' is not a valid BulkImport Pipeline"
end
pipeline_name.constantize
......
......@@ -24,9 +24,9 @@ class BulkImportWorker # rubocop:disable Scalability/IdempotentWorker
@bulk_import.start! if @bulk_import.created?
created_entities.first(next_batch_size).each do |entity|
create_pipeline_tracker_for(entity)
entity.create_pipeline_trackers!
BulkImports::ExportRequestWorker.perform_async(entity.id)
BulkImports::ExportRequestWorker.perform_async(entity.id) if entity.group_entity?
BulkImports::EntityWorker.perform_async(entity.id)
entity.start!
......@@ -75,13 +75,4 @@ class BulkImportWorker # rubocop:disable Scalability/IdempotentWorker
def re_enqueue
BulkImportWorker.perform_in(PERFORM_DELAY, @bulk_import.id)
end
def create_pipeline_tracker_for(entity)
BulkImports::Stage.pipelines.each do |stage, pipeline|
entity.trackers.create!(
stage: stage,
pipeline_name: pipeline
)
end
end
end
---
name: bulk_import_projects
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/68873
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/339941
milestone: '14.3'
type: development
group: group::import
default_enabled: false
# frozen_string_literal: true
module EE
module BulkImports
module Groups
module Stage
extend ::Gitlab::Utils::Override
private
def ee_config
@ee_config ||= {
iterations: {
pipeline: ::BulkImports::Groups::Pipelines::IterationsPipeline,
stage: 1
},
epics: {
pipeline: ::BulkImports::Groups::Pipelines::EpicsPipeline,
stage: 2
},
# Override the CE stage value for the EntityFinisher Pipeline
finisher: {
stage: 4
}
}
end
override :config
def config
@config ||= super.deep_merge(ee_config)
end
end
end
end
end
# frozen_string_literal: true
module EE
module BulkImports
module Projects
module Stage
extend ::Gitlab::Utils::Override
private
def ee_config
@ee_config ||= {}
end
override :config
def config
@config ||= super.deep_merge(ee_config)
end
end
end
end
end
# frozen_string_literal: true
module EE
module BulkImports
module Stage
extend ::Gitlab::Utils::Override
EE_CONFIG = {
iterations: {
pipeline: ::BulkImports::Groups::Pipelines::IterationsPipeline,
stage: 1
},
epics: {
pipeline: ::BulkImports::Groups::Pipelines::EpicsPipeline,
stage: 2
},
# Override the CE stage value for the EntityFinisher Pipeline
finisher: {
stage: 4
}
}.freeze
private
override :config
def config
@config ||= super.deep_merge(EE_CONFIG)
end
end
end
end
# frozen_string_literal: true
require 'fast_spec_helper'
require 'spec_helper'
RSpec.describe BulkImports::Stage do
RSpec.describe BulkImports::Groups::Stage do
let(:pipelines) do
[
[0, BulkImports::Groups::Pipelines::GroupPipeline],
......@@ -13,15 +13,16 @@ RSpec.describe BulkImports::Stage do
[1, BulkImports::Groups::Pipelines::MilestonesPipeline],
[1, BulkImports::Groups::Pipelines::BadgesPipeline],
[1, BulkImports::Groups::Pipelines::IterationsPipeline],
[1, BulkImports::Groups::Pipelines::ProjectEntitiesPipeline],
[2, BulkImports::Groups::Pipelines::BoardsPipeline],
[2, BulkImports::Groups::Pipelines::EpicsPipeline],
[4, BulkImports::Groups::Pipelines::EntityFinisher]
[4, BulkImports::Common::Pipelines::EntityFinisher]
]
end
describe '#each' do
it 'iterates over all pipelines with the stage number' do
expect(described_class.pipelines).to match_array(pipelines)
expect(subject.pipelines).to match_array(pipelines)
end
end
end
# frozen_string_literal: true
module BulkImports
module Groups
module Common
module Pipelines
class EntityFinisher
def self.ndjson_pipeline?
......
# frozen_string_literal: true
module BulkImports
module Groups
module Graphql
module GetProjectsQuery
extend self
def to_s
<<-'GRAPHQL'
query($full_path: ID!, $cursor: String, $per_page: Int) {
group(fullPath: $full_path) {
projects(includeSubgroups: false, first: $per_page, after: $cursor) {
page_info: pageInfo {
next_page: endCursor
has_next_page: hasNextPage
}
nodes {
name
full_path: fullPath
}
}
}
}
GRAPHQL
end
def variables(context)
{
full_path: context.entity.source_full_path,
cursor: context.tracker.next_page,
per_page: ::BulkImports::Tracker::DEFAULT_PAGE_SIZE
}
end
def base_path
%w[data group projects]
end
def data_path
base_path << 'nodes'
end
def page_info_path
base_path << 'page_info'
end
end
end
end
end
# frozen_string_literal: true
module BulkImports
module Groups
module Pipelines
class ProjectEntitiesPipeline
include Pipeline
extractor Common::Extractors::GraphqlExtractor, query: Graphql::GetProjectsQuery
transformer Common::Transformers::ProhibitedAttributesTransformer
def transform(context, data)
{
source_type: :project_entity,
source_full_path: data['full_path'],
destination_name: data['name'],
destination_namespace: context.entity.group.full_path,
parent_id: context.entity.id
}
end
def load(context, data)
context.bulk_import.entities.create!(data)
end
end
end
end
end
# frozen_string_literal: true
module BulkImports
module Groups
class Stage < ::BulkImports::Stage
private
def config
@config ||= {
group: {
pipeline: BulkImports::Groups::Pipelines::GroupPipeline,
stage: 0
},
avatar: {
pipeline: BulkImports::Groups::Pipelines::GroupAvatarPipeline,
stage: 1
},
subgroups: {
pipeline: BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline,
stage: 1
},
members: {
pipeline: BulkImports::Groups::Pipelines::MembersPipeline,
stage: 1
},
labels: {
pipeline: BulkImports::Groups::Pipelines::LabelsPipeline,
stage: 1
},
milestones: {
pipeline: BulkImports::Groups::Pipelines::MilestonesPipeline,
stage: 1
},
badges: {
pipeline: BulkImports::Groups::Pipelines::BadgesPipeline,
stage: 1
},
boards: {
pipeline: BulkImports::Groups::Pipelines::BoardsPipeline,
stage: 2
},
finisher: {
pipeline: BulkImports::Common::Pipelines::EntityFinisher,
stage: 3
}
}.merge(project_entities_pipeline)
end
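# Project entities are only migrated when the bulk_import_projects feature flag is enabled.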
def project_entities_pipeline
if ::Feature.enabled?(:bulk_import_projects, default_enabled: :yaml)
{
project_entities: {
pipeline: BulkImports::Groups::Pipelines::ProjectEntitiesPipeline,
stage: 1
}
}
else
{}
end
end
end
end
end
::BulkImports::Groups::Stage.prepend_mod_with('BulkImports::Groups::Stage')
......@@ -69,8 +69,8 @@ module BulkImports
# Multiple transformers can be defined within a single
# pipeline and run sequentially for each record in the
# following order:
# - Transformers defined using `transformer` class method
# - Instance method `transform`
# - Transformers defined using `transformer` class method
#
# Instance method `transform` is always the last to run.
#
......
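To illustrate the transformer ordering described in the comment above, here is a minimal hypothetical pipeline (ExamplePipeline is not part of this commit); the class-level transformer runs first and the instance-level `transform` runs last:

# Hypothetical pipeline, for illustration only.
module BulkImports
  module Groups
    module Pipelines
      class ExamplePipeline
        include Pipeline

        # Declared transformers run first, in declaration order.
        transformer Common::Transformers::ProhibitedAttributesTransformer

        # The instance-level `transform` method always runs last.
        def transform(_context, data)
          data.slice('name', 'full_path')
        end
      end
    end
  end
end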
# frozen_string_literal: true
module BulkImports
module Projects
module Graphql
module GetProjectQuery
extend self
def to_s
<<-'GRAPHQL'
query($full_path: ID!) {
project(fullPath: $full_path) {
description
visibility
archived
created_at: createdAt
shared_runners_enabled: sharedRunnersEnabled
container_registry_enabled: containerRegistryEnabled
only_allow_merge_if_pipeline_succeeds: onlyAllowMergeIfPipelineSucceeds
only_allow_merge_if_all_discussions_are_resolved: onlyAllowMergeIfAllDiscussionsAreResolved
request_access_enabled: requestAccessEnabled
printing_merge_request_link_enabled: printingMergeRequestLinkEnabled
remove_source_branch_after_merge: removeSourceBranchAfterMerge
autoclose_referenced_issues: autocloseReferencedIssues
suggestion_commit_message: suggestionCommitMessage
wiki_enabled: wikiEnabled
}
}
GRAPHQL
end
def variables(context)
{ full_path: context.entity.source_full_path }
end
def base_path
%w[data project]
end
def data_path
base_path
end
def page_info_path
base_path << 'page_info'
end
end
end
end
end
# frozen_string_literal: true
module BulkImports
module Projects
module Pipelines
class ProjectPipeline
include Pipeline
abort_on_failure!
extractor ::BulkImports::Common::Extractors::GraphqlExtractor, query: Graphql::GetProjectQuery
transformer ::BulkImports::Common::Transformers::ProhibitedAttributesTransformer
transformer ::BulkImports::Projects::Transformers::ProjectAttributesTransformer
def load(context, data)
project = ::Projects::CreateService.new(context.current_user, data).execute
if project.persisted?
context.entity.update!(project: project)
project
else
raise(::BulkImports::Error, "Unable to import project #{project.full_path}. #{project.errors.full_messages}.")
end
end
end
end
end
end
# frozen_string_literal: true
module BulkImports
module Projects
class Stage < ::BulkImports::Stage
private
def config
@config ||= {
group: {
pipeline: BulkImports::Projects::Pipelines::ProjectPipeline,
stage: 0
},
finisher: {
pipeline: BulkImports::Common::Pipelines::EntityFinisher,
stage: 1
}
}
end
end
end
end
::BulkImports::Projects::Stage.prepend_mod_with('BulkImports::Projects::Stage')
# frozen_string_literal: true
module BulkImports
module Projects
module Transformers
class ProjectAttributesTransformer
PROJECT_IMPORT_TYPE = 'gitlab_project_migration'
def transform(context, data)
entity = context.entity
visibility = data.delete('visibility')
data['name'] = entity.destination_name
data['path'] = entity.destination_name.parameterize
data['import_type'] = PROJECT_IMPORT_TYPE
data['visibility_level'] = Gitlab::VisibilityLevel.string_options[visibility] if visibility.present?
data['namespace_id'] = Namespace.find_by_full_path(entity.destination_namespace)&.id if entity.destination_namespace.present?
data.transform_keys!(&:to_sym)
end
end
end
end
end
......@@ -2,55 +2,8 @@
module BulkImports
class Stage
include Singleton
CONFIG = {
group: {
pipeline: BulkImports::Groups::Pipelines::GroupPipeline,
stage: 0
},
avatar: {
pipeline: BulkImports::Groups::Pipelines::GroupAvatarPipeline,
stage: 1
},
subgroups: {
pipeline: BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline,
stage: 1
},
members: {
pipeline: BulkImports::Groups::Pipelines::MembersPipeline,
stage: 1
},
labels: {
pipeline: BulkImports::Groups::Pipelines::LabelsPipeline,
stage: 1
},
milestones: {
pipeline: BulkImports::Groups::Pipelines::MilestonesPipeline,
stage: 1
},
badges: {
pipeline: BulkImports::Groups::Pipelines::BadgesPipeline,
stage: 1
},
boards: {
pipeline: BulkImports::Groups::Pipelines::BoardsPipeline,
stage: 2
},
finisher: {
pipeline: BulkImports::Groups::Pipelines::EntityFinisher,
stage: 3
}
}.freeze
def self.pipelines
instance.pipelines
end
def self.pipeline_exists?(name)
pipelines.any? do |(_, pipeline)|
pipeline.to_s == name.to_s
end
new.pipelines
end
def pipelines
......@@ -65,9 +18,8 @@ module BulkImports
private
def config
@config ||= CONFIG
# To be implemented in a sub-class
NotImplementedError
end
end
end
::BulkImports::Stage.prepend_mod_with('BulkImports::Stage')
......@@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe BulkImports::Groups::Pipelines::EntityFinisher do
RSpec.describe BulkImports::Common::Pipelines::EntityFinisher do
it 'updates the entity status to finished' do
entity = create(:bulk_import_entity, :started)
pipeline_tracker = create(:bulk_import_tracker, entity: entity)
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::Groups::Graphql::GetProjectsQuery do
describe '#variables' do
it 'returns valid variables based on entity information' do
tracker = create(:bulk_import_tracker)
context = BulkImports::Pipeline::Context.new(tracker)
query = GraphQL::Query.new(
GitlabSchema,
described_class.to_s,
variables: described_class.variables(context)
)
result = GitlabSchema.static_validator.validate(query)
expect(result[:errors]).to be_empty
end
context 'with invalid variables' do
it 'raises an error' do
expect { GraphQL::Query.new(GitlabSchema, described_class.to_s, variables: 'invalid') }.to raise_error(ArgumentError)
end
end
end
describe '#data_path' do
it 'returns data path' do
expected = %w[data group projects nodes]
expect(described_class.data_path).to eq(expected)
end
end
describe '#page_info_path' do
it 'returns pagination information path' do
expected = %w[data group projects page_info]
expect(described_class.page_info_path).to eq(expected)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::Groups::Pipelines::ProjectEntitiesPipeline do
let_it_be(:user) { create(:user) }
let_it_be(:destination_group) { create(:group) }
let_it_be(:entity) do
create(
:bulk_import_entity,
group: destination_group,
destination_namespace: destination_group.full_path
)
end
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
let(:extracted_data) do
BulkImports::Pipeline::ExtractedData.new(data: {
'name' => 'project',
'full_path' => 'group/project'
})
end
subject { described_class.new(context) }
describe '#run' do
before do
allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
allow(extractor).to receive(:extract).and_return(extracted_data)
end
destination_group.add_owner(user)
end
it 'creates project entity' do
expect { subject.run }.to change(BulkImports::Entity, :count).by(1)
project_entity = BulkImports::Entity.last
expect(project_entity.source_type).to eq('project_entity')
expect(project_entity.source_full_path).to eq('group/project')
expect(project_entity.destination_name).to eq('project')
expect(project_entity.destination_namespace).to eq(destination_group.full_path)
end
end
describe 'pipeline parts' do
it { expect(described_class).to include_module(BulkImports::Pipeline) }
it { expect(described_class).to include_module(BulkImports::Pipeline::Runner) }
it 'has extractors' do
expect(described_class.get_extractor).to eq(
klass: BulkImports::Common::Extractors::GraphqlExtractor,
options: {
query: BulkImports::Groups::Graphql::GetProjectsQuery
}
)
end
it 'has transformers' do
expect(described_class.transformers).to contain_exactly(
{ klass: BulkImports::Common::Transformers::ProhibitedAttributesTransformer, options: nil }
)
end
end
end
# frozen_string_literal: true
require 'fast_spec_helper'
require 'spec_helper'
RSpec.describe BulkImports::Stage do
RSpec.describe BulkImports::Groups::Stage do
let(:pipelines) do
[
[0, BulkImports::Groups::Pipelines::GroupPipeline],
......@@ -19,18 +19,21 @@ RSpec.describe BulkImports::Stage do
describe '.pipelines' do
it 'lists all the pipelines with their stage number, ordered by stage' do
expect(described_class.pipelines & pipelines).to eq(pipelines)
expect(described_class.pipelines.last.last).to eq(BulkImports::Groups::Pipelines::EntityFinisher)
expect(described_class.pipelines.last.last).to eq(BulkImports::Common::Pipelines::EntityFinisher)
end
end
describe '.pipeline_exists?' do
it 'returns true when the given pipeline name exists in the pipelines list' do
expect(described_class.pipeline_exists?(BulkImports::Groups::Pipelines::GroupPipeline)).to eq(true)
expect(described_class.pipeline_exists?('BulkImports::Groups::Pipelines::GroupPipeline')).to eq(true)
it 'includes project entities pipeline' do
stub_feature_flags(bulk_import_projects: true)
expect(described_class.pipelines).to include([1, BulkImports::Groups::Pipelines::ProjectEntitiesPipeline])
end
it 'returns false when the given pipeline name exists in the pipelines list' do
expect(described_class.pipeline_exists?('BulkImports::Groups::Pipelines::InexistentPipeline')).to eq(false)
context 'when bulk_import_projects feature flag is disabled' do
it 'does not include project entities pipeline' do
stub_feature_flags(bulk_import_projects: false)
expect(described_class.pipelines.flatten).not_to include(BulkImports::Groups::Pipelines::ProjectEntitiesPipeline)
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::Projects::Pipelines::ProjectPipeline do
describe '#run' do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:bulk_import) { create(:bulk_import, user: user) }
let_it_be(:entity) do
create(
:bulk_import_entity,
source_type: :project_entity,
bulk_import: bulk_import,
source_full_path: 'source/full/path',
destination_name: 'My Destination Project',
destination_namespace: group.full_path
)
end
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
let(:project_data) do
{
'visibility' => 'private',
'created_at' => 10.days.ago,
'archived' => false,
'shared_runners_enabled' => true,
'container_registry_enabled' => true,
'only_allow_merge_if_pipeline_succeeds' => true,
'only_allow_merge_if_all_discussions_are_resolved' => true,
'request_access_enabled' => true,
'printing_merge_request_link_enabled' => true,
'remove_source_branch_after_merge' => true,
'autoclose_referenced_issues' => true,
'suggestion_commit_message' => 'message',
'wiki_enabled' => true
}
end
subject(:project_pipeline) { described_class.new(context) }
before do
allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: project_data))
end
group.add_owner(user)
end
it 'imports new project into destination group', :aggregate_failures do
expect { project_pipeline.run }.to change { Project.count }.by(1)
project_path = 'my-destination-project'
imported_project = Project.find_by_path(project_path)
expect(imported_project).not_to be_nil
expect(imported_project.group).to eq(group)
expect(imported_project.suggestion_commit_message).to eq('message')
expect(imported_project.archived?).to eq(project_data['archived'])
expect(imported_project.shared_runners_enabled?).to eq(project_data['shared_runners_enabled'])
expect(imported_project.container_registry_enabled?).to eq(project_data['container_registry_enabled'])
expect(imported_project.only_allow_merge_if_pipeline_succeeds?).to eq(project_data['only_allow_merge_if_pipeline_succeeds'])
expect(imported_project.only_allow_merge_if_all_discussions_are_resolved?).to eq(project_data['only_allow_merge_if_all_discussions_are_resolved'])
expect(imported_project.request_access_enabled?).to eq(project_data['request_access_enabled'])
expect(imported_project.printing_merge_request_link_enabled?).to eq(project_data['printing_merge_request_link_enabled'])
expect(imported_project.remove_source_branch_after_merge?).to eq(project_data['remove_source_branch_after_merge'])
expect(imported_project.autoclose_referenced_issues?).to eq(project_data['autoclose_referenced_issues'])
expect(imported_project.wiki_enabled?).to eq(project_data['wiki_enabled'])
end
end
describe 'pipeline parts' do
it { expect(described_class).to include_module(BulkImports::Pipeline) }
it { expect(described_class).to include_module(BulkImports::Pipeline::Runner) }
it 'has extractors' do
expect(described_class.get_extractor)
.to eq(
klass: BulkImports::Common::Extractors::GraphqlExtractor,
options: { query: BulkImports::Projects::Graphql::GetProjectQuery }
)
end
it 'has transformers' do
expect(described_class.transformers)
.to contain_exactly(
{ klass: BulkImports::Common::Transformers::ProhibitedAttributesTransformer, options: nil },
{ klass: BulkImports::Projects::Transformers::ProjectAttributesTransformer, options: nil }
)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::Projects::Stage do
let(:pipelines) do
[
[0, BulkImports::Projects::Pipelines::ProjectPipeline],
[1, BulkImports::Common::Pipelines::EntityFinisher]
]
end
describe '.pipelines' do
it 'lists all the pipelines with their stage number, ordered by stage' do
expect(described_class.pipelines).to eq(pipelines)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::Projects::Transformers::ProjectAttributesTransformer do
describe '#transform' do
let_it_be(:user) { create(:user) }
let_it_be(:destination_group) { create(:group) }
let_it_be(:project) { create(:project, name: 'My Source Project') }
let_it_be(:bulk_import) { create(:bulk_import, user: user) }
let_it_be(:entity) do
create(
:bulk_import_entity,
source_type: :project_entity,
bulk_import: bulk_import,
source_full_path: 'source/full/path',
destination_name: 'Destination Project Name',
destination_namespace: destination_group.full_path
)
end
let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
let(:data) do
{
'name' => 'source_name',
'visibility' => 'private'
}
end
subject(:transformed_data) { described_class.new.transform(context, data) }
it 'transforms name to destination name' do
expect(transformed_data[:name]).to eq(entity.destination_name)
end
it 'adds path as parameterized name' do
expect(transformed_data[:path]).to eq(entity.destination_name.parameterize)
end
it 'transforms visibility level' do
visibility = data['visibility']
expect(transformed_data).not_to have_key(:visibility)
expect(transformed_data[:visibility_level]).to eq(Gitlab::VisibilityLevel.string_options[visibility])
end
it 'adds import type' do
expect(transformed_data[:import_type]).to eq(described_class::PROJECT_IMPORT_TYPE)
end
describe 'namespace_id' do
context 'when destination namespace is present' do
it 'adds namespace_id' do
expect(transformed_data[:namespace_id]).to eq(destination_group.id)
end
end
context 'when destination namespace is blank' do
it 'does not add namespace_id key' do
entity = create(
:bulk_import_entity,
source_type: :project_entity,
bulk_import: bulk_import,
source_full_path: 'source/full/path',
destination_name: 'Destination Project Name',
destination_namespace: ''
)
context = double(entity: entity)
expect(described_class.new.transform(context, data)).not_to have_key(:namespace_id)
end
end
end
it 'converts all keys to symbols' do
expect(transformed_data.keys).to contain_exactly(:name, :path, :import_type, :visibility_level, :namespace_id)
end
end
end
......@@ -154,4 +154,57 @@ RSpec.describe BulkImports::Entity, type: :model do
expect(described_class.all_human_statuses).to contain_exactly('created', 'started', 'finished', 'failed')
end
end
describe '#pipelines' do
context 'when entity is group' do
it 'returns group pipelines' do
entity = build(:bulk_import_entity, :group_entity)
expect(entity.pipelines.flatten).to include(BulkImports::Groups::Pipelines::GroupPipeline)
end
end
context 'when entity is project' do
it 'returns project pipelines' do
entity = build(:bulk_import_entity, :project_entity)
expect(entity.pipelines.flatten).to include(BulkImports::Projects::Pipelines::ProjectPipeline)
end
end
end
describe '#create_pipeline_trackers!' do
context 'when entity is group' do
it 'creates trackers for group entity' do
entity = create(:bulk_import_entity, :group_entity)
entity.create_pipeline_trackers!
expect(entity.trackers.count).to eq(BulkImports::Groups::Stage.pipelines.count)
expect(entity.trackers.map(&:pipeline_name)).to include(BulkImports::Groups::Pipelines::GroupPipeline.to_s)
end
end
context 'when entity is project' do
it 'creates trackers for project entity' do
entity = create(:bulk_import_entity, :project_entity)
entity.create_pipeline_trackers!
expect(entity.trackers.count).to eq(BulkImports::Projects::Stage.pipelines.count)
expect(entity.trackers.map(&:pipeline_name)).to include(BulkImports::Projects::Pipelines::ProjectPipeline.to_s)
end
end
end
describe '#pipeline_exists?' do
let_it_be(:entity) { create(:bulk_import_entity, :group_entity) }
it 'returns true when the given pipeline name exists in the pipelines list' do
expect(entity.pipeline_exists?(BulkImports::Groups::Pipelines::GroupPipeline)).to eq(true)
expect(entity.pipeline_exists?('BulkImports::Groups::Pipelines::GroupPipeline')).to eq(true)
end
it 'returns false when the given pipeline name does not exist in the pipelines list' do
expect(entity.pipeline_exists?('BulkImports::Groups::Pipelines::InexistentPipeline')).to eq(false)
end
end
end
......@@ -66,7 +66,7 @@ RSpec.describe BulkImports::Tracker, type: :model do
describe '#pipeline_class' do
it 'returns the pipeline class' do
pipeline_class = BulkImports::Stage.pipelines.first[1]
pipeline_class = BulkImports::Groups::Stage.pipelines.first[1]
tracker = create(:bulk_import_tracker, pipeline_name: pipeline_class)
expect(tracker.pipeline_class).to eq(pipeline_class)
......@@ -77,7 +77,7 @@ RSpec.describe BulkImports::Tracker, type: :model do
expect { tracker.pipeline_class }
.to raise_error(
NameError,
BulkImports::Error,
"'InexistingPipeline' is not a valid BulkImport Pipeline"
)
end
......
......@@ -84,17 +84,20 @@ RSpec.describe BulkImportWorker do
expect { subject.perform(bulk_import.id) }
.to change(BulkImports::Tracker, :count)
.by(BulkImports::Stage.pipelines.size * 2)
.by(BulkImports::Groups::Stage.pipelines.size * 2)
expect(entity_1.trackers).not_to be_empty
expect(entity_2.trackers).not_to be_empty
end
context 'when there are created entities to process' do
it 'marks a batch of entities as started, enqueues EntityWorker, ExportRequestWorker and reenqueues' do
let_it_be(:bulk_import) { create(:bulk_import, :created) }
before do
stub_const("#{described_class}::DEFAULT_BATCH_SIZE", 1)
end
bulk_import = create(:bulk_import, :created)
it 'marks a batch of entities as started, enqueues EntityWorker, ExportRequestWorker and reenqueues' do
create(:bulk_import_entity, :created, bulk_import: bulk_import)
create(:bulk_import_entity, :created, bulk_import: bulk_import)
......@@ -106,6 +109,16 @@ RSpec.describe BulkImportWorker do
expect(bulk_import.entities.map(&:status_name)).to contain_exactly(:created, :started)
end
context 'when there are project entities to process' do
it 'does not enqueue ExportRequestWorker' do
create(:bulk_import_entity, :created, :project_entity, bulk_import: bulk_import)
expect(BulkImports::ExportRequestWorker).not_to receive(:perform_async)
subject.perform(bulk_import.id)
end
end
end
context 'when exception occurs' do
......
......@@ -21,6 +21,10 @@ RSpec.describe BulkImports::PipelineWorker do
before do
stub_const('FakePipeline', pipeline_class)
allow(BulkImports::Groups::Stage)
.to receive(:pipelines)
.and_return([[0, pipeline_class]])
end
it 'runs the given pipeline successfully' do
......@@ -30,12 +34,6 @@ RSpec.describe BulkImports::PipelineWorker do
pipeline_name: 'FakePipeline'
)
expect(BulkImports::Stage)
.to receive(:pipeline_exists?)
.with('FakePipeline')
.twice
.and_return(true)
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
expect(logger)
.to receive(:info)
......@@ -110,7 +108,7 @@ RSpec.describe BulkImports::PipelineWorker do
expect(Gitlab::ErrorTracking)
.to receive(:track_exception)
.with(
instance_of(NameError),
instance_of(BulkImports::Error),
entity_id: entity.id,
pipeline_name: pipeline_tracker.pipeline_name
)
......@@ -157,10 +155,10 @@ RSpec.describe BulkImports::PipelineWorker do
before do
stub_const('NdjsonPipeline', ndjson_pipeline)
allow(BulkImports::Stage)
.to receive(:pipeline_exists?)
.with('NdjsonPipeline')
.and_return(true)
allow(BulkImports::Groups::Stage)
.to receive(:pipelines)
.and_return([[0, ndjson_pipeline]])
end
it 'runs the pipeline successfully' do
......