Commit 351392f4 authored by Nick Thomas's avatar Nick Thomas

Remove background migrations for old schemas

On the assumption that any background migration whose specs need a schema
older than 2018 is obsoleted by this migration squash, we can remove both
the specs and the code for the migrations that fail to run in CI because a
schema at that date no longer exists.

This holds for all of them except the MigrateStageStatus background
migration, which is also used by the MigrateBuildStage background migration.
parent 876d4151
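
The retained dependency has roughly the following shape; the sketch below is illustrative only, since the actual MigrateBuildStage implementation is not part of this diff and the call site shown is an assumption:

module Gitlab
  module BackgroundMigration
    class MigrateBuildStage
      def perform(start_id, stop_id)
        # ... create any missing ci_stages rows for builds in this range ...

        # Hypothetical call site: delegate to MigrateStageStatus to backfill
        # the statuses of the new stages, which is why that class must be kept.
        MigrateStageStatus.new.perform(start_id, stop_id)
      end
    end
  end
end
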
# frozen_string_literal: true
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
class CreateForkNetworkMembershipsRange
RESCHEDULE_DELAY = 15
class ForkedProjectLink < ActiveRecord::Base
self.table_name = 'forked_project_links'
end
def perform(start_id, end_id)
log("Creating memberships for forks: #{start_id} - #{end_id}")
insert_members(start_id, end_id)
if missing_members?(start_id, end_id)
BackgroundMigrationWorker.perform_in(RESCHEDULE_DELAY, "CreateForkNetworkMembershipsRange", [start_id, end_id])
end
end
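      # Illustrative scheduling sketch (assumed; the post-deploy migration
      # that first enqueues these jobs is not part of this diff): each job
      # covers a contiguous range of forked_project_links IDs and, via the
      # rescheduling above, re-enqueues itself until missing_members? is false.
      #
      #   (1..max_link_id).step(1_000) do |start_id|
      #     BackgroundMigrationWorker.perform_in(
      #       RESCHEDULE_DELAY, 'CreateForkNetworkMembershipsRange',
      #       [start_id, start_id + 999]
      #     )
      #   end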
def insert_members(start_id, end_id)
ActiveRecord::Base.connection.execute <<~INSERT_MEMBERS
INSERT INTO fork_network_members (fork_network_id, project_id, forked_from_project_id)
SELECT fork_network_members.fork_network_id,
forked_project_links.forked_to_project_id,
forked_project_links.forked_from_project_id
FROM forked_project_links
INNER JOIN fork_network_members
ON forked_project_links.forked_from_project_id = fork_network_members.project_id
WHERE forked_project_links.id BETWEEN #{start_id} AND #{end_id}
AND NOT EXISTS (
SELECT true
FROM fork_network_members existing_members
WHERE existing_members.project_id = forked_project_links.forked_to_project_id
)
INSERT_MEMBERS
rescue ActiveRecord::RecordNotUnique => e
# `fork_network_member` was created concurrently in another migration
log(e.message)
end
def missing_members?(start_id, end_id)
count_sql = <<~MISSING_MEMBERS
SELECT COUNT(*)
FROM forked_project_links
WHERE NOT EXISTS (
SELECT true
FROM fork_network_members
WHERE fork_network_members.project_id = forked_project_links.forked_to_project_id
)
AND EXISTS (
SELECT true
FROM projects
WHERE forked_project_links.forked_from_project_id = projects.id
)
AND NOT EXISTS (
SELECT true
FROM forked_project_links AS parent_links
WHERE parent_links.forked_to_project_id = forked_project_links.forked_from_project_id
AND NOT EXISTS (
SELECT true
FROM projects
WHERE parent_links.forked_from_project_id = projects.id
)
)
AND forked_project_links.id BETWEEN #{start_id} AND #{end_id}
MISSING_MEMBERS
ForkedProjectLink.count_by_sql(count_sql) > 0
end
def log(message)
Rails.logger.info("#{self.class.name} - #{message}")
end
end
end
end
# frozen_string_literal: true
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
class DeleteConflictingRedirectRoutesRange
def perform(start_id, end_id)
# No-op.
# See https://gitlab.com/gitlab-com/infrastructure/issues/3460#note_53223252
end
end
end
end
# frozen_string_literal: true
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
# Class that migrates events for the new push event payloads setup. All
# events are copied to a shadow table, and push events will also have a row
# created in the push_event_payloads table.
class MigrateEventsToPushEventPayloads
class Event < ActiveRecord::Base
self.table_name = 'events'
serialize :data
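        # A 'before' or 'after' SHA consisting of 40 zeroes in the push data
        # marks the ref as newly created or removed, respectively.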
BLANK_REF = ('0' * 40).freeze
TAG_REF_PREFIX = 'refs/tags/'.freeze
MAX_INDEX = 69
PUSHED = 5
def push_event?
action == PUSHED && data.present?
end
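        # First line of the most recently pushed commit's message, truncated
        # to 70 characters.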
def commit_title
commit = commits.last
return unless commit && commit[:message]
index = commit[:message].index("\n")
message = index ? commit[:message][0..index] : commit[:message]
message.strip.truncate(70)
end
def commit_from_sha
if create?
nil
else
data[:before]
end
end
def commit_to_sha
if remove?
nil
else
data[:after]
end
end
def data
super || {}
end
def commits
data[:commits] || []
end
def commit_count
data[:total_commits_count] || 0
end
def ref
data[:ref]
end
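        # Strips the 'refs/tags/' (10 characters) or 'refs/heads/'
        # (11 characters) prefix from the ref name.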
def trimmed_ref_name
if ref_type == :tag
ref[10..-1]
else
ref[11..-1]
end
end
def create?
data[:before] == BLANK_REF
end
def remove?
data[:after] == BLANK_REF
end
def push_action
if create?
:created
elsif remove?
:removed
else
:pushed
end
end
def ref_type
if ref.start_with?(TAG_REF_PREFIX)
:tag
else
:branch
end
end
end
class EventForMigration < ActiveRecord::Base
self.table_name = 'events_for_migration'
end
class PushEventPayload < ActiveRecord::Base
self.table_name = 'push_event_payloads'
enum action: {
created: 0,
removed: 1,
pushed: 2
}
enum ref_type: {
branch: 0,
tag: 1
}
end
# start_id - The start ID of the range of events to process
# end_id - The end ID of the range to process.
def perform(start_id, end_id)
return unless migrate?
find_events(start_id, end_id).each { |event| process_event(event) }
end
def process_event(event)
ActiveRecord::Base.transaction do
replicate_event(event)
create_push_event_payload(event) if event.push_event?
end
rescue ActiveRecord::InvalidForeignKey => e
# A foreign key error means the associated event was removed. In this
# case we'll just skip migrating the event.
Rails.logger.error("Unable to migrate event #{event.id}: #{e}")
end
def replicate_event(event)
new_attributes = event.attributes
.with_indifferent_access.except(:title, :data)
EventForMigration.create!(new_attributes)
end
def create_push_event_payload(event)
commit_from = pack(event.commit_from_sha)
commit_to = pack(event.commit_to_sha)
PushEventPayload.create!(
event_id: event.id,
commit_count: event.commit_count,
ref_type: event.ref_type,
action: event.push_action,
commit_from: commit_from,
commit_to: commit_to,
ref: event.trimmed_ref_name,
commit_title: event.commit_title
)
end
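      # Worked example (illustrative data) for an event whose action is PUSHED:
      #
      #   event.data = { before: '0' * 40, after: 'a1' * 20,
      #                  ref: 'refs/heads/feature', total_commits_count: 1,
      #                  commits: [{ message: "Add feature\n\nLonger body" }] }
      #
      # yields a push_event_payloads row with action: :created (the old SHA is
      # all zeroes), ref_type: :branch, ref: 'feature', commit_from: nil,
      # commit_to: the packed binary form of 'a1' * 20, commit_count: 1 and
      # commit_title: 'Add feature'.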
def find_events(start_id, end_id)
Event
.where('NOT EXISTS (SELECT true FROM events_for_migration WHERE events_for_migration.id = events.id)')
.where(id: start_id..end_id)
end
def migrate?
Event.table_exists? && PushEventPayload.table_exists? &&
EventForMigration.table_exists?
end
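      # Packs a 40-character hex SHA into its 20-byte binary form for storage
      # in a binary column; nil is passed through unchanged.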
def pack(value)
value ? [value].pack('H*') : nil
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# This background migration is going to create all `fork_networks` and
# the `fork_network_members` for the roots of fork networks based on the
# existing `forked_project_links`.
#
    # When the source of a fork is deleted, we will create the fork network
    # with the target project as the root. This way, when there are forks of
    # the target project, they will be joined into the same fork network.
#
# When the `fork_networks` and memberships for the root projects are created
# the `CreateForkNetworkMembershipsRange` migration is scheduled. This
# migration will create the memberships for all remaining forks-of-forks
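    #
    # Worked example (illustrative IDs): given forked_project_links
    # (1) A -> B and (2) B -> C, where project A still exists:
    #
    #   * create_fork_networks_for_existing_projects inserts a fork_network
    #     rooted at A (link 2 is skipped because its source, B, is itself a fork)
    #   * create_fork_networks_for_missing_projects inserts nothing
    #   * create_fork_networks_memberships_for_root_projects adds A as a member
    #   * the scheduled CreateForkNetworkMembershipsRange job later adds B and C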
class PopulateForkNetworksRange
def perform(start_id, end_id)
create_fork_networks_for_existing_projects(start_id, end_id)
create_fork_networks_for_missing_projects(start_id, end_id)
create_fork_networks_memberships_for_root_projects(start_id, end_id)
delay = BackgroundMigration::CreateForkNetworkMembershipsRange::RESCHEDULE_DELAY
BackgroundMigrationWorker.perform_in(
delay, "CreateForkNetworkMembershipsRange", [start_id, end_id]
)
end
def create_fork_networks_for_existing_projects(start_id, end_id)
log("Creating fork networks: #{start_id} - #{end_id}")
ActiveRecord::Base.connection.execute <<~INSERT_NETWORKS
INSERT INTO fork_networks (root_project_id)
SELECT DISTINCT forked_project_links.forked_from_project_id
FROM forked_project_links
-- Exclude the forks that are not the first level fork of a project
WHERE NOT EXISTS (
SELECT true
FROM forked_project_links inner_links
WHERE inner_links.forked_to_project_id = forked_project_links.forked_from_project_id
)
/* Exclude the ones that are already created, in case the fork network
was already created for another fork of the project.
*/
AND NOT EXISTS (
SELECT true
FROM fork_networks
WHERE forked_project_links.forked_from_project_id = fork_networks.root_project_id
)
-- Only create a fork network for a root project that still exists
AND EXISTS (
SELECT true
FROM projects
WHERE projects.id = forked_project_links.forked_from_project_id
)
AND forked_project_links.id BETWEEN #{start_id} AND #{end_id}
INSERT_NETWORKS
end
def create_fork_networks_for_missing_projects(start_id, end_id)
log("Creating fork networks with missing root: #{start_id} - #{end_id}")
ActiveRecord::Base.connection.execute <<~INSERT_NETWORKS
INSERT INTO fork_networks (root_project_id)
SELECT DISTINCT forked_project_links.forked_to_project_id
FROM forked_project_links
-- Exclude forks that are not the root forks
WHERE NOT EXISTS (
SELECT true
FROM forked_project_links inner_links
WHERE inner_links.forked_to_project_id = forked_project_links.forked_from_project_id
)
/* Exclude the ones that are already created, in case this migration is
re-run
*/
AND NOT EXISTS (
SELECT true
FROM fork_networks
WHERE forked_project_links.forked_to_project_id = fork_networks.root_project_id
)
        /* Exclude links whose source project still exists; those are
           processed in the previous step of this migration
        */
AND NOT EXISTS (
SELECT true
FROM projects
WHERE projects.id = forked_project_links.forked_from_project_id
)
AND forked_project_links.id BETWEEN #{start_id} AND #{end_id}
INSERT_NETWORKS
end
def create_fork_networks_memberships_for_root_projects(start_id, end_id)
log("Creating memberships for root projects: #{start_id} - #{end_id}")
ActiveRecord::Base.connection.execute <<~INSERT_ROOT
INSERT INTO fork_network_members (fork_network_id, project_id)
SELECT DISTINCT fork_networks.id, fork_networks.root_project_id
FROM fork_networks
        /* Joining on both forked_from_project_id and forked_to_project_id so
           we can create the memberships for forks whose source was deleted
        */
INNER JOIN forked_project_links
ON forked_project_links.forked_from_project_id = fork_networks.root_project_id
OR forked_project_links.forked_to_project_id = fork_networks.root_project_id
WHERE NOT EXISTS (
SELECT true
FROM fork_network_members
WHERE fork_network_members.project_id = fork_networks.root_project_id
)
AND forked_project_links.id BETWEEN #{start_id} AND #{end_id}
INSERT_ROOT
end
def log(message)
Rails.logger.info("#{self.class.name} - #{message}")
end
end
end
end
# frozen_string_literal: true
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
class PopulateMergeRequestsLatestMergeRequestDiffId
BATCH_SIZE = 1_000
class MergeRequest < ActiveRecord::Base
self.table_name = 'merge_requests'
include ::EachBatch
end
def perform(start_id, stop_id)
update = '
latest_merge_request_diff_id = (
SELECT MAX(id)
FROM merge_request_diffs
WHERE merge_requests.id = merge_request_diffs.merge_request_id
)'.squish
MergeRequest
.where(id: start_id..stop_id)
.where(latest_merge_request_diff_id: nil)
.each_batch(of: BATCH_SIZE) do |relation|
relation.update_all(update)
end
end
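      # For intuition, the correlated subquery above is equivalent to the
      # following per merge request (sketch only; MergeRequestDiff is not
      # defined in this migration and the real update runs in batches):
      #
      #   merge_request.update_column(
      #     :latest_merge_request_diff_id,
      #     MergeRequestDiff.where(merge_request_id: merge_request.id).maximum(:id)
      #   )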
end
end
end
require 'spec_helper'
describe Gitlab::BackgroundMigration::CreateForkNetworkMembershipsRange, :migration, schema: 20170929131201 do
let(:migration) { described_class.new }
let(:projects) { table(:projects) }
let(:base1) { projects.create }
let(:base1_fork1) { projects.create }
let(:base1_fork2) { projects.create }
let(:base2) { projects.create }
let(:base2_fork1) { projects.create }
let(:base2_fork2) { projects.create }
let(:fork_of_fork) { projects.create }
let(:fork_of_fork2) { projects.create }
let(:second_level_fork) { projects.create }
let(:third_level_fork) { projects.create }
let(:fork_network1) { fork_networks.find_by(root_project_id: base1.id) }
let(:fork_network2) { fork_networks.find_by(root_project_id: base2.id) }
let!(:forked_project_links) { table(:forked_project_links) }
let!(:fork_networks) { table(:fork_networks) }
let!(:fork_network_members) { table(:fork_network_members) }
before do
    # The fork networks and root memberships created for the base projects
fork_networks.create(id: 1, root_project_id: base1.id)
fork_network_members.create(project_id: base1.id, fork_network_id: 1)
fork_networks.create(id: 2, root_project_id: base2.id)
fork_network_members.create(project_id: base2.id, fork_network_id: 2)
# Normal fork links
forked_project_links.create(id: 1, forked_from_project_id: base1.id, forked_to_project_id: base1_fork1.id)
forked_project_links.create(id: 2, forked_from_project_id: base1.id, forked_to_project_id: base1_fork2.id)
forked_project_links.create(id: 3, forked_from_project_id: base2.id, forked_to_project_id: base2_fork1.id)
forked_project_links.create(id: 4, forked_from_project_id: base2.id, forked_to_project_id: base2_fork2.id)
    # Forks of forks
forked_project_links.create(id: 5, forked_from_project_id: base1_fork1.id, forked_to_project_id: fork_of_fork.id)
forked_project_links.create(id: 6, forked_from_project_id: base1_fork1.id, forked_to_project_id: fork_of_fork2.id)
# Forks 3 levels down
forked_project_links.create(id: 7, forked_from_project_id: fork_of_fork.id, forked_to_project_id: second_level_fork.id)
forked_project_links.create(id: 8, forked_from_project_id: second_level_fork.id, forked_to_project_id: third_level_fork.id)
migration.perform(1, 8)
end
  it 'creates memberships for the direct forks' do
base1_fork1_membership = fork_network_members.find_by(fork_network_id: fork_network1.id,
project_id: base1_fork1.id)
base1_fork2_membership = fork_network_members.find_by(fork_network_id: fork_network1.id,
project_id: base1_fork2.id)
base2_fork1_membership = fork_network_members.find_by(fork_network_id: fork_network2.id,
project_id: base2_fork1.id)
base2_fork2_membership = fork_network_members.find_by(fork_network_id: fork_network2.id,
project_id: base2_fork2.id)
expect(base1_fork1_membership.forked_from_project_id).to eq(base1.id)
expect(base1_fork2_membership.forked_from_project_id).to eq(base1.id)
expect(base2_fork1_membership.forked_from_project_id).to eq(base2.id)
expect(base2_fork2_membership.forked_from_project_id).to eq(base2.id)
end
it 'adds the fork network members for forks of forks' do
fork_of_fork_membership = fork_network_members.find_by(project_id: fork_of_fork.id,
fork_network_id: fork_network1.id)
fork_of_fork2_membership = fork_network_members.find_by(project_id: fork_of_fork2.id,
fork_network_id: fork_network1.id)
second_level_fork_membership = fork_network_members.find_by(project_id: second_level_fork.id,
fork_network_id: fork_network1.id)
third_level_fork_membership = fork_network_members.find_by(project_id: third_level_fork.id,
fork_network_id: fork_network1.id)
expect(fork_of_fork_membership.forked_from_project_id).to eq(base1_fork1.id)
expect(fork_of_fork2_membership.forked_from_project_id).to eq(base1_fork1.id)
expect(second_level_fork_membership.forked_from_project_id).to eq(fork_of_fork.id)
expect(third_level_fork_membership.forked_from_project_id).to eq(second_level_fork.id)
end
it 'reschedules itself when there are missing members' do
allow(migration).to receive(:missing_members?).and_return(true)
expect(BackgroundMigrationWorker)
.to receive(:perform_in).with(described_class::RESCHEDULE_DELAY, "CreateForkNetworkMembershipsRange", [1, 3])
migration.perform(1, 3)
end
it 'can be repeated without effect' do
    expect { migration.perform(1, 7) }.not_to change { fork_network_members.count }
end
it 'knows it is finished for this range' do
expect(migration.missing_members?(1, 8)).to be_falsy
end
  it 'does not miss members for forks of forks whose root was deleted' do
forked_project_links.create(id: 9, forked_from_project_id: base1_fork1.id, forked_to_project_id: projects.create.id)
base1.destroy
expect(migration.missing_members?(7, 10)).to be_falsy
end
context 'with more forks' do
before do
forked_project_links.create(id: 9, forked_from_project_id: fork_of_fork.id, forked_to_project_id: projects.create.id)
forked_project_links.create(id: 10, forked_from_project_id: fork_of_fork.id, forked_to_project_id: projects.create.id)
end
it 'only processes a single batch of links at a time' do
expect(fork_network_members.count).to eq(10)
migration.perform(8, 10)
expect(fork_network_members.count).to eq(12)
end
it 'knows when not all memberships within a batch have been created' do
expect(migration.missing_members?(8, 10)).to be_truthy
end
end
end
require 'spec_helper'
describe Gitlab::BackgroundMigration::DeleteConflictingRedirectRoutesRange, :migration, schema: 20170907170235 do
let!(:redirect_routes) { table(:redirect_routes) }
let!(:routes) { table(:routes) }
before do
routes.create!(id: 1, source_id: 1, source_type: 'Namespace', path: 'foo1')
routes.create!(id: 2, source_id: 2, source_type: 'Namespace', path: 'foo2')
routes.create!(id: 3, source_id: 3, source_type: 'Namespace', path: 'foo3')
routes.create!(id: 4, source_id: 4, source_type: 'Namespace', path: 'foo4')
routes.create!(id: 5, source_id: 5, source_type: 'Namespace', path: 'foo5')
# Valid redirects
redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'bar')
redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'bar2')
redirect_routes.create!(source_id: 2, source_type: 'Namespace', path: 'bar3')
# Conflicting redirects
redirect_routes.create!(source_id: 2, source_type: 'Namespace', path: 'foo1')
redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'foo2')
redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'foo3')
redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'foo4')
redirect_routes.create!(source_id: 1, source_type: 'Namespace', path: 'foo5')
end
# No-op. See https://gitlab.com/gitlab-com/infrastructure/issues/3460#note_53223252
it 'NO-OP: does not delete any redirect_routes' do
expect(redirect_routes.count).to eq(8)
described_class.new.perform(1, 5)
expect(redirect_routes.count).to eq(8)
end
end
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::BackgroundMigration::MigrateStageStatus, :migration, schema: 20170711145320 do
let(:projects) { table(:projects) }
let(:pipelines) { table(:ci_pipelines) }
let(:stages) { table(:ci_stages) }
let(:jobs) { table(:ci_builds) }
let(:statuses) do
{
created: 0,
pending: 1,
running: 2,
success: 3,
failed: 4,
canceled: 5,
skipped: 6,
manual: 7
}
end
before do
projects.create!(id: 1, name: 'gitlab1', path: 'gitlab1')
pipelines.create!(id: 1, project_id: 1, ref: 'master', sha: 'adf43c3a')
stages.create!(id: 1, pipeline_id: 1, project_id: 1, name: 'test', status: nil)
stages.create!(id: 2, pipeline_id: 1, project_id: 1, name: 'deploy', status: nil)
end
context 'when stage status is known' do
before do
create_job(project: 1, pipeline: 1, stage: 'test', status: 'success')
create_job(project: 1, pipeline: 1, stage: 'test', status: 'running')
create_job(project: 1, pipeline: 1, stage: 'deploy', status: 'failed')
end
it 'sets a correct stage status' do
described_class.new.perform(1, 2)
expect(stages.first.status).to eq statuses[:running]
expect(stages.second.status).to eq statuses[:failed]
end
end
context 'when stage status is not known' do
it 'sets a skipped stage status' do
described_class.new.perform(1, 2)
expect(stages.first.status).to eq statuses[:skipped]
expect(stages.second.status).to eq statuses[:skipped]
end
end
context 'when stage status includes status of a retried job' do
before do
create_job(project: 1, pipeline: 1, stage: 'test', status: 'canceled')
create_job(project: 1, pipeline: 1, stage: 'deploy', status: 'failed', retried: true)
create_job(project: 1, pipeline: 1, stage: 'deploy', status: 'success')
end
it 'sets a correct stage status' do
described_class.new.perform(1, 2)
expect(stages.first.status).to eq statuses[:canceled]
expect(stages.second.status).to eq statuses[:success]
end
end
context 'when some job in the stage is blocked / manual' do
before do
create_job(project: 1, pipeline: 1, stage: 'test', status: 'failed')
create_job(project: 1, pipeline: 1, stage: 'test', status: 'manual')
create_job(project: 1, pipeline: 1, stage: 'deploy', status: 'success', when: 'manual')
end
it 'sets a correct stage status' do
described_class.new.perform(1, 2)
expect(stages.first.status).to eq statuses[:manual]
expect(stages.second.status).to eq statuses[:success]
end
end
def create_job(project:, pipeline:, stage:, status:, **opts)
stages = { test: 1, build: 2, deploy: 3 }
jobs.create!(project_id: project, commit_id: pipeline,
stage_idx: stages[stage.to_sym], stage: stage,
status: status, **opts)
end
end
require 'spec_helper'
describe Gitlab::BackgroundMigration::NormalizeLdapExternUidsRange, :migration, schema: 20170921101004 do
let!(:identities) { table(:identities) }
before do
# LDAP identities
(1..4).each do |i|
identities.create!(id: i, provider: 'ldapmain', extern_uid: " uid = foo #{i}, ou = People, dc = example, dc = com ", user_id: i)
end
# Non-LDAP identity
identities.create!(id: 5, provider: 'foo', extern_uid: " uid = foo 5, ou = People, dc = example, dc = com ", user_id: 5)
# Another LDAP identity
identities.create!(id: 6, provider: 'ldapmain', extern_uid: " uid = foo 6, ou = People, dc = example, dc = com ", user_id: 6)
end
it 'normalizes the LDAP identities in the range' do
described_class.new.perform(1, 3)
expect(identities.find(1).extern_uid).to eq("uid=foo 1,ou=people,dc=example,dc=com")
expect(identities.find(2).extern_uid).to eq("uid=foo 2,ou=people,dc=example,dc=com")
expect(identities.find(3).extern_uid).to eq("uid=foo 3,ou=people,dc=example,dc=com")
expect(identities.find(4).extern_uid).to eq(" uid = foo 4, ou = People, dc = example, dc = com ")
expect(identities.find(5).extern_uid).to eq(" uid = foo 5, ou = People, dc = example, dc = com ")
expect(identities.find(6).extern_uid).to eq(" uid = foo 6, ou = People, dc = example, dc = com ")
described_class.new.perform(4, 6)
expect(identities.find(1).extern_uid).to eq("uid=foo 1,ou=people,dc=example,dc=com")
expect(identities.find(2).extern_uid).to eq("uid=foo 2,ou=people,dc=example,dc=com")
expect(identities.find(3).extern_uid).to eq("uid=foo 3,ou=people,dc=example,dc=com")
expect(identities.find(4).extern_uid).to eq("uid=foo 4,ou=people,dc=example,dc=com")
expect(identities.find(5).extern_uid).to eq(" uid = foo 5, ou = People, dc = example, dc = com ")
expect(identities.find(6).extern_uid).to eq("uid=foo 6,ou=people,dc=example,dc=com")
end
end
require 'spec_helper'
describe Gitlab::BackgroundMigration::PopulateForkNetworksRange, :migration, schema: 20170929131201 do
let(:migration) { described_class.new }
let(:projects) { table(:projects) }
let(:base1) { projects.create }
let(:base2) { projects.create }
let(:base2_fork1) { projects.create }
let!(:forked_project_links) { table(:forked_project_links) }
let!(:fork_networks) { table(:fork_networks) }
let!(:fork_network_members) { table(:fork_network_members) }
let(:fork_network1) { fork_networks.find_by(root_project_id: base1.id) }
let(:fork_network2) { fork_networks.find_by(root_project_id: base2.id) }
before do
# A normal fork link
forked_project_links.create(id: 1,
forked_from_project_id: base1.id,
forked_to_project_id: projects.create.id)
forked_project_links.create(id: 2,
forked_from_project_id: base1.id,
forked_to_project_id: projects.create.id)
forked_project_links.create(id: 3,
forked_from_project_id: base2.id,
forked_to_project_id: base2_fork1.id)
# create a fork of a fork
forked_project_links.create(id: 4,
forked_from_project_id: base2_fork1.id,
forked_to_project_id: projects.create.id)
forked_project_links.create(id: 5,
forked_from_project_id: projects.create.id,
forked_to_project_id: projects.create.id)
# Stub out the calls to the other migrations
allow(BackgroundMigrationWorker).to receive(:perform_in)
migration.perform(1, 3)
end
it 'creates the fork network' do
expect(fork_network1).not_to be_nil
expect(fork_network2).not_to be_nil
end
it 'does not create a fork network for a fork-of-fork' do
    # Perform the entire batch
migration.perform(1, 5)
expect(fork_networks.find_by(root_project_id: base2_fork1.id)).to be_nil
end
it 'creates memberships for the root of fork networks' do
base1_membership = fork_network_members.find_by(fork_network_id: fork_network1.id,
project_id: base1.id)
base2_membership = fork_network_members.find_by(fork_network_id: fork_network2.id,
project_id: base2.id)
expect(base1_membership).not_to be_nil
expect(base2_membership).not_to be_nil
end
  it 'creates a fork network for a fork whose source was deleted' do
fork = projects.create
forked_project_links.create(id: 6, forked_from_project_id: 99999, forked_to_project_id: fork.id)
migration.perform(5, 8)
expect(fork_networks.find_by(root_project_id: 99999)).to be_nil
expect(fork_networks.find_by(root_project_id: fork.id)).not_to be_nil
expect(fork_network_members.find_by(project_id: fork.id)).not_to be_nil
end
it 'schedules a job for inserting memberships for forks-of-forks' do
delay = Gitlab::BackgroundMigration::CreateForkNetworkMembershipsRange::RESCHEDULE_DELAY
expect(BackgroundMigrationWorker)
.to receive(:perform_in).with(delay, "CreateForkNetworkMembershipsRange", [1, 3])
migration.perform(1, 3)
end
it 'only processes a single batch of links at a time' do
expect(fork_networks.count).to eq(2)
migration.perform(3, 5)
expect(fork_networks.count).to eq(3)
end
it 'can be repeated without effect' do
expect { migration.perform(1, 3) }.not_to change { fork_network_members.count }
end
end
require 'spec_helper'
describe Gitlab::BackgroundMigration::PopulateMergeRequestsLatestMergeRequestDiffId, :migration, schema: 20171026082505 do
let(:projects_table) { table(:projects) }
let(:merge_requests_table) { table(:merge_requests) }
let(:merge_request_diffs_table) { table(:merge_request_diffs) }
let(:project) { projects_table.create!(name: 'gitlab', path: 'gitlab-org/gitlab-ce') }
def create_mr!(name, diffs: 0)
merge_request =
merge_requests_table.create!(target_project_id: project.id,
target_branch: 'master',
source_project_id: project.id,
source_branch: name,
title: name)
diffs.times do
merge_request_diffs_table.create!(merge_request_id: merge_request.id)
end
merge_request
end
def diffs_for(merge_request)
merge_request_diffs_table.where(merge_request_id: merge_request.id)
end
describe '#perform' do
it 'ignores MRs without diffs' do
merge_request_without_diff = create_mr!('without_diff')
mr_id = merge_request_without_diff.id
expect(merge_request_without_diff.latest_merge_request_diff_id).to be_nil
expect { subject.perform(mr_id, mr_id) }
.not_to change { merge_request_without_diff.reload.latest_merge_request_diff_id }
end
it 'ignores MRs that have a diff ID already set' do
merge_request_with_multiple_diffs = create_mr!('with_multiple_diffs', diffs: 3)
diff_id = diffs_for(merge_request_with_multiple_diffs).minimum(:id)
mr_id = merge_request_with_multiple_diffs.id
merge_request_with_multiple_diffs.update!(latest_merge_request_diff_id: diff_id)
expect { subject.perform(mr_id, mr_id) }
.not_to change { merge_request_with_multiple_diffs.reload.latest_merge_request_diff_id }
end
it 'migrates multiple MR diffs to the correct values' do
merge_requests = Array.new(3).map.with_index { |_, i| create_mr!(i, diffs: 3) }
subject.perform(merge_requests.first.id, merge_requests.last.id)
merge_requests.each do |merge_request|
expect(merge_request.reload.latest_merge_request_diff_id)
.to eq(diffs_for(merge_request).maximum(:id))
end
end
end
end