Commit 0ca3c672 authored by Robert Speicher

Merge branch 'backport-schema-changes' into 'master'

Backport the EE schema to CE

Closes gitlab-ee#9686

See merge request gitlab-org/gitlab-ce!26940
parents 982d62a0 d5baece0
@@ -111,7 +111,7 @@
variables:
SETUP_DB: "false"
script:
-    - git fetch https://gitlab.com/gitlab-org/gitlab-ce.git v9.3.0
+    - git fetch https://gitlab.com/gitlab-org/gitlab-ce.git v11.11.0
- git checkout -f FETCH_HEAD
- sed -i "s/gem 'oj', '~> 2.17.4'//" Gemfile
- sed -i "s/gem 'bootsnap', '~> 1.0.0'/gem 'bootsnap'/" Gemfile
---
title: Backport the EE schema and migrations to CE
merge_request: 26940
author: Yorick Peterse
type: other
@@ -13,7 +13,6 @@ DB_FILES = [
'lib/gitlab/github_import/',
'lib/gitlab/sql/',
'rubocop/cop/migration',
-  'ee/db/',
'ee/lib/gitlab/database/'
].freeze
class CreateUserActivities < ActiveRecord::Migration[4.2]
+  DOWNTIME = false
-  # Set this constant to true if this migration requires downtime.
-  DOWNTIME = true
-  # When a migration requires downtime you **must** uncomment the following
-  # constant and define a short and easy to understand explanation as to why the
-  # migration requires downtime.
-  DOWNTIME_REASON = 'Adding foreign key'.freeze
-  # When using the methods "add_concurrent_index" or "add_column_with_default"
-  # you must disable the use of transactions as these methods can not run in an
-  # existing transaction. When using "add_concurrent_index" make sure that this
-  # method is the _only_ method called in the migration, any other changes
-  # should go in a separate migration. This ensures that upon failure _only_ the
-  # index creation fails and can be retried or reverted easily.
-  #
-  # To disable transactions uncomment the following line and remove these
-  # comments:
-  # disable_ddl_transaction!
+  # This migration is a no-op. It just exists to match EE.
  def change
-    create_table :user_activities do |t|
-      t.belongs_to :user, index: { unique: true }, foreign_key: { on_delete: :cascade }
-      t.datetime :last_activity_at, null: false
-    end
  end
end
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class RemoveUndeletedGroups < ActiveRecord::Migration[4.2]
DOWNTIME = false
def up
is_ee = defined?(Gitlab::License)
if is_ee
execute <<-EOF.strip_heredoc
DELETE FROM path_locks
WHERE project_id IN (
SELECT project_id
FROM projects
WHERE namespace_id IN (#{namespaces_pending_removal})
);
EOF
execute <<-EOF.strip_heredoc
DELETE FROM remote_mirrors
WHERE project_id IN (
SELECT project_id
FROM projects
WHERE namespace_id IN (#{namespaces_pending_removal})
);
EOF
end
execute <<-EOF.strip_heredoc
DELETE FROM lists
WHERE label_id IN (
SELECT id
FROM labels
WHERE group_id IN (#{namespaces_pending_removal})
);
EOF
execute <<-EOF.strip_heredoc
DELETE FROM lists
WHERE board_id IN (
SELECT id
FROM boards
WHERE project_id IN (
SELECT project_id
FROM projects
WHERE namespace_id IN (#{namespaces_pending_removal})
)
);
EOF
execute <<-EOF.strip_heredoc
DELETE FROM labels
WHERE group_id IN (#{namespaces_pending_removal});
EOF
execute <<-EOF.strip_heredoc
DELETE FROM boards
WHERE project_id IN (
SELECT project_id
FROM projects
WHERE namespace_id IN (#{namespaces_pending_removal})
)
EOF
execute <<-EOF.strip_heredoc
DELETE FROM projects
WHERE namespace_id IN (#{namespaces_pending_removal});
EOF
if is_ee
# EE adds these columns but we have to make sure this data is cleaned up
# here before we run the DELETE below. An alternative would be patching
# this migration in EE but this will only result in a mess and confusing
# migrations.
execute <<-EOF.strip_heredoc
DELETE FROM protected_branch_push_access_levels
WHERE group_id IN (#{namespaces_pending_removal});
EOF
execute <<-EOF.strip_heredoc
DELETE FROM protected_branch_merge_access_levels
WHERE group_id IN (#{namespaces_pending_removal});
EOF
end
# This removes namespaces that were supposed to be deleted but still reside
# in the database.
execute "DELETE FROM namespaces WHERE deleted_at IS NOT NULL;"
end
def down
# This is an irreversible migration;
# if someone is trying to roll back for other reasons, we should not raise an exception.
# raise ActiveRecord::IrreversibleMigration
end
def namespaces_pending_removal
"SELECT id FROM (
SELECT id
FROM namespaces
WHERE deleted_at IS NOT NULL
) namespace_ids"
end
end
class CleanUpFromMergeRequestDiffsAndCommits < ActiveRecord::Migration[4.2]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
class MergeRequestDiff < ActiveRecord::Base
self.table_name = 'merge_request_diffs'
include ::EachBatch
end
disable_ddl_transaction!
def up
Gitlab::BackgroundMigration.steal('DeserializeMergeRequestDiffsAndCommits')
# The literal '--- []\n' value is created by the import process and treated
# as null by the application, so we can ignore those - even if we were
# migrating, it wouldn't create any rows.
literal_prefix = Gitlab::Database.postgresql? ? 'E' : ''
non_empty = "
(st_commits IS NOT NULL AND st_commits != #{literal_prefix}'--- []\n')
OR
(st_diffs IS NOT NULL AND st_diffs != #{literal_prefix}'--- []\n')
".squish
MergeRequestDiff.where(non_empty).each_batch(of: 500) do |relation, index|
range = relation.pluck('MIN(id)', 'MAX(id)').first
Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits.new.perform(*range)
end
end
def down
end
end
@@ -4,17 +4,25 @@ class CreateProjectMirrorData < ActiveRecord::Migration[4.2]
  DOWNTIME = false
  def up
-    return if table_exists?(:project_mirror_data)
-    create_table :project_mirror_data do |t|
-      t.references :project, index: true, foreign_key: { on_delete: :cascade }
-      t.string :status
-      t.string :jid
-      t.text :last_error
+    if table_exists?(:project_mirror_data)
+      add_column :project_mirror_data, :status, :string unless column_exists?(:project_mirror_data, :status)
+      add_column :project_mirror_data, :jid, :string unless column_exists?(:project_mirror_data, :jid)
+      add_column :project_mirror_data, :last_error, :text unless column_exists?(:project_mirror_data, :last_error)
+    else
+      create_table :project_mirror_data do |t|
+        t.references :project, index: true, foreign_key: { on_delete: :cascade }
+        t.string :status
+        t.string :jid
+        t.text :last_error
+      end
    end
  end
  def down
-    drop_table(:project_mirror_data) if table_exists?(:project_mirror_data)
+    remove_column :project_mirror_data, :status
+    remove_column :project_mirror_data, :jid
+    remove_column :project_mirror_data, :last_error
+    # ee/db/migrate/20170509153720_create_project_mirror_data_ee.rb will remove the table.
  end
end
@@ -29,6 +29,6 @@ class CreateRemoteMirrors < ActiveRecord::Migration[4.2]
  end
  def down
-    drop_table(:remote_mirrors) if table_exists?(:remote_mirrors)
+    # ee/db/migrate/20160321161032_create_remote_mirrors_ee.rb will remove the table
  end
end
@@ -10,6 +10,6 @@ class AddRemoteMirrorAvailableOverriddenToProjects < ActiveRecord::Migration[4.2
  end
  def down
-    remove_column(:projects, :remote_mirror_available_overridden) if column_exists?(:projects, :remote_mirror_available_overridden)
+    # ee/db/migrate/20171017130239_add_remote_mirror_available_overridden_to_projects_ee.rb will remove the column.
  end
end
@@ -10,6 +10,7 @@ class AddIndexesToRemoteMirror < ActiveRecord::Migration[4.2]
  end
  def down
+    # ee/db/migrate/20170208144550_add_index_to_mirrors_last_update_at_fields.rb will remove the index.
    # rubocop:disable Migration/RemoveIndex
    remove_index :remote_mirrors, :last_successful_update_at if index_exists? :remote_mirrors, :last_successful_update_at
  end
@@ -10,6 +10,6 @@ class AddMirrorAvailableToApplicationSettings < ActiveRecord::Migration[4.2]
  end
  def down
-    remove_column(:application_settings, :mirror_available) if column_exists?(:application_settings, :mirror_available)
+    # ee/db/migrate/20171017125928_add_remote_mirror_available_to_application_settings.rb will remove the column.
  end
end
This source diff could not be displayed because it is too large.
# frozen_string_literal: true
-# See http://doc.gitlab.com/ce/development/migration_style_guide.html
-# for more information on how to write migrations for GitLab.
-class CleanUpForMembers < ActiveRecord::Migration[4.2]
+class UpdateDesignsIndex < ActiveRecord::Migration[5.0]
  include Gitlab::Database::MigrationHelpers
  # Set this constant to true if this migration requires downtime.
@@ -9,23 +11,11 @@ class CleanUpForMembers < ActiveRecord::Migration[4.2]
  disable_ddl_transaction!
-  class Member < ActiveRecord::Base
-    include EachBatch
-    self.table_name = 'members'
-  end
  def up
-    condition = <<~EOF.squish
-      invite_token IS NULL AND
-      NOT EXISTS (SELECT 1 FROM users WHERE users.id = members.user_id)
-    EOF
-    Member.each_batch(of: 10_000) do |batch|
-      batch.where(condition).delete_all
-    end
+    remove_concurrent_index :design_management_designs, :issue_id, unique: true
  end
  def down
+    add_concurrent_index :design_management_designs, :issue_id, unique: true
  end
end
# frozen_string_literal: true
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class AddNameToGeoNodes < ActiveRecord::Migration[5.0]
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
def up
add_column :geo_nodes, :name, :string
# url is also unique, and its type and size are identical to the name column,
# so this is safe.
execute "UPDATE geo_nodes SET name = url;"
# url is also `null: false`, so this is safe.
change_column :geo_nodes, :name, :string, null: false
end
def down
remove_column :geo_nodes, :name
end
end
# frozen_string_literal: true
-# See http://doc.gitlab.com/ce/development/migration_style_guide.html
-# for more information on how to write migrations for GitLab.
-class ScheduleCreateGpgKeySubkeysFromGpgKeys < ActiveRecord::Migration[4.2]
-  disable_ddl_transaction!
+class AddNameIndexToGeoNodes < ActiveRecord::Migration[5.0]
+  include Gitlab::Database::MigrationHelpers
  # Set this constant to true if this migration requires downtime.
  DOWNTIME = false
-  MIGRATION = 'CreateGpgKeySubkeysFromGpgKeys'
-  class GpgKey < ActiveRecord::Base
-    self.table_name = 'gpg_keys'
-    include EachBatch
-  end
+  disable_ddl_transaction!
  def up
-    GpgKey.select(:id).each_batch do |gpg_keys|
-      jobs = gpg_keys.pluck(:id).map do |id|
-        [MIGRATION, [id]]
-      end
-      BackgroundMigrationWorker.bulk_perform_async(jobs)
-    end
+    add_concurrent_index :geo_nodes, :name, unique: true
  end
  def down
+    remove_concurrent_index :geo_nodes, :name
  end
end
# frozen_string_literal: true
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class RemoveUrlIndexFromGeoNodes < ActiveRecord::Migration[5.0]
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
disable_ddl_transaction!
def up
remove_concurrent_index :geo_nodes, :url
end
def down
add_concurrent_index :geo_nodes, :url, unique: true
end
end
# frozen_string_literal: true
class AddCommentToVulnerabilityFeedback < ActiveRecord::Migration[5.1]
DOWNTIME = false
def up
add_column :vulnerability_feedback, :comment_author_id, :integer
add_column :vulnerability_feedback, :comment, :text
add_column :vulnerability_feedback, :comment_timestamp, :datetime_with_timezone
end
def down
remove_column :vulnerability_feedback, :comment_author_id
remove_column :vulnerability_feedback, :comment
remove_column :vulnerability_feedback, :comment_timestamp
end
end
# frozen_string_literal: true
# rubocop: disable Migration/AddConcurrentForeignKey
class UpdateInsightsForeignKeys < ActiveRecord::Migration[5.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
remove_foreign_key_if_exists(:insights, column: :project_id)
add_foreign_key(:insights, :projects, column: :project_id, on_delete: :cascade)
remove_foreign_key_if_exists(:insights, column: :namespace_id)
add_foreign_key(:insights, :namespaces, column: :namespace_id, on_delete: :cascade)
end
def down
remove_foreign_key_if_exists(:insights, column: :namespace_id)
add_foreign_key(:insights, :namespaces, column: :namespace_id)
remove_foreign_key_if_exists(:insights, column: :project_id)
add_foreign_key(:insights, :projects, column: :project_id)
end
end
# frozen_string_literal: true
class AddForeignKeyFromVulnerabilityFeedbackToUsers < ActiveRecord::Migration[5.1]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_concurrent_foreign_key :vulnerability_feedback, :users, column: :comment_author_id, on_delete: :nullify
add_concurrent_index :vulnerability_feedback, :comment_author_id
end
def down
remove_foreign_key :vulnerability_feedback, column: :comment_author_id
remove_concurrent_index :vulnerability_feedback, :comment_author_id
end
end
# frozen_string_literal: true
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class AddBridgedPipelineIdToBridges < ActiveRecord::Migration[5.0]
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
def change
add_column :ci_builds, :upstream_pipeline_id, :integer
end
end
# frozen_string_literal: true
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class AddBridgedPipelineIdForeignKey < ActiveRecord::Migration[5.0]
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
disable_ddl_transaction!
def up
add_concurrent_index :ci_builds, :upstream_pipeline_id, where: 'upstream_pipeline_id IS NOT NULL'
add_concurrent_foreign_key :ci_builds, :ci_pipelines, column: :upstream_pipeline_id
end
def down
remove_foreign_key :ci_builds, column: :upstream_pipeline_id
remove_concurrent_index :ci_builds, :upstream_pipeline_id
end
end
# frozen_string_literal: true
class AddIndexToProjectsMirrorUserId < ActiveRecord::Migration[5.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_concurrent_index :projects, :mirror_user_id
end
def down
remove_concurrent_index :projects, :mirror_user_id
end
end
# frozen_string_literal: true
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class AddRuleTypeToApprovalMergeRequestApprovalRules < ActiveRecord::Migration[5.1]
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
disable_ddl_transaction!
def up
add_column_with_default(:approval_merge_request_rules, :rule_type, :integer, limit: 2, default: 1)
end
def down
remove_column(:approval_merge_request_rules, :rule_type)
end
end
# frozen_string_literal: true
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class AddIndexForCodeOwnerRuleTypeOnApprovalMergeRequestRules < ActiveRecord::Migration[5.1]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
INDEX_CODE_OWNERS_RULES_UNIQUENESS_NAME = 'index_approval_rule_name_for_code_owners_rule_type'
INDEX_CODE_OWNERS_RULES_QUERY_NAME = 'index_approval_rules_code_owners_rule_type'
class ApprovalMergeRequestRule < ActiveRecord::Base
include EachBatch
enum rule_types: {
regular: 1,
code_owner: 2
}
end
def up
# Ensure only 1 code_owner rule per merge_request
add_concurrent_index(
:approval_merge_request_rules,
[:merge_request_id, :rule_type, :name],
unique: true,
where: "rule_type = #{ApprovalMergeRequestRule.rule_types[:code_owner]}",
name: INDEX_CODE_OWNERS_RULES_UNIQUENESS_NAME
)
# Support lookups for all code_owner rules per merge_request
add_concurrent_index(
:approval_merge_request_rules,
[:merge_request_id, :rule_type],
where: "rule_type = #{ApprovalMergeRequestRule.rule_types[:code_owner]}",
name: INDEX_CODE_OWNERS_RULES_QUERY_NAME
)
end
def down
remove_concurrent_index_by_name(
:approval_merge_request_rules,
INDEX_CODE_OWNERS_RULES_UNIQUENESS_NAME
)
remove_concurrent_index_by_name(
:approval_merge_request_rules,
INDEX_CODE_OWNERS_RULES_QUERY_NAME
)
end
end
# frozen_string_literal: true
class AddIndexToCountPendingMirrorUpdates < ActiveRecord::Migration[5.1]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_concurrent_index :project_mirror_data, [:last_update_at, :retry_count]
end
def down
remove_concurrent_index :project_mirror_data, [:last_update_at, :retry_count]
end
end
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class DropUserActivitiesTable < ActiveRecord::Migration[4.2]
  include Gitlab::Database::MigrationHelpers
  DOWNTIME = false
+  # This migration is a no-op. It just exists to match EE.
+  def change
-  # When using the methods "add_concurrent_index" or "add_column_with_default"
-  # you must disable the use of transactions as these methods can not run in an
-  # existing transaction. When using "add_concurrent_index" make sure that this
-  # method is the _only_ method called in the migration, any other changes
-  # should go in a separate migration. This ensures that upon failure _only_ the
-  # index creation fails and can be retried or reverted easily.
-  #
-  # To disable transactions uncomment the following line and remove these
-  # comments:
-  # disable_ddl_transaction!
-  def up
-    drop_table :user_activities if table_exists?(:user_activities)
-  end
-  def down
-    unless table_exists?(:user_activities)
-      create_table "user_activities", force: :cascade do |t|
-        t.integer "user_id"
-        t.datetime "last_activity_at", null: false
-      end
-      add_index "user_activities", ["user_id"], name: "index_user_activities_on_user_id", unique: true, using: :btree
-    end
  end
end
# This is the counterpart of RequeuePendingDeleteProjects and cleans up all
# projects with `pending_delete = true` that do not have a namespace.
class CleanupNamespacelessPendingDeleteProjects < ActiveRecord::Migration[4.2]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
@offset = 0
loop do
ids = pending_delete_batch
break if ids.empty?
args = ids.map { |id| Array(id) }
NamespacelessProjectDestroyWorker.bulk_perform_async(args)
@offset += 1
end
end
def down
# noop
end
private
def pending_delete_batch
connection.exec_query(find_batch).map { |row| row['id'].to_i }
end
BATCH_SIZE = 5000
def find_batch
projects = Arel::Table.new(:projects)
projects.project(projects[:id])
.where(projects[:pending_delete].eq(true))
.where(projects[:namespace_id].eq(nil))
.skip(@offset * BATCH_SIZE)
.take(BATCH_SIZE)
.to_sql
end
end
class ScheduleMergeRequestDiffMigrations < ActiveRecord::Migration[4.2]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
BATCH_SIZE = 2500
MIGRATION = 'DeserializeMergeRequestDiffsAndCommits'
disable_ddl_transaction!
class MergeRequestDiff < ActiveRecord::Base
self.table_name = 'merge_request_diffs'
include ::EachBatch
end
# Assuming that there are 5 million rows affected (which is more than on
# GitLab.com), and that each batch of 2,500 rows takes up to 5 minutes, we
# can migrate all the rows in about 7 days.
#
# On staging, plucking the IDs themselves takes 5 seconds.
def up
non_empty = 'st_commits IS NOT NULL OR st_diffs IS NOT NULL'
MergeRequestDiff.where(non_empty).each_batch(of: BATCH_SIZE) do |relation, index|
range = relation.pluck('MIN(id)', 'MAX(id)').first
BackgroundMigrationWorker.perform_in(index * 5.minutes, MIGRATION, range)
end
end
def down
end
end
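A quick sanity check of the estimate in the comment above, in plain Ruby; the row count is the comment's assumption, the other numbers come from the migration's constants:

```ruby
# Back-of-the-envelope check of the scheduling estimate above.
rows        = 5_000_000 # assumed affected rows, per the comment
batch_size  = 2_500     # BATCH_SIZE
gap_minutes = 5         # delay between scheduled batches

batches = rows / batch_size            # => 2000
puts batches * gap_minutes / 60.0 / 24 # => ~6.9 days, matching the ~7 day estimate
```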
class ScheduleMergeRequestDiffMigrationsTakeTwo < ActiveRecord::Migration[4.2]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
BATCH_SIZE = 500
MIGRATION = 'DeserializeMergeRequestDiffsAndCommits'
DELAY_INTERVAL = 10.minutes
disable_ddl_transaction!
class MergeRequestDiff < ActiveRecord::Base
self.table_name = 'merge_request_diffs'
include ::EachBatch
default_scope { where('st_commits IS NOT NULL OR st_diffs IS NOT NULL') }
end
# By this point, we assume ScheduleMergeRequestDiffMigrations - the first
# version of this - has already run. On GitLab.com, we have ~220k un-migrated
# rows, but these rows will, in general, take a long time to process.
#
# With a gap of 10 minutes per batch, and 500 rows per batch, these migrations
# are scheduled over 220_000 / 500 / 6 ~= 74 hours, which is a little over
# three days.
def up
queue_background_migration_jobs_by_range_at_intervals(MergeRequestDiff, MIGRATION, DELAY_INTERVAL, batch_size: BATCH_SIZE)
end
def down
end
end
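The `/ 6` in that comment is the 10-minute interval expressed as batches per hour; a one-line check:

```ruby
# 220_000 rows / 500 per batch = 440 batches; six 10-minute batches fit per hour.
puts 220_000 / 500 / 6.0 # => ~73.3 hours, "a little over three days"
```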
# frozen_string_literal: true
class SchedulePopulateMergeRequestMetricsWithEventsData < ActiveRecord::Migration[4.2]
DOWNTIME = false
BATCH_SIZE = 10_000
MIGRATION = 'PopulateMergeRequestMetricsWithEventsData'
disable_ddl_transaction!
class MergeRequest < ActiveRecord::Base
self.table_name = 'merge_requests'
include ::EachBatch
end
def up
say 'Scheduling `PopulateMergeRequestMetricsWithEventsData` jobs'
# It will update around 4_000_000 records in batches of 10_000 merge
# requests (scheduled 10 minutes apart) and should take around 66 hours to complete.
# Apparently, production PostgreSQL is able to vacuum 10k-20k dead tuples per
# minute, and at maximum, each of these jobs should UPDATE 20k records.
#
# More information about the updates in `PopulateMergeRequestMetricsWithEventsData` class.
#
MergeRequest.all.each_batch(of: BATCH_SIZE) do |relation, index|
range = relation.pluck('MIN(id)', 'MAX(id)').first
BackgroundMigrationWorker.perform_in(index * 10.minutes, MIGRATION, range)
end
end
def down
execute "update merge_request_metrics set latest_closed_at = null"
execute "update merge_request_metrics set latest_closed_by_id = null"
execute "update merge_request_metrics set merged_by_id = null"
end
end
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class RemoveSoftRemovedObjects < ActiveRecord::Migration[4.2]
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
disable_ddl_transaction!
module SoftRemoved
extend ActiveSupport::Concern
included do
scope :soft_removed, -> { where('deleted_at IS NOT NULL') }
end
end
class User < ActiveRecord::Base
self.table_name = 'users'
include EachBatch
end
class Issue < ActiveRecord::Base
self.table_name = 'issues'
include EachBatch
include SoftRemoved
end
class MergeRequest < ActiveRecord::Base
self.table_name = 'merge_requests'
include EachBatch
include SoftRemoved
end
class Namespace < ActiveRecord::Base
self.table_name = 'namespaces'
include EachBatch
include SoftRemoved
scope :soft_removed_personal, -> { soft_removed.where(type: nil) }
scope :soft_removed_group, -> { soft_removed.where(type: 'Group') }
end
class Route < ActiveRecord::Base
self.table_name = 'routes'
include EachBatch
include SoftRemoved
end
class Project < ActiveRecord::Base
self.table_name = 'projects'
include EachBatch
include SoftRemoved
end
class CiPipelineSchedule < ActiveRecord::Base
self.table_name = 'ci_pipeline_schedules'
include EachBatch
include SoftRemoved
end
class CiTrigger < ActiveRecord::Base
self.table_name = 'ci_triggers'
include EachBatch
include SoftRemoved
end
MODELS = [Issue, MergeRequest, CiPipelineSchedule, CiTrigger].freeze
def up
disable_statement_timeout do
remove_personal_routes
remove_personal_namespaces
remove_group_namespaces
remove_simple_soft_removed_rows
end
end
def down
# The data removed by this migration can't be restored in an automated way.
end
def remove_simple_soft_removed_rows
create_temporary_indexes
MODELS.each do |model|
say_with_time("Removing soft removed rows from #{model.table_name}") do
model.soft_removed.each_batch do |batch, index|
batch.delete_all
end
end
end
ensure
remove_temporary_indexes
end
def create_temporary_indexes
MODELS.each do |model|
index_name = temporary_index_name_for(model)
# Without this index the removal process can take a very long time. For
# example, getting the next ID of a batch for the `issues` table in
# staging would take between 15 and 20 seconds.
next if temporary_index_exists?(model)
say_with_time("Creating temporary index #{index_name}") do
add_concurrent_index(
model.table_name,
[:deleted_at, :id],
name: index_name,
where: 'deleted_at IS NOT NULL'
)
end
end
end
def remove_temporary_indexes
MODELS.each do |model|
index_name = temporary_index_name_for(model)
next unless temporary_index_exists?(model)
say_with_time("Removing temporary index #{index_name}") do
remove_concurrent_index_by_name(model.table_name, index_name)
end
end
end
def temporary_index_name_for(model)
"index_on_#{model.table_name}_tmp"
end
def temporary_index_exists?(model)
index_name = temporary_index_name_for(model)
index_exists?(model.table_name, [:deleted_at, :id], name: index_name)
end
def remove_personal_namespaces
# Some personal namespaces are left behind in case of GitLab.com. In these
# cases the associated data such as the projects and users has already been
# removed.
Namespace.soft_removed_personal.each_batch do |batch|
batch.delete_all
end
end
def remove_group_namespaces
admin_id = id_for_admin_user
unless admin_id
say 'Not scheduling soft removed groups for removal as no admin user ' \
'could be found. You will need to remove any such groups manually.'
return
end
# Left over groups can't be easily removed because we may also need to
# remove memberships, repositories, and other associated data. As a result
# we'll just schedule a Sidekiq job to remove these.
#
# As of January 5th, 2018 there are 36 groups that will be removed using
# this code.
Namespace.select(:id).soft_removed_group.each_batch(of: 10) do |batch, index|
batch.each do |ns|
schedule_group_removal(index * 5.minutes, ns.id, admin_id)
end
end
end
def schedule_group_removal(delay, group_id, user_id)
if migrate_inline?
GroupDestroyWorker.new.perform(group_id, user_id)
else
GroupDestroyWorker.perform_in(delay, group_id, user_id)
end
end
def remove_personal_routes
namespaces = Namespace.select(1)
.soft_removed
.where('namespaces.type IS NULL')
.where('routes.source_type = ?', 'Namespace')
.where('routes.source_id = namespaces.id')
Route.where('EXISTS (?)', namespaces).each_batch do |batch|
batch.delete_all
end
end
def id_for_admin_user
User.where(admin: true).limit(1).pluck(:id).first
end
def migrate_inline?
Rails.env.test? || Rails.env.development?
end
end
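For intuition, the temporary index built by `create_temporary_indexes` above corresponds roughly to the following SQL (PostgreSQL syntax; `issues` stands in for each model's table). Because the index is partial, it only contains soft-removed rows, which keeps it small and lets `each_batch` find batch boundaries without scanning live data:

```ruby
# Approximate SQL equivalent (assumed) of the add_concurrent_index call above.
puts <<~SQL
  CREATE INDEX CONCURRENTLY index_on_issues_tmp
  ON issues (deleted_at, id)
  WHERE deleted_at IS NOT NULL;
SQL
```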
class MigrateImportAttributesDataFromProjectsToProjectMirrorData < ActiveRecord::Migration[4.2]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
UP_MIGRATION = 'PopulateImportState'.freeze
DOWN_MIGRATION = 'RollbackImportStateData'.freeze
BATCH_SIZE = 1000
DELAY_INTERVAL = 5.minutes
disable_ddl_transaction!
class Project < ActiveRecord::Base
include EachBatch
self.table_name = 'projects'
end
class ProjectImportState < ActiveRecord::Base
include EachBatch
self.table_name = 'project_mirror_data'
end
def up
projects = Project.where.not(import_status: :none)
queue_background_migration_jobs_by_range_at_intervals(projects, UP_MIGRATION, DELAY_INTERVAL, batch_size: BATCH_SIZE)
end
def down
import_state = ProjectImportState.where.not(status: :none)
queue_background_migration_jobs_by_range_at_intervals(import_state, DOWN_MIGRATION, DELAY_INTERVAL, batch_size: BATCH_SIZE)
end
end
class MigrateRemainingMrMetricsPopulatingBackgroundMigration < ActiveRecord::Migration[4.2]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
BATCH_SIZE = 5_000
MIGRATION = 'PopulateMergeRequestMetricsWithEventsData'
DELAY_INTERVAL = 10.minutes
disable_ddl_transaction!
class MergeRequest < ActiveRecord::Base
self.table_name = 'merge_requests'
include ::EachBatch
end
def up
# Perform any ongoing background migration that might still be running. This
# avoids scheduling way too many of the same jobs on self-hosted instances
# if they're updating GitLab across multiple versions. The "Take one"
# migration was executed on 10.4 on
# SchedulePopulateMergeRequestMetricsWithEventsData.
Gitlab::BackgroundMigration.steal(MIGRATION)
metrics_not_exists_clause = <<~SQL
NOT EXISTS (SELECT 1 FROM merge_request_metrics
WHERE merge_request_metrics.merge_request_id = merge_requests.id)
SQL
relation = MergeRequest.where(metrics_not_exists_clause)
# We currently have ~400_000 MR records without metrics on GitLab.com.
# This means it'll schedule ~80 jobs (5000 MRs each) with a 10-minute gap,
# so this should take ~14 hours for all background migrations to complete.
#
queue_background_migration_jobs_by_range_at_intervals(relation,
MIGRATION,
DELAY_INTERVAL,
batch_size: BATCH_SIZE)
end
def down
end
end
class EnqueueDeleteDiffFilesWorkers < ActiveRecord::Migration[4.2]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
SCHEDULER = 'ScheduleDiffFilesDeletion'.freeze
TMP_INDEX = 'tmp_partial_diff_id_with_files_index'.freeze
disable_ddl_transaction!
def up
unless index_exists_by_name?(:merge_request_diffs, TMP_INDEX)
add_concurrent_index(:merge_request_diffs, :id, where: "(state NOT IN ('without_files', 'empty'))", name: TMP_INDEX)
end
BackgroundMigrationWorker.perform_async(SCHEDULER)
# We don't remove the index since it's going to be used by the DeleteDiffFiles
# worker. We should remove it in an upcoming release.
end
def down
if index_exists_by_name?(:merge_request_diffs, TMP_INDEX)
remove_concurrent_index_by_name(:merge_request_diffs, TMP_INDEX)
end
end
end
# frozen_string_literal: true
class DeleteInconsistentInternalIdRecords < ActiveRecord::Migration[4.2]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
# This migration cleans up any inconsistent records in internal_ids.
#
# That is, it deletes records that track a `last_value` that is
# smaller than the maximum internal id (usually `iid`) found in
# the corresponding model records.
def up
disable_statement_timeout do
delete_internal_id_records('issues', 'project_id')
delete_internal_id_records('merge_requests', 'project_id', 'target_project_id')
delete_internal_id_records('deployments', 'project_id')
delete_internal_id_records('milestones', 'project_id')
delete_internal_id_records('milestones', 'namespace_id', 'group_id')
delete_internal_id_records('ci_pipelines', 'project_id')
end
end
class InternalId < ActiveRecord::Base
self.table_name = 'internal_ids'
enum usage: { issues: 0, merge_requests: 1, deployments: 2, milestones: 3, epics: 4, ci_pipelines: 5 }
end
private
def delete_internal_id_records(base_table, scope_column_name, base_scope_column_name = scope_column_name)
sql = <<~SQL
SELECT id FROM ( -- workaround for MySQL
SELECT internal_ids.id FROM (
SELECT #{base_scope_column_name} AS #{scope_column_name}, max(iid) as maximum_iid from #{base_table} GROUP BY #{scope_column_name}
) maxima JOIN internal_ids USING (#{scope_column_name})
WHERE internal_ids.usage=#{InternalId.usages.fetch(base_table)} AND maxima.maximum_iid > internal_ids.last_value
) internal_ids
SQL
InternalId.where("id IN (#{sql})").tap do |ids| # rubocop:disable GitlabSecurity/SqlInjection
say "Deleting internal_id records for #{base_table}: #{ids.pluck(:project_id, :last_value)}" unless ids.empty?
end.delete_all
end
end
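The `-- workaround for MySQL` wrapper above exists because MySQL rejects a DELETE whose subquery reads the target table directly (ERROR 1093), while reading it through a named derived table is allowed. A minimal sketch with an illustrative condition:

```ruby
# Rejected by MySQL (ERROR 1093): the DELETE target appears in its own subquery.
direct = <<~SQL
  DELETE FROM internal_ids
  WHERE id IN (SELECT id FROM internal_ids WHERE last_value < 0);
SQL

# Accepted: the inner SELECT is materialised as a named derived table first,
# which is exactly what the extra "SELECT id FROM ( ... )" layer adds.
wrapped = <<~SQL
  DELETE FROM internal_ids
  WHERE id IN (
    SELECT id FROM (
      SELECT id FROM internal_ids WHERE last_value < 0
    ) ids
  );
SQL
```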
# frozen_string_literal: true
class RemoveOrphanedLabelLinks < ActiveRecord::Migration[4.2]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
class LabelLinks < ActiveRecord::Base
self.table_name = 'label_links'
include EachBatch
def self.orphaned
where('NOT EXISTS ( SELECT 1 FROM labels WHERE labels.id = label_links.label_id )')
end
end
def up
# Some of these queries can take up to 10 seconds to run on GitLab.com,
# which is pretty close to our 15 second statement timeout. To ensure a
# smooth deployment procedure we disable the statement timeouts for this
# migration, just in case.
disable_statement_timeout do
# On GitLab.com there are over 2,000,000 orphaned label links. On
# staging, removing 100,000 rows generated a max replication lag of 6.7
# MB. In total, removing all these rows will only generate about 136 MB
# of data, so it should be safe to do this.
LabelLinks.orphaned.each_batch(of: 100_000) do |batch|
batch.delete_all
end
end
add_concurrent_foreign_key(:label_links, :labels, column: :label_id, on_delete: :cascade)
end
def down
# There is no way to restore orphaned label links.
if foreign_key_exists?(:label_links, column: :label_id)
remove_foreign_key(:label_links, column: :label_id)
end
end
end
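A toy, in-memory rendition of the `orphaned` scope's anti-join, to make the NOT EXISTS semantics concrete:

```ruby
# Keep only links whose label_id has no matching labels row.
labels      = [{ id: 1 }, { id: 2 }]
label_links = [{ id: 10, label_id: 1 }, { id: 11, label_id: 99 }]

label_ids = labels.map { |l| l[:id] }
orphaned  = label_links.reject { |link| label_ids.include?(link[:label_id]) }
puts orphaned.inspect # => [{:id=>11, :label_id=>99}]
```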
# frozen_string_literal: true
class ConsumeRemainingDiffFilesDeletionJobs < ActiveRecord::Migration[4.2]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
MIGRATION = 'ScheduleDiffFilesDeletion'.freeze
TMP_INDEX = 'tmp_partial_diff_id_with_files_index'.freeze
def up
# Perform any ongoing background migration that might still be scheduled.
Gitlab::BackgroundMigration.steal(MIGRATION)
remove_concurrent_index_by_name(:merge_request_diffs, TMP_INDEX)
end
def down
add_concurrent_index(:merge_request_diffs, :id, where: "(state NOT IN ('without_files', 'empty'))", name: TMP_INDEX)
end
end
# frozen_string_literal: true
class EnqueueRedactLinks < ActiveRecord::Migration[4.2]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
BATCH_SIZE = 1000
DELAY_INTERVAL = 5.minutes.to_i
MIGRATION = 'RedactLinks'
disable_ddl_transaction!
class Note < ActiveRecord::Base
include EachBatch
self.table_name = 'notes'
self.inheritance_column = :_type_disabled
end
class Issue < ActiveRecord::Base
include EachBatch
self.table_name = 'issues'
self.inheritance_column = :_type_disabled
end
class MergeRequest < ActiveRecord::Base
include EachBatch
self.table_name = 'merge_requests'
self.inheritance_column = :_type_disabled
end
class Snippet < ActiveRecord::Base
include EachBatch
self.table_name = 'snippets'
self.inheritance_column = :_type_disabled
end
def up
disable_statement_timeout do
schedule_migration(Note, 'note')
schedule_migration(Issue, 'description')
schedule_migration(MergeRequest, 'description')
schedule_migration(Snippet, 'description')
end
end
def down
# nothing to do
end
private
def schedule_migration(model, field)
link_pattern = "%/sent_notifications/" + ("_" * 32) + "/unsubscribe%"
model.where("#{field} like ?", link_pattern).each_batch(of: BATCH_SIZE) do |batch, index|
start_id, stop_id = batch.pluck('MIN(id)', 'MAX(id)').first
BackgroundMigrationWorker.perform_in(index * DELAY_INTERVAL, MIGRATION, [model.name.demodulize, field, start_id, stop_id])
end
end
end
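The `link_pattern` above leans on SQL LIKE semantics: `%` matches any run of characters and `_` exactly one, so the 32 underscores line up with the 32-hex-digit reply key in unsubscribe links. A rough Ruby translation of the match, with a made-up URL:

```ruby
# LIKE-to-regex intuition only: '%' behaves like '.*' and '_' like '.'.
link_pattern = "%/sent_notifications/" + ("_" * 32) + "/unsubscribe%"
regex = Regexp.new(Regexp.escape(link_pattern).gsub('%', '.*').gsub('_', '.'))

text = "see https://example.com/sent_notifications/#{'0af9' * 8}/unsubscribe to opt out"
puts text.match?(regex) # => true
```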
# frozen_string_literal: true
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class PopulateMrMetricsWithEventsData < ActiveRecord::Migration[4.2]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
BATCH_SIZE = 10_000
MIGRATION = 'PopulateMergeRequestMetricsWithEventsDataImproved'
PREVIOUS_MIGRATION = 'PopulateMergeRequestMetricsWithEventsData'
disable_ddl_transaction!
def up
# Perform any ongoing background migration that might still be running from
# previous try (see https://gitlab.com/gitlab-org/gitlab-ce/issues/47676).
Gitlab::BackgroundMigration.steal(PREVIOUS_MIGRATION)
say 'Scheduling `PopulateMergeRequestMetricsWithEventsData` jobs'
# It will update around 4_000_000 records in batches of 10_000 merge
# requests (scheduled 8 minutes apart) and should take around 53 hours to complete.
# Apparently, production PostgreSQL is able to vacuum 10k-20k dead_tuples
# per minute. So this should give us enough space.
#
# More information about the updates in `PopulateMergeRequestMetricsWithEventsDataImproved` class.
#
MergeRequest.all.each_batch(of: BATCH_SIZE) do |relation, index|
range = relation.pluck('MIN(id)', 'MAX(id)').first
BackgroundMigrationWorker.perform_in(index * 8.minutes, MIGRATION, range)
end
end
def down
end
end
# frozen_string_literal: true
class ScheduleMergeRequestAssigneesMigrationProgressCheck < ActiveRecord::Migration[5.0]
include Gitlab::Database::MigrationHelpers
MIGRATION = 'MergeRequestAssigneesMigrationProgressCheck'.freeze
DOWNTIME = false
disable_ddl_transaction!
def up
BackgroundMigrationWorker.perform_async(MIGRATION)
end
def down
end
end
# frozen_string_literal: true
class AddUniqueConstraintToApprovalsUserIdAndMergeRequestId < ActiveRecord::Migration[5.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
remove_duplicates
add_concurrent_index :approvals, [:user_id, :merge_request_id], unique: true
end
def down
remove_concurrent_index :approvals, [:user_id, :merge_request_id]
end
private
def remove_duplicates
add_concurrent_index :approvals, [:user_id, :merge_request_id, :id]
if Gitlab::Database.mysql?
execute <<-SQL
DELETE FROM a
USING approvals AS a
INNER JOIN (
SELECT user_id, merge_request_id, MIN(id) as min_id
FROM approvals
GROUP BY user_id, merge_request_id
HAVING COUNT(id) > 1
) as approvals_with_duplicates
ON approvals_with_duplicates.user_id = a.user_id
AND approvals_with_duplicates.merge_request_id = a.merge_request_id
WHERE approvals_with_duplicates.min_id <> a.id;
SQL
else
execute <<-SQL
DELETE FROM approvals
USING (
SELECT user_id, merge_request_id, MIN(id) as min_id
FROM approvals
GROUP BY user_id, merge_request_id
HAVING COUNT(id) > 1
) as approvals_with_duplicates
WHERE approvals_with_duplicates.user_id = approvals.user_id
AND approvals_with_duplicates.merge_request_id = approvals.merge_request_id
AND approvals_with_duplicates.min_id <> approvals.id;
SQL
end
remove_concurrent_index :approvals, [:user_id, :merge_request_id, :id]
end
end
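Both DELETE statements above implement the same rule: within each `(user_id, merge_request_id)` group, keep `MIN(id)` and drop the rest so the unique index can be built. The rule in miniature:

```ruby
# Keep the oldest approval per (user_id, merge_request_id); drop duplicates.
approvals = [
  { id: 1, user_id: 10, merge_request_id: 7 },
  { id: 2, user_id: 10, merge_request_id: 7 }, # duplicate, would be deleted
  { id: 3, user_id: 11, merge_request_id: 7 }
]

keep_ids  = approvals.group_by { |a| a.values_at(:user_id, :merge_request_id) }
                     .map { |_, rows| rows.map { |r| r[:id] }.min }
survivors = approvals.select { |a| keep_ids.include?(a[:id]) }
puts survivors.map { |a| a[:id] }.inspect # => [1, 3]
```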
# frozen_string_literal: true
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
# We are reverting the feature that created this column. This is for anyone who
# migrated while the feature still existed in master.
class RemoveAlternateUrlFromGeoNodes < ActiveRecord::Migration[5.0]
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
def up
remove_column(:geo_nodes, :alternate_url) if column_exists?(:geo_nodes, :alternate_url)
end
def down
add_column :geo_nodes, :alternate_url, :string
end
end
# frozen_string_literal: true
class PopulateProjectStatisticsPackagesSize < ActiveRecord::Migration[5.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
class ProjectStatistics < ActiveRecord::Base
self.table_name = 'project_statistics'
end
def up
stats_ids = ProjectStatistics.joins(
<<~SQL.strip_heredoc
INNER JOIN projects ON projects.id = project_statistics.project_id
INNER JOIN packages_packages ON packages_packages.project_id = projects.id
INNER JOIN packages_package_files ON packages_package_files.package_id = packages_packages.id
SQL
).distinct.select(:id)
packages_size = Arel.sql(
'(SELECT SUM(size) FROM packages_package_files ' \
'JOIN packages_packages ON packages_packages.id = packages_package_files.package_id ' \
'WHERE packages_packages.project_id = project_statistics.project_id)'
)
update_column_in_batches(:project_statistics, :packages_size, packages_size) do |table, query|
query.where(table[:id].in(stats_ids))
end
storage_size = Arel.sql('(repository_size + lfs_objects_size + build_artifacts_size + COALESCE(packages_size, 0))')
update_column_in_batches(:project_statistics, :storage_size, storage_size) do |table, query|
query.where(table[:id].in(stats_ids))
end
end
def down
storage_size = Arel.sql('(repository_size + lfs_objects_size + build_artifacts_size)')
update_column_in_batches(:project_statistics, :storage_size, storage_size) do |table, query|
query.where(table[:packages_size].gt(0))
end
update_column_in_batches(:project_statistics, :packages_size, nil)
end
end
# frozen_string_literal: true
class RemoveUsersSupportType < ActiveRecord::Migration[5.1]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
INDEX_STATE_INTERNAL_ATTRS = 'index_users_on_state_and_internal_attrs'
disable_ddl_transaction!
def up
remove_concurrent_index :users, :state, name: INDEX_STATE_INTERNAL_ATTRS
remove_concurrent_index :users, :support_bot
remove_column :users, :support_bot
end
def down
add_column :users, :support_bot, :boolean
add_concurrent_index :users, :support_bot
add_concurrent_index :users, :state,
name: INDEX_STATE_INTERNAL_ATTRS,
where: 'ghost <> true AND support_bot <> true'
end
end
@@ -3,31 +3,32 @@
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
-class PopulateExternalPipelineSource < ActiveRecord::Migration[4.2]
+class PopulateRuleTypeOnApprovalMergeRequestRules < ActiveRecord::Migration[5.1]
  include Gitlab::Database::MigrationHelpers
  # Set this constant to true if this migration requires downtime.
  DOWNTIME = false
-  MIGRATION = 'PopulateExternalPipelineSource'.freeze
-  BATCH_SIZE = 500
  disable_ddl_transaction!
-  class Pipeline < ActiveRecord::Base
+  class ApprovalMergeRequestRule < ActiveRecord::Base
    include EachBatch
-    self.table_name = 'ci_pipelines'
+    enum rule_types: {
+      regular: 1,
+      code_owner: 2
+    }
  end
  def up
-    Pipeline.where(source: nil).tap do |relation|
-      queue_background_migration_jobs_by_range_at_intervals(relation,
-                                                            MIGRATION,
-                                                            5.minutes,
-                                                            batch_size: BATCH_SIZE)
+    # On GitLab.com, this should update about 17k rows. Since our updates are
+    # small and we are populating prior to indexing, the overhead should be small.
+    ApprovalMergeRequestRule.where(code_owner: true).each_batch do |batch|
+      batch.update_all(rule_type: ApprovalMergeRequestRule.rule_types[:code_owner])
    end
  end
  def down
-    # noop
+    # code_owner is already kept in sync with `rule_type`, so no changes are needed
  end
end
This source diff could not be displayed because it is too large.
@@ -147,7 +147,6 @@ The `ModelConfigurationSpec` checks and confirms the addition of new models:
If you think this model should be included in the export, please add it to `#{Gitlab::ImportExport.config_file}`.
Definitely add it to `#{File.expand_path(ce_models_yml)}`
#{"or `#{File.expand_path(ee_models_yml)}` if the model/associations are EE-specific\n" if ee_models_hash.any?}
to signal that you've handled this error and to prevent it from showing up in the future.
MSG
```
@@ -253,7 +252,7 @@ Model relationships to be included in the project import/export:
```yaml
project_tree:
  - labels:
-      :priorities
+    - :priorities
  - milestones:
    - events:
      - :push_event_payload
# frozen_string_literal: true
# rubocop:disable Style/Documentation
class Gitlab::BackgroundMigration::CreateGpgKeySubkeysFromGpgKeys
class GpgKey < ActiveRecord::Base
self.table_name = 'gpg_keys'
include EachBatch
include ShaAttribute
sha_attribute :primary_keyid
sha_attribute :fingerprint
has_many :subkeys, class_name: 'GpgKeySubkey'
end
class GpgKeySubkey < ActiveRecord::Base
self.table_name = 'gpg_key_subkeys'
include ShaAttribute
sha_attribute :keyid
sha_attribute :fingerprint
end
def perform(gpg_key_id)
gpg_key = GpgKey.find_by(id: gpg_key_id)
return if gpg_key.nil?
return if gpg_key.subkeys.any?
create_subkeys(gpg_key)
update_signatures(gpg_key)
end
private
def create_subkeys(gpg_key)
gpg_subkeys = Gitlab::Gpg.subkeys_from_key(gpg_key.key)
gpg_subkeys[gpg_key.primary_keyid.upcase]&.each do |subkey_data|
gpg_key.subkeys.build(keyid: subkey_data[:keyid], fingerprint: subkey_data[:fingerprint])
end
# Improve latency by doing all INSERTs in a single call
GpgKey.transaction do
gpg_key.save!
end
end
def update_signatures(gpg_key)
return unless gpg_key.subkeys.exists?
InvalidGpgSignatureUpdateWorker.perform_async(gpg_key.id)
end
end
# frozen_string_literal: true
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
class DeleteDiffFiles
class MergeRequestDiff < ActiveRecord::Base
self.table_name = 'merge_request_diffs'
belongs_to :merge_request
has_many :merge_request_diff_files
end
class MergeRequestDiffFile < ActiveRecord::Base
self.table_name = 'merge_request_diff_files'
end
DEAD_TUPLES_THRESHOLD = 50_000
VACUUM_WAIT_TIME = 5.minutes
def perform(ids)
@ids = ids
# We should reschedule until dead tuples are in a desirable
# state (e.g. < 50_000). That may take more than one reschedule.
#
if should_wait_deadtuple_vacuum?
reschedule
return
end
prune_diff_files
end
def should_wait_deadtuple_vacuum?
return false unless Gitlab::Database.postgresql?
diff_files_dead_tuples_count >= DEAD_TUPLES_THRESHOLD
end
private
def reschedule
BackgroundMigrationWorker.perform_in(VACUUM_WAIT_TIME, self.class.name.demodulize, [@ids])
end
def diffs_collection
MergeRequestDiff.where(id: @ids)
end
def diff_files_dead_tuples_count
dead_tuple =
execute_statement("SELECT n_dead_tup FROM pg_stat_all_tables "\
"WHERE relname = 'merge_request_diff_files'")[0]
dead_tuple&.fetch('n_dead_tup', 0).to_i
end
def prune_diff_files
removed = 0
updated = 0
MergeRequestDiff.transaction do
updated = diffs_collection.update_all(state: 'without_files')
removed = MergeRequestDiffFile.where(merge_request_diff_id: @ids).delete_all
end
log_info("Removed #{removed} merge_request_diff_files rows, "\
"updated #{updated} merge_request_diffs rows")
end
def execute_statement(sql)
ActiveRecord::Base.connection.execute(sql)
end
def log_info(message)
Rails.logger.info("BackgroundMigration::DeleteDiffFiles - #{message}")
end
end
end
end
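Condensed, the class above makes one decision per run: if `pg_stat_all_tables` still reports too many dead tuples, reschedule and let autovacuum catch up; otherwise prune. A sketch of just that decision (thresholds from the class, output strings illustrative):

```ruby
DEAD_TUPLES_THRESHOLD = 50_000 # same threshold as the class above
VACUUM_WAIT_MINUTES   = 5      # VACUUM_WAIT_TIME

def next_action(dead_tuples)
  if dead_tuples >= DEAD_TUPLES_THRESHOLD
    "reschedule in #{VACUUM_WAIT_MINUTES} minutes" # give autovacuum room
  else
    'prune this batch now'
  end
end

puts next_action(72_000) # => reschedule in 5 minutes
puts next_action(12_000) # => prune this batch now
```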
# frozen_string_literal: true
# rubocop:disable Metrics/MethodLength
# rubocop:disable Metrics/AbcSize
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
class DeserializeMergeRequestDiffsAndCommits
attr_reader :diff_ids, :commit_rows, :file_rows
class Error < StandardError
def backtrace
cause.backtrace
end
end
class MergeRequestDiff < ActiveRecord::Base
self.table_name = 'merge_request_diffs'
end
BUFFER_ROWS = 1000
DIFF_FILE_BUFFER_ROWS = 100
def perform(start_id, stop_id)
merge_request_diffs = MergeRequestDiff
.select(:id, :st_commits, :st_diffs)
.where('st_commits IS NOT NULL OR st_diffs IS NOT NULL')
.where(id: start_id..stop_id)
reset_buffers!
merge_request_diffs.each do |merge_request_diff|
commits, files = single_diff_rows(merge_request_diff)
diff_ids << merge_request_diff.id
commit_rows.concat(commits)
file_rows.concat(files)
if diff_ids.length > BUFFER_ROWS ||
commit_rows.length > BUFFER_ROWS ||
file_rows.length > DIFF_FILE_BUFFER_ROWS
flush_buffers!
end
end
flush_buffers!
rescue => e
Rails.logger.info("#{self.class.name}: failed for IDs #{merge_request_diffs.map(&:id)} with #{e.class.name}")
raise Error.new(e.inspect)
end
private
def reset_buffers!
@diff_ids = []
@commit_rows = []
@file_rows = []
end
def flush_buffers!
if diff_ids.any?
commit_rows.each_slice(BUFFER_ROWS).each do |commit_rows_slice|
bulk_insert('merge_request_diff_commits', commit_rows_slice)
end
file_rows.each_slice(DIFF_FILE_BUFFER_ROWS).each do |file_rows_slice|
bulk_insert('merge_request_diff_files', file_rows_slice)
end
MergeRequestDiff.where(id: diff_ids).update_all(st_commits: nil, st_diffs: nil)
end
reset_buffers!
end
def bulk_insert(table, rows)
Gitlab::Database.bulk_insert(table, rows)
rescue ActiveRecord::RecordNotUnique
ids = rows.map { |row| row[:merge_request_diff_id] }.uniq.sort
Rails.logger.info("#{self.class.name}: rows inserted twice for IDs #{ids}")
end
def single_diff_rows(merge_request_diff)
sha_attribute = Gitlab::Database::ShaAttribute.new
commits = YAML.load(merge_request_diff.st_commits) rescue []
commits ||= []
commit_rows = commits.map.with_index do |commit, index|
commit_hash = commit.to_hash.with_indifferent_access.except(:parent_ids)
sha = commit_hash.delete(:id)
commit_hash.merge(
merge_request_diff_id: merge_request_diff.id,
relative_order: index,
sha: sha_attribute.serialize(sha)
)
end
diffs = YAML.load(merge_request_diff.st_diffs) rescue []
diffs = [] unless valid_raw_diffs?(diffs)
file_rows = diffs.map.with_index do |diff, index|
diff_hash = diff.to_hash.with_indifferent_access.merge(
binary: false,
merge_request_diff_id: merge_request_diff.id,
relative_order: index
)
diff_hash.tap do |hash|
diff_text = hash[:diff]
hash[:too_large] = !!hash[:too_large]
hash[:a_mode] ||= guess_mode(hash[:new_file], hash[:diff])
hash[:b_mode] ||= guess_mode(hash[:deleted_file], hash[:diff])
# Compatibility with old diffs created with Psych.
if diff_text.encoding == Encoding::BINARY && !diff_text.ascii_only?
hash[:binary] = true
hash[:diff] = [diff_text].pack('m0')
end
end
end
[commit_rows, file_rows]
end
# This doesn't have to be 100% accurate, because it's only used for
# display - it won't change file modes in the repository. Submodules are
# created as 600, regular files as 644.
def guess_mode(file_missing, diff)
return '0' if file_missing
diff.include?('Subproject commit') ? '160000' : '100644'
end
# Unlike MergeRequestDiff#valid_raw_diff?, don't count Rugged objects as
# valid, because we don't render them usefully anyway.
def valid_raw_diffs?(diffs)
return false unless diffs.respond_to?(:each)
diffs.all? { |diff| diff.is_a?(Hash) }
end
end
end
end
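A stripped-down model of the buffer-and-flush loop in `perform` above: rows accumulate, spill whenever a buffer crosses its threshold, and flush once more at the end so a final partial buffer is never lost.

```ruby
# Toy buffer-and-flush; `flush` stands in for the bulk_insert calls.
BUFFER_ROWS = 1000
buffer  = []
flushes = 0

flush = lambda do
  flushes += 1 unless buffer.empty?
  buffer.clear
end

(1..2_501).each do |row|
  buffer << row
  flush.call if buffer.length > BUFFER_ROWS
end
flush.call # trailing flush for the partial buffer

puts flushes # => 3 (two full spills plus the final partial one)
```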
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# rubocop: disable Style/Documentation
class MergeRequestAssigneesMigrationProgressCheck
include Gitlab::Utils::StrongMemoize
RESCHEDULE_DELAY = 3.hours
WORKER = 'PopulateMergeRequestAssigneesTable'.freeze
DeadJobsError = Class.new(StandardError)
def perform
raise DeadJobsError, "Only dead background jobs in the queue for #{WORKER}" if !ongoing? && dead_jobs?
if ongoing?
BackgroundMigrationWorker.perform_in(RESCHEDULE_DELAY, self.class.name)
else
Feature.enable(:multiple_merge_request_assignees)
end
end
private
def dead_jobs?
strong_memoize(:dead_jobs) do
migration_klass.dead_jobs?(WORKER)
end
end
def ongoing?
strong_memoize(:ongoing) do
migration_klass.exists?(WORKER) || migration_klass.retrying_jobs?(WORKER)
end
end
def migration_klass
Gitlab::BackgroundMigration
end
end
# rubocop: enable Style/Documentation
end
end
# frozen_string_literal: true
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
class PopulateExternalPipelineSource
module Migratable
class Pipeline < ActiveRecord::Base
self.table_name = 'ci_pipelines'
def self.sources
{
unknown: nil,
push: 1,
web: 2,
trigger: 3,
schedule: 4,
api: 5,
external: 6
}
end
end
class CommitStatus < ActiveRecord::Base
self.table_name = 'ci_builds'
self.inheritance_column = :_type_disabled
scope :has_pipeline, -> { where('ci_builds.commit_id=ci_pipelines.id') }
scope :of_type, -> (type) { where('type=?', type) }
end
end
def perform(start_id, stop_id)
external_pipelines(start_id, stop_id)
.update_all(source: Migratable::Pipeline.sources[:external])
end
private
def external_pipelines(start_id, stop_id)
Migratable::Pipeline.where(id: (start_id..stop_id))
.where(
'EXISTS (?) AND NOT EXISTS (?)',
Migratable::CommitStatus.of_type('GenericCommitStatus').has_pipeline.select(1),
Migratable::CommitStatus.of_type('Ci::Build').has_pipeline.select(1)
)
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# This background migration creates all the records on the
# import state table for projects that are considered imports or forks
class PopulateImportState
def perform(start_id, end_id)
move_attributes_data_to_import_state(start_id, end_id)
rescue ActiveRecord::RecordNotUnique
retry
end
def move_attributes_data_to_import_state(start_id, end_id)
Rails.logger.info("#{self.class.name} - Moving import attributes data to project mirror data table: #{start_id} - #{end_id}")
ActiveRecord::Base.connection.execute <<~SQL
INSERT INTO project_mirror_data (project_id, status, jid, last_error)
SELECT id, import_status, import_jid, import_error
FROM projects
WHERE projects.import_status != 'none'
AND projects.id BETWEEN #{start_id} AND #{end_id}
AND NOT EXISTS (
SELECT id
FROM project_mirror_data
WHERE project_id = projects.id
)
SQL
ActiveRecord::Base.connection.execute <<~SQL
UPDATE projects
SET import_status = 'none'
WHERE import_status != 'none'
AND id BETWEEN #{start_id} AND #{end_id}
SQL
end
end
end
end
# frozen_string_literal: true
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
class PopulateMergeRequestMetricsWithEventsData
def perform(min_merge_request_id, max_merge_request_id)
insert_metrics_for_range(min_merge_request_id, max_merge_request_id)
update_metrics_with_events_data(min_merge_request_id, max_merge_request_id)
end
# Inserts merge_request_metrics records for merge_requests without it for
# a given merge request batch.
def insert_metrics_for_range(min, max)
metrics_not_exists_clause =
<<-SQL.strip_heredoc
NOT EXISTS (SELECT 1 FROM merge_request_metrics
WHERE merge_request_metrics.merge_request_id = merge_requests.id)
SQL
MergeRequest.where(metrics_not_exists_clause).where(id: min..max).each_batch do |batch|
select_sql = batch.select(:id, :created_at, :updated_at).to_sql
execute("INSERT INTO merge_request_metrics (merge_request_id, created_at, updated_at) #{select_sql}")
end
end
def update_metrics_with_events_data(min, max)
if Gitlab::Database.postgresql?
# Uses WITH syntax in order to update merged and closed events with a single UPDATE.
# WITH is not supported by MySQL.
update_events_for_range(min, max)
else
update_merged_events_for_range(min, max)
update_closed_events_for_range(min, max)
end
end
private
# Updates merge_request_metrics latest_closed_at, latest_closed_by_id and merged_by_id
# based on the latest event records on events table for a given merge request batch.
def update_events_for_range(min, max)
sql = <<-SQL.strip_heredoc
WITH events_for_update AS (
SELECT DISTINCT ON (target_id, action) target_id, action, author_id, updated_at
FROM events
WHERE target_id BETWEEN #{min} AND #{max}
AND target_type = 'MergeRequest'
AND action IN (#{Event::CLOSED},#{Event::MERGED})
ORDER BY target_id, action, id DESC
)
UPDATE merge_request_metrics met
SET latest_closed_at = latest_closed.updated_at,
latest_closed_by_id = latest_closed.author_id,
merged_by_id = latest_merged.author_id
FROM (SELECT * FROM events_for_update WHERE action = #{Event::CLOSED}) AS latest_closed
FULL OUTER JOIN
(SELECT * FROM events_for_update WHERE action = #{Event::MERGED}) AS latest_merged
USING (target_id)
WHERE target_id = merge_request_id;
SQL
execute(sql)
end
# Updates merge_request_metrics latest_closed_at, latest_closed_by_id based on the latest closed
# records on events table for a given merge request batch.
def update_closed_events_for_range(min, max)
sql =
<<-SQL.strip_heredoc
UPDATE merge_request_metrics metrics,
(#{select_events(min, max, Event::CLOSED)}) closed_events
SET metrics.latest_closed_by_id = closed_events.author_id,
metrics.latest_closed_at = closed_events.updated_at #{where_matches_closed_events};
SQL
execute(sql)
end
# Updates merge_request_metrics merged_by_id based on the latest merged
# records on events table for a given merge request batch.
def update_merged_events_for_range(min, max)
sql =
<<-SQL.strip_heredoc
UPDATE merge_request_metrics metrics,
(#{select_events(min, max, Event::MERGED)}) merged_events
SET metrics.merged_by_id = merged_events.author_id #{where_matches_merged_events};
SQL
execute(sql)
end
def execute(sql)
@connection ||= ActiveRecord::Base.connection
@connection.execute(sql)
end
def select_events(min, max, action)
select_max_event_id = <<-SQL.strip_heredoc
SELECT max(id)
FROM events
WHERE action = #{action}
AND target_type = 'MergeRequest'
AND target_id BETWEEN #{min} AND #{max}
GROUP BY target_id
SQL
<<-SQL.strip_heredoc
SELECT author_id, updated_at, target_id
FROM events
WHERE id IN(#{select_max_event_id})
SQL
end
def where_matches_closed_events
<<-SQL.strip_heredoc
WHERE metrics.merge_request_id = closed_events.target_id
AND metrics.latest_closed_at IS NULL
AND metrics.latest_closed_by_id IS NULL
SQL
end
def where_matches_merged_events
<<-SQL.strip_heredoc
WHERE metrics.merge_request_id = merged_events.target_id
AND metrics.merged_by_id IS NULL
SQL
end
end
end
end
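The `events_for_update` CTE above hinges on PostgreSQL's `DISTINCT ON`: it keeps one row per `(target_id, action)` group, chosen by the `ORDER BY`, so sorting by `id DESC` makes the latest event win. Isolated for clarity (assumed table):

```ruby
# DISTINCT ON keeps the first row per (target_id, action) group under the
# ORDER BY; id DESC therefore selects the most recent event per group.
puts <<~SQL
  SELECT DISTINCT ON (target_id, action) target_id, action, author_id, updated_at
  FROM events
  WHERE target_type = 'MergeRequest'
  ORDER BY target_id, action, id DESC;
SQL
```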
# frozen_string_literal: true
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
class PopulateMergeRequestMetricsWithEventsDataImproved
CLOSED_EVENT_ACTION = 3
MERGED_EVENT_ACTION = 7
def perform(min_merge_request_id, max_merge_request_id)
insert_metrics_for_range(min_merge_request_id, max_merge_request_id)
update_metrics_with_events_data(min_merge_request_id, max_merge_request_id)
end
# Inserts merge_request_metrics records for merge_requests without it for
# a given merge request batch.
def insert_metrics_for_range(min, max)
metrics_not_exists_clause =
<<-SQL.strip_heredoc
NOT EXISTS (SELECT 1 FROM merge_request_metrics
WHERE merge_request_metrics.merge_request_id = merge_requests.id)
SQL
MergeRequest.where(metrics_not_exists_clause).where(id: min..max).each_batch do |batch|
select_sql = batch.select(:id, :created_at, :updated_at).to_sql
execute("INSERT INTO merge_request_metrics (merge_request_id, created_at, updated_at) #{select_sql}")
end
end
def update_metrics_with_events_data(min, max)
if Gitlab::Database.postgresql?
psql_update_metrics_with_events_data(min, max)
else
mysql_update_metrics_with_events_data(min, max)
end
end
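# PostgreSQL supports assigning several columns from a single subquery
# (SET (a, b) = (SELECT ...)), so one UPDATE statement covers the range.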
def psql_update_metrics_with_events_data(min, max)
update_sql = <<-SQL.strip_heredoc
UPDATE merge_request_metrics
SET (latest_closed_at,
latest_closed_by_id) =
( SELECT updated_at,
author_id
FROM events
WHERE target_id = merge_request_id
AND target_type = 'MergeRequest'
AND action = #{CLOSED_EVENT_ACTION}
ORDER BY id DESC
LIMIT 1 ),
merged_by_id =
( SELECT author_id
FROM events
WHERE target_id = merge_request_id
AND target_type = 'MergeRequest'
AND action = #{MERGED_EVENT_ACTION}
ORDER BY id DESC
LIMIT 1 )
WHERE merge_request_id BETWEEN #{min} AND #{max}
SQL
execute(update_sql)
end
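# MySQL has no multi-column SET from a subquery, so each column is
# populated from its own correlated subquery.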
def mysql_update_metrics_with_events_data(min, max)
closed_updated_at_subquery = mysql_events_select(:updated_at, CLOSED_EVENT_ACTION)
closed_author_id_subquery = mysql_events_select(:author_id, CLOSED_EVENT_ACTION)
merged_author_id_subquery = mysql_events_select(:author_id, MERGED_EVENT_ACTION)
update_sql = <<-SQL.strip_heredoc
UPDATE merge_request_metrics
SET latest_closed_at = (#{closed_updated_at_subquery}),
latest_closed_by_id = (#{closed_author_id_subquery}),
merged_by_id = (#{merged_author_id_subquery})
WHERE merge_request_id BETWEEN #{min} AND #{max}
SQL
execute(update_sql)
end
def mysql_events_select(column, action)
<<-SQL.strip_heredoc
SELECT #{column} FROM events
WHERE target_id = merge_request_id
AND target_type = 'MergeRequest'
AND action = #{action}
ORDER BY id DESC
LIMIT 1
SQL
end
def execute(sql)
@connection ||= ActiveRecord::Base.connection
@connection.execute(sql)
end
end
end
end
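# --- Illustrative usage sketch, not part of this commit ---
# A post-deploy migration would typically enqueue this job over id ranges;
# the batch size and delay below are assumed values, not taken from GitLab.
MergeRequest.each_batch(of: 10_000) do |batch, index|
  # Fetch the id boundaries of this batch and schedule one job for them.
  range = batch.pluck('MIN(id)', 'MAX(id)').first
  BackgroundMigrationWorker.perform_in(
    index * 5.minutes,
    'PopulateMergeRequestMetricsWithEventsDataImproved',
    range
  )
end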
# frozen_string_literal: true
# rubocop:disable Style/Documentation
require_relative 'redact_links/redactable'
module Gitlab
module BackgroundMigration
class RedactLinks
class Note < ActiveRecord::Base
include EachBatch
include ::Gitlab::BackgroundMigration::RedactLinks::Redactable
self.table_name = 'notes'
self.inheritance_column = :_type_disabled
end
class Issue < ActiveRecord::Base
include EachBatch
include ::Gitlab::BackgroundMigration::RedactLinks::Redactable
self.table_name = 'issues'
self.inheritance_column = :_type_disabled
end
class MergeRequest < ActiveRecord::Base
include EachBatch
include ::Gitlab::BackgroundMigration::RedactLinks::Redactable
self.table_name = 'merge_requests'
self.inheritance_column = :_type_disabled
end
class Snippet < ActiveRecord::Base
include EachBatch
include ::Gitlab::BackgroundMigration::RedactLinks::Redactable
self.table_name = 'snippets'
self.inheritance_column = :_type_disabled
end
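# The SQL LIKE pattern below matches unsubscribe links containing a
# 32-character personal token: each "_" matches exactly one character.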
def perform(model_name, field, start_id, stop_id)
link_pattern = "%/sent_notifications/" + ("_" * 32) + "/unsubscribe%"
model = "Gitlab::BackgroundMigration::RedactLinks::#{model_name}".constantize
model.where("#{field} like ?", link_pattern).where(id: start_id..stop_id).each do |resource|
resource.redact_field!(field)
end
end
end
end
end
# frozen_string_literal: true
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
class RedactLinks
module Redactable
extend ActiveSupport::Concern
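# Replaces the 32-character token in unsubscribe links and clears the
# cached *_html column so the rendered HTML is regenerated without it.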
def redact_field!(field)
self[field].gsub!(%r{/sent_notifications/\h{32}/unsubscribe}, '/sent_notifications/REDACTED/unsubscribe')
if self.changed?
self.update_columns(field => self[field],
"#{field}_html" => nil)
end
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# This background migration moves all the import_state data back to the
# projects table for projects that are considered imports or forks
class RollbackImportStateData
def perform(start_id, end_id)
move_attributes_data_to_project(start_id, end_id)
end
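# MySQL and PostgreSQL use different multi-table UPDATE syntax, hence the
# two adapter-specific branches below.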
def move_attributes_data_to_project(start_id, end_id)
Rails.logger.info("#{self.class.name} - Moving import attributes data to projects table: #{start_id} - #{end_id}")
if Gitlab::Database.mysql?
ActiveRecord::Base.connection.execute <<~SQL
UPDATE projects, project_mirror_data
SET
projects.import_status = project_mirror_data.status,
projects.import_jid = project_mirror_data.jid,
projects.import_error = project_mirror_data.last_error
WHERE project_mirror_data.project_id = projects.id
AND project_mirror_data.id BETWEEN #{start_id} AND #{end_id}
SQL
else
ActiveRecord::Base.connection.execute <<~SQL
UPDATE projects
SET
import_status = project_mirror_data.status,
import_jid = project_mirror_data.jid,
import_error = project_mirror_data.last_error
FROM project_mirror_data
WHERE project_mirror_data.project_id = projects.id
AND project_mirror_data.id BETWEEN #{start_id} AND #{end_id}
SQL
end
end
end
end
end
# frozen_string_literal: true
# rubocop:disable Style/Documentation
module Gitlab
module BackgroundMigration
class ScheduleDiffFilesDeletion
class MergeRequestDiff < ActiveRecord::Base
self.table_name = 'merge_request_diffs'
belongs_to :merge_request
include EachBatch
end
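# Deletion jobs are scheduled in batches of 5_000 diff ids, spaced
# 5 minutes apart, to spread the load on the database.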
DIFF_BATCH_SIZE = 5_000
INTERVAL = 5.minutes
MIGRATION = 'DeleteDiffFiles'
def perform
diffs = MergeRequestDiff
.from("(#{diffs_collection.to_sql}) merge_request_diffs")
.where('merge_request_diffs.id != merge_request_diffs.latest_merge_request_diff_id')
.select(:id)
diffs.each_batch(of: DIFF_BATCH_SIZE) do |relation, index|
ids = relation.pluck(:id)
BackgroundMigrationWorker.perform_in(index * INTERVAL, MIGRATION, [ids])
end
end
private
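# Diffs of merged merge requests that still contain files. The latest diff
# of each merge request is excluded in #perform, as it must be kept.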
def diffs_collection
MergeRequestDiff
.joins(:merge_request)
.where("merge_requests.state = 'merged'")
.where('merge_requests.latest_merge_request_diff_id IS NOT NULL')
.where("merge_request_diffs.state NOT IN ('without_files', 'empty')")
.select('merge_requests.latest_merge_request_diff_id, merge_request_diffs.id')
end
end
end
end
......@@ -149,7 +149,7 @@ module Gitlab
# column - The name of the column to create the foreign key on.
# on_delete - The action to perform when associated data is removed,
# defaults to "CASCADE".
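# name - The name of the foreign key to create; when omitted, a name
#        is generated.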
def add_concurrent_foreign_key(source, target, column:, on_delete: :cascade)
def add_concurrent_foreign_key(source, target, column:, on_delete: :cascade, name: nil)
# Transactions would result in ALTER TABLE locks being held for the
# duration of the transaction, defeating the purpose of this method.
if transaction_open?
......@@ -167,14 +167,18 @@ module Gitlab
return
end
return add_foreign_key(source, target,
column: column,
on_delete: on_delete)
key_options = { column: column, on_delete: on_delete }
# The MySQL adapter tries to create a foreign key without a name when
# `:name` is nil, instead of generating a name for us.
key_options[:name] = name if name
return add_foreign_key(source, target, key_options)
else
on_delete = 'SET NULL' if on_delete == :nullify
end
key_name = concurrent_foreign_key_name(source, column)
key_name = name || concurrent_foreign_key_name(source, column)
unless foreign_key_exists?(source, target, column: column)
Rails.logger.warn "Foreign key not created because it exists already " \
......
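# --- Illustrative usage sketch, not part of this commit ---
# A hypothetical migration exercising the new :name option; the table,
# column and constraint name are made up for illustration.
class AddApprovalsMergeRequestIdFk < ActiveRecord::Migration[5.1]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  disable_ddl_transaction!

  def up
    add_concurrent_foreign_key :approvals, :merge_requests,
                               column: :merge_request_id,
                               on_delete: :cascade,
                               name: :fk_approvals_merge_request_id
  end

  def down
    if foreign_key_exists?(:approvals, :merge_requests, column: :merge_request_id)
      remove_foreign_key :approvals, column: :merge_request_id
    end
  end
end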
# frozen_string_literal: true
module Gitlab
module ImportExport
class Config
# Returns a Hash of the YAML file, including EE-specific data if EE is
# used.
def to_h
hash = parse_yaml
ee_hash = hash['ee']
if merge? && ee_hash
ee_hash.each do |key, value|
if key == 'project_tree'
merge_project_tree(value, hash[key])
else
merge_attributes_list(value, hash[key])
end
end
end
# We don't want to expose this section after this point, as it is no
# longer needed.
hash.delete('ee')
hash
end
# Merges a project relationships tree into the target tree.
#
# @param [Array<Hash|Symbol>] source_values
# @param [Array<Hash|Symbol>] target_values
def merge_project_tree(source_values, target_values)
source_values.each do |value|
if value.is_a?(Hash)
# Examples:
#
# { 'project_tree' => [{ 'labels' => [...] }] }
# { 'notes' => [:author, { 'events' => [:push_event_payload] }] }
value.each do |key, val|
target = target_values
.find { |h| h.is_a?(Hash) && h[key] }
if target
merge_project_tree(val, target[key])
else
target_values << { key => val.dup }
end
end
else
# Example: :priorities, :author, etc.
target_values << value
end
end
end
# Merges a Hash containing a flat list of attributes, such as the entries
# in an `excluded_attributes` section.
#
# @param [Hash] source_values
# @param [Hash] target_values
def merge_attributes_list(source_values, target_values)
source_values.each do |key, values|
target_values[key] ||= []
target_values[key].concat(values)
end
end
def merge?
Gitlab.ee?
end
def parse_yaml
YAML.load_file(Gitlab::ImportExport.config_file)
end
end
end
end
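# Minimal usage sketch: callers only ever see the merged Hash; the 'ee'
# section is folded in on EE and always removed from the result.
config = Gitlab::ImportExport::Config.new.to_h
config.key?('ee') # => false on both CE and EE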
# Model relationships to be included in the project import/export
#
# This list _must_ only contain relationships that are available to both CE and
# EE. EE specific relationships must be defined in the `ee` section further
# down below.
project_tree:
- labels:
:priorities
- :priorities
- milestones:
- events:
- :push_event_payload
......@@ -15,18 +19,18 @@ project_tree:
- :push_event_payload
- label_links:
- label:
:priorities
- :priorities
- milestone:
- events:
- :push_event_payload
- resource_label_events:
- label:
:priorities
- :priorities
- :issue_assignees
- snippets:
- :award_emoji
- notes:
:author
- :author
- releases:
- :links
- project_members:
......@@ -46,13 +50,13 @@ project_tree:
- :timelogs
- label_links:
- label:
:priorities
- :priorities
- milestone:
- events:
- :push_event_payload
- resource_label_events:
- label:
:priorities
- :priorities
- ci_pipelines:
- notes:
- :author
......@@ -121,12 +125,22 @@ excluded_attributes:
- :bfg_object_map
- :detected_repository_languages
- :tag_list
- :mirror_user_id
- :mirror_trigger_builds
- :only_mirror_protected_branches
- :pull_mirror_available_overridden
- :mirror_overwrites_diverged_branches
- :packages_enabled
- :mirror_last_update_at
- :mirror_last_successful_update_at
namespaces:
- :runners_token
- :runners_token_encrypted
project_import_state:
- :last_error
- :jid
- :last_update_at
- :last_successful_update_at
prometheus_metrics:
- :common
- :identifier
......@@ -201,3 +215,12 @@ methods:
- :action
project_badges:
- :type
# EE specific relationships and settings to include. All of this will be merged
# into the previous structures if EE is used.
ee:
project_tree:
- protected_branches:
- :unprotect_access_levels
- protected_environments:
- :deploy_access_levels
......@@ -7,7 +7,7 @@ module Gitlab
def initialize(shared:)
@shared = shared
config_hash = YAML.load_file(Gitlab::ImportExport.config_file).deep_symbolize_keys
config_hash = ImportExport::Config.new.to_h.deep_symbolize_keys
@tree = config_hash[:project_tree]
@attributes_finder = Gitlab::ImportExport::AttributesFinder.new(included_attributes: config_hash[:included_attributes],
excluded_attributes: config_hash[:excluded_attributes],
......
......@@ -7,7 +7,7 @@ namespace :gitlab do
desc "GitLab | Display exported DB structure"
task data: :environment do
puts YAML.load_file(Gitlab::ImportExport.config_file)['project_tree'].to_yaml(SortKeys: true)
puts Gitlab::ImportExport::Config.new.to_h['project_tree'].to_yaml(SortKeys: true)
end
desc 'GitLab | Bumps the Import/Export version in fixtures and project templates'
......
......@@ -9,9 +9,13 @@ describe 'Database schema' do
# Use if you are certain that this column should not have a foreign key
IGNORED_FK_COLUMNS = {
abuse_reports: %w[reporter_id user_id],
application_settings: %w[performance_bar_allowed_group_id],
application_settings: %w[performance_bar_allowed_group_id slack_app_id snowplow_site_id],
approvers: %w[target_id user_id],
approvals: %w[user_id],
approver_groups: %w[target_id],
audit_events: %w[author_id entity_id],
award_emoji: %w[awardable_id user_id],
boards: %w[milestone_id],
chat_names: %w[chat_id service_id team_id user_id],
chat_teams: %w[team_id],
ci_builds: %w[erased_by_id runner_id trigger_request_id user_id],
......@@ -21,15 +25,25 @@ describe 'Database schema' do
cluster_providers_gcp: %w[gcp_project_id operation_id],
deploy_keys_projects: %w[deploy_key_id],
deployments: %w[deployable_id environment_id user_id],
draft_notes: %w[discussion_id],
emails: %w[user_id],
events: %w[target_id],
epics: %w[updated_by_id last_edited_by_id start_date_sourcing_milestone_id due_date_sourcing_milestone_id],
forked_project_links: %w[forked_from_project_id],
geo_event_log: %w[hashed_storage_attachments_event_id],
geo_job_artifact_deleted_events: %w[job_artifact_id],
geo_lfs_object_deleted_events: %w[lfs_object_id],
geo_node_statuses: %w[last_event_id cursor_last_event_id],
geo_nodes: %w[oauth_application_id],
geo_repository_deleted_events: %w[project_id],
geo_upload_deleted_events: %w[upload_id model_id],
identities: %w[user_id],
issues: %w[last_edited_by_id state_id],
jira_tracker_data: %w[jira_issue_transition_id],
keys: %w[user_id],
label_links: %w[target_id],
lfs_objects_projects: %w[lfs_object_id project_id],
ldap_group_links: %w[group_id],
members: %w[source_id created_by_id],
merge_requests: %w[last_edited_by_id state_id],
namespaces: %w[owner_id parent_id],
......@@ -40,7 +54,7 @@ describe 'Database schema' do
oauth_applications: %w[owner_id],
project_group_links: %w[group_id],
project_statistics: %w[namespace_id],
projects: %w[creator_id namespace_id ci_id],
projects: %w[creator_id namespace_id ci_id mirror_user_id],
redirect_routes: %w[source_id],
repository_languages: %w[programming_language_id],
routes: %w[source_id],
......@@ -48,14 +62,17 @@ describe 'Database schema' do
snippets: %w[author_id],
spam_logs: %w[user_id],
subscriptions: %w[user_id subscribable_id],
slack_integrations: %w[team_id user_id],
taggings: %w[tag_id taggable_id tagger_id],
timelogs: %w[user_id],
todos: %w[target_id commit_id],
uploads: %w[model_id],
user_agent_details: %w[subject_id],
users: %w[color_scheme_id created_by_id theme_id],
users: %w[color_scheme_id created_by_id theme_id email_opted_in_source_id],
users_star_projects: %w[user_id],
web_hooks: %w[service_id],
vulnerability_identifiers: %w[external_id],
vulnerability_scanners: %w[external_id],
web_hooks: %w[service_id group_id],
suggestions: %w[commit_id]
}.with_indifferent_access.freeze
......
......@@ -10,7 +10,7 @@ describe 'Import/Export - project export integration test', :js do
let(:user) { create(:admin) }
let(:export_path) { "#{Dir.tmpdir}/import_file_spec" }
let(:config_hash) { YAML.load_file(Gitlab::ImportExport.config_file).deep_stringify_keys }
let(:config_hash) { Gitlab::ImportExport::Config.new.to_h.deep_stringify_keys }
let(:sensitive_words) { %w[pass secret token key encrypted html] }
let(:safe_list) do
......
require 'spec_helper'
describe Gitlab::BackgroundMigration::CreateGpgKeySubkeysFromGpgKeys, :migration, schema: 20171005130944 do
context 'when GpgKey exists' do
let!(:gpg_key) { create(:gpg_key, key: GpgHelpers::User3.public_key) } # rubocop:disable RSpec/FactoriesInMigrationSpecs
before do
GpgKeySubkey.destroy_all # rubocop: disable DestroyAll
end
it 'generates the subkeys' do
expect do
described_class.new.perform(gpg_key.id)
end.to change { gpg_key.subkeys.count }.from(0).to(2)
end
it 'schedules the signature update worker' do
expect(InvalidGpgSignatureUpdateWorker).to receive(:perform_async).with(gpg_key.id)
described_class.new.perform(gpg_key.id)
end
end
context 'when GpgKey does not exist' do
it 'does not do anything' do
expect(Gitlab::Gpg).not_to receive(:subkeys_from_key)
expect(InvalidGpgSignatureUpdateWorker).not_to receive(:perform_async)
described_class.new.perform(123)
end
end
end
require 'spec_helper'
# rubocop:disable RSpec/FactoriesInMigrationSpecs
describe Gitlab::BackgroundMigration::DeleteDiffFiles, :migration, :sidekiq, schema: 20180619121030 do
describe '#perform' do
before do
# This migration was created before we introduced ProjectCiCdSetting#default_git_depth
allow_any_instance_of(ProjectCiCdSetting).to receive(:default_git_depth=).and_return(0)
allow_any_instance_of(ProjectCiCdSetting).to receive(:default_git_depth).and_return(nil)
end
context 'when diff files can be deleted' do
let(:merge_request) { create(:merge_request, :merged) }
let!(:merge_request_diff) do
merge_request.create_merge_request_diff
merge_request.merge_request_diffs.first
end
let(:perform) do
described_class.new.perform(MergeRequestDiff.pluck(:id))
end
it 'deletes all merge request diff files' do
expect { perform }
.to change { merge_request_diff.merge_request_diff_files.count }
.from(20).to(0)
end
it 'updates state to without_files' do
expect { perform }
.to change { merge_request_diff.reload.state }
.from('collected').to('without_files')
end
it 'rolls back if something goes wrong' do
expect(described_class::MergeRequestDiffFile).to receive_message_chain(:where, :delete_all)
.and_raise
expect { perform }
.to raise_error
merge_request_diff.reload
expect(merge_request_diff.state).to eq('collected')
expect(merge_request_diff.merge_request_diff_files.count).to eq(20)
end
end
it 'reschedules itself when should_wait_deadtuple_vacuum' do
merge_request = create(:merge_request, :merged)
first_diff = merge_request.merge_request_diff
second_diff = merge_request.create_merge_request_diff
Sidekiq::Testing.fake! do
worker = described_class.new
allow(worker).to receive(:should_wait_deadtuple_vacuum?) { true }
worker.perform([first_diff.id, second_diff.id])
expect(described_class.name.demodulize).to be_scheduled_delayed_migration(5.minutes, [first_diff.id, second_diff.id])
expect(BackgroundMigrationWorker.jobs.size).to eq(1)
end
end
end
describe '#should_wait_deadtuple_vacuum?' do
it 'returns true when merge_request_diff_files hits DEAD_TUPLES_THRESHOLD', :postgresql do
worker = described_class.new
threshold_query_result = [{ "n_dead_tup" => described_class::DEAD_TUPLES_THRESHOLD.to_s }]
normal_query_result = [{ "n_dead_tup" => '3' }]
allow(worker)
.to receive(:execute_statement)
.with(/SELECT n_dead_tup */)
.and_return(threshold_query_result, normal_query_result)
expect(worker.should_wait_deadtuple_vacuum?).to be(true)
end
end
end
# rubocop:enable RSpec/FactoriesInMigrationSpecs
require 'spec_helper'
# rubocop:disable RSpec/FactoriesInMigrationSpecs
describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :migration, schema: 20171114162227 do
include GitHelpers
let(:merge_request_diffs) { table(:merge_request_diffs) }
let(:merge_requests) { table(:merge_requests) }
describe '#perform' do
let(:project) { create(:project, :repository) }
let(:merge_request) { merge_requests.create!(iid: 1, target_project_id: project.id, source_project_id: project.id, target_branch: 'feature', source_branch: 'master').becomes(MergeRequest) }
let(:merge_request_diff) { MergeRequest.find(merge_request.id).create_merge_request_diff }
let(:updated_merge_request_diff) { MergeRequestDiff.find(merge_request_diff.id) }
let(:rugged) { rugged_repo(project.repository) }
before do
allow_any_instance_of(MergeRequestDiff)
.to receive(:commits_count=).and_return(nil)
end
def diffs_to_hashes(diffs)
diffs.as_json(only: Gitlab::Git::Diff::SERIALIZE_KEYS).map(&:with_indifferent_access)
end
def quote_yaml(value)
MergeRequestDiff.connection.quote(YAML.dump(value))
end
def convert_to_yaml(merge_request_diff_id, commits, diffs)
MergeRequestDiff.where(id: merge_request_diff_id).update_all(
"st_commits = #{quote_yaml(commits)}, st_diffs = #{quote_yaml(diffs)}"
)
end
shared_examples 'updated MR diff' do
before do
convert_to_yaml(merge_request_diff.id, commits, diffs)
MergeRequestDiffCommit.delete_all
MergeRequestDiffFile.delete_all
subject.perform(merge_request_diff.id, merge_request_diff.id)
end
it 'creates correct entries in the merge_request_diff_commits table' do
expect(updated_merge_request_diff.merge_request_diff_commits.count).to eq(expected_commits.count)
expect(updated_merge_request_diff.commits.map(&:to_hash)).to eq(expected_commits)
end
it 'creates correct entries in the merge_request_diff_files table' do
expect(updated_merge_request_diff.merge_request_diff_files.count).to eq(expected_diffs.count)
expect(diffs_to_hashes(updated_merge_request_diff.raw_diffs)).to eq(expected_diffs)
end
it 'sets the st_commits and st_diffs columns to nil' do
expect(updated_merge_request_diff.st_commits_before_type_cast).to be_nil
expect(updated_merge_request_diff.st_diffs_before_type_cast).to be_nil
end
end
context 'when the diff IDs passed do not exist' do
it 'does not raise' do
expect { subject.perform(0, 0) }.not_to raise_exception
end
end
context 'when the merge request diff has no serialised commits or diffs' do
before do
merge_request_diff.update(st_commits: nil, st_diffs: nil)
end
it 'does not raise' do
expect { subject.perform(merge_request_diff.id, merge_request_diff.id) }
.not_to raise_exception
end
end
context 'processing multiple merge request diffs' do
let(:start_id) { described_class::MergeRequestDiff.minimum(:id) }
let(:stop_id) { described_class::MergeRequestDiff.maximum(:id) }
before do
merge_request.create_merge_request_diff
convert_to_yaml(start_id, merge_request_diff.commits, diffs_to_hashes(merge_request_diff.merge_request_diff_files))
convert_to_yaml(stop_id, updated_merge_request_diff.commits, diffs_to_hashes(updated_merge_request_diff.merge_request_diff_files))
MergeRequestDiffCommit.delete_all
MergeRequestDiffFile.delete_all
end
context 'when BUFFER_ROWS is exceeded' do
before do
stub_const("#{described_class}::BUFFER_ROWS", 1)
allow(Gitlab::Database).to receive(:bulk_insert).and_call_original
end
it 'inserts commit rows in chunks of BUFFER_ROWS' do
# There are 29 commits in each diff, so we should have slices of 20 + 9 + 20 + 9.
stub_const("#{described_class}::BUFFER_ROWS", 20)
expect(Gitlab::Database).to receive(:bulk_insert)
.with('merge_request_diff_commits', anything)
.exactly(4)
.times
.and_call_original
subject.perform(start_id, stop_id)
end
it 'inserts diff rows in chunks of DIFF_FILE_BUFFER_ROWS' do
# There are 20 files in each diff, so we should have slices of 20 + 20.
stub_const("#{described_class}::DIFF_FILE_BUFFER_ROWS", 20)
expect(Gitlab::Database).to receive(:bulk_insert)
.with('merge_request_diff_files', anything)
.exactly(2)
.times
.and_call_original
subject.perform(start_id, stop_id)
end
end
context 'when BUFFER_ROWS is not exceeded' do
it 'performs a single bulk insert per table' do
expect(Gitlab::Database).to receive(:bulk_insert)
.with('merge_request_diff_commits', anything)
.once
.and_call_original
expect(Gitlab::Database).to receive(:bulk_insert)
.with('merge_request_diff_files', anything)
.once
.and_call_original
subject.perform(start_id, stop_id)
end
end
context 'when some rows were already inserted due to a previous failure' do
before do
subject.perform(start_id, stop_id)
convert_to_yaml(start_id, merge_request_diff.commits, diffs_to_hashes(merge_request_diff.merge_request_diff_files))
convert_to_yaml(stop_id, updated_merge_request_diff.commits, diffs_to_hashes(updated_merge_request_diff.merge_request_diff_files))
end
it 'does not raise' do
expect { subject.perform(start_id, stop_id) }.not_to raise_exception
end
it 'logs a message' do
expect(Rails.logger).to receive(:info)
.with(
a_string_matching(described_class.name).and(matching([start_id, stop_id].inspect))
)
.twice
subject.perform(start_id, stop_id)
end
it 'ends up with the correct rows' do
expect(updated_merge_request_diff.commits.count).to eq(29)
expect(updated_merge_request_diff.raw_diffs.count).to eq(20)
end
end
context 'when the merge request diff update fails' do
let(:exception) { ActiveRecord::RecordNotFound }
let(:perform_ignoring_exceptions) do
subject.perform(start_id, stop_id)
rescue described_class::Error
end
before do
allow_any_instance_of(ActiveRecord::Relation)
.to receive(:update_all).and_raise(exception)
end
it 'raises an error' do
expect { subject.perform(start_id, stop_id) }
.to raise_exception(described_class::Error)
end
it 'logs the error' do
expect(Rails.logger).to receive(:info).with(
a_string_matching(described_class.name)
.and(matching([start_id, stop_id].inspect))
.and(matching(exception.name))
)
perform_ignoring_exceptions
end
it 'still adds diff commits' do
expect { perform_ignoring_exceptions }
.to change { MergeRequestDiffCommit.count }
end
it 'still adds diff files' do
expect { perform_ignoring_exceptions }
.to change { MergeRequestDiffFile.count }
end
end
end
context 'when the merge request diff has valid commits and diffs' do
let(:commits) { merge_request_diff.commits.map(&:to_hash) }
let(:expected_commits) { commits }
let(:diffs) { diffs_to_hashes(merge_request_diff.merge_request_diff_files) }
let(:expected_diffs) { diffs }
include_examples 'updated MR diff'
end
context 'when the merge request diff has diffs but no commits' do
let(:commits) { nil }
let(:expected_commits) { [] }
let(:diffs) { diffs_to_hashes(merge_request_diff.merge_request_diff_files) }
let(:expected_diffs) { diffs }
include_examples 'updated MR diff'
end
context 'when the merge request diffs do not have too_large set' do
let(:commits) { merge_request_diff.commits.map(&:to_hash) }
let(:expected_commits) { commits }
let(:expected_diffs) { diffs_to_hashes(merge_request_diff.merge_request_diff_files) }
let(:diffs) do
expected_diffs.map { |diff| diff.except(:too_large) }
end
include_examples 'updated MR diff'
end
context 'when the merge request diffs do not have a_mode and b_mode set' do
let(:commits) { merge_request_diff.commits.map(&:to_hash) }
let(:expected_commits) { commits }
let(:expected_diffs) { diffs_to_hashes(merge_request_diff.merge_request_diff_files) }
let(:diffs) do
expected_diffs.map { |diff| diff.except(:a_mode, :b_mode) }
end
include_examples 'updated MR diff'
end
context 'when the merge request diffs have binary content' do
let(:commits) { merge_request_diff.commits.map(&:to_hash) }
let(:expected_commits) { commits }
let(:expected_diffs) { diffs }
# The start of a PDF created by Illustrator
let(:binary_string) do
"\x25\x50\x44\x46\x2d\x31\x2e\x35\x0d\x25\xe2\xe3\xcf\xd3\x0d\x0a".force_encoding(Encoding::BINARY)
end
let(:diffs) do
[
{
'diff' => binary_string,
'new_path' => 'path',
'old_path' => 'path',
'a_mode' => '100644',
'b_mode' => '100644',
'new_file' => false,
'renamed_file' => false,
'deleted_file' => false,
'too_large' => false
}
]
end
include_examples 'updated MR diff'
end
context 'when the merge request diff has commits, but no diffs' do
let(:commits) { merge_request_diff.commits.map(&:to_hash) }
let(:expected_commits) { commits }
let(:diffs) { [] }
let(:expected_diffs) { diffs }
include_examples 'updated MR diff'
end
context 'when the merge request diffs have invalid content' do
let(:commits) { merge_request_diff.commits.map(&:to_hash) }
let(:expected_commits) { commits }
let(:diffs) { ['--broken-diff'] }
let(:expected_diffs) { [] }
include_examples 'updated MR diff'
end
context 'when the merge request diffs are Rugged::Patch instances' do
let(:commits) { merge_request_diff.commits.map(&:to_hash) }
let(:first_commit) { project.repository.commit(merge_request_diff.head_commit_sha) }
let(:expected_commits) { commits }
let(:diffs) { rugged_diff(first_commit.sha).patches }
let(:expected_diffs) { [] }
include_examples 'updated MR diff'
end
context 'when the merge request diffs are Rugged::Diff::Delta instances' do
let(:commits) { merge_request_diff.commits.map(&:to_hash) }
let(:first_commit) { project.repository.commit(merge_request_diff.head_commit_sha) }
let(:expected_commits) { commits }
let(:diffs) { rugged_diff(first_commit.sha).deltas }
let(:expected_diffs) { [] }
include_examples 'updated MR diff'
end
def rugged_diff(commit_sha)
rugged_commit = rugged.lookup(commit_sha)
rugged_commit.parents[0].diff(rugged_commit)
end
end
end
# rubocop:enable RSpec/FactoriesInMigrationSpecs
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::BackgroundMigration::MergeRequestAssigneesMigrationProgressCheck do
context 'rescheduling' do
context 'when there are ongoing jobs and no dead jobs' do
it 'reschedules check' do
allow(Gitlab::BackgroundMigration).to receive(:exists?)
.with('PopulateMergeRequestAssigneesTable')
.and_return(true)
allow(Gitlab::BackgroundMigration).to receive(:dead_jobs?)
.with('PopulateMergeRequestAssigneesTable')
.and_return(false)
expect(BackgroundMigrationWorker).to receive(:perform_in).with(described_class::RESCHEDULE_DELAY, described_class.name)
described_class.new.perform
end
end
context 'when there are ongoing and dead jobs' do
it 'reschedules check' do
allow(Gitlab::BackgroundMigration).to receive(:exists?)
.with('PopulateMergeRequestAssigneesTable')
.and_return(true)
allow(Gitlab::BackgroundMigration).to receive(:dead_jobs?)
.with('PopulateMergeRequestAssigneesTable')
.and_return(true)
expect(BackgroundMigrationWorker).to receive(:perform_in).with(described_class::RESCHEDULE_DELAY, described_class.name)
described_class.new.perform
end
end
context 'when there are retrying jobs and none scheduled' do
it 'reschedules check' do
allow(Gitlab::BackgroundMigration).to receive(:exists?)
.with('PopulateMergeRequestAssigneesTable')
.and_return(false)
allow(Gitlab::BackgroundMigration).to receive(:retrying_jobs?)
.with('PopulateMergeRequestAssigneesTable')
.and_return(true)
expect(BackgroundMigrationWorker).to receive(:perform_in).with(described_class::RESCHEDULE_DELAY, described_class.name)
described_class.new.perform
end
end
end
context 'when there are no scheduled, retrying or dead jobs' do
it 'enables feature' do
allow(Gitlab::BackgroundMigration).to receive(:exists?)
.with('PopulateMergeRequestAssigneesTable')
.and_return(false)
allow(Gitlab::BackgroundMigration).to receive(:retrying_jobs?)
.with('PopulateMergeRequestAssigneesTable')
.and_return(false)
allow(Gitlab::BackgroundMigration).to receive(:dead_jobs?)
.with('PopulateMergeRequestAssigneesTable')
.and_return(false)
expect(Feature).to receive(:enable).with(:multiple_merge_request_assignees)
described_class.new.perform
end
end
context 'when there are only dead jobs' do
it 'raises a DeadJobsError' do
allow(Gitlab::BackgroundMigration).to receive(:exists?)
.with('PopulateMergeRequestAssigneesTable')
.and_return(false)
allow(Gitlab::BackgroundMigration).to receive(:retrying_jobs?)
.with('PopulateMergeRequestAssigneesTable')
.and_return(false)
allow(Gitlab::BackgroundMigration).to receive(:dead_jobs?)
.with('PopulateMergeRequestAssigneesTable')
.and_return(true)
expect { described_class.new.perform }
.to raise_error(described_class::DeadJobsError,
"Only dead background jobs in the queue for #{described_class::WORKER}")
end
end
end
# frozen_string_literal: true
require 'spec_helper'
# rubocop:disable RSpec/FactoriesInMigrationSpecs
describe Gitlab::BackgroundMigration::PopulateExternalPipelineSource, :migration, schema: 20180916011959 do
let(:migration) { described_class.new }
before do
# This migration was created before we introduced metadata configs
stub_feature_flags(ci_build_metadata_config: false)
# This migration was created before we introduced ProjectCiCdSetting#default_git_depth
allow_any_instance_of(ProjectCiCdSetting).to receive(:default_git_depth).and_return(nil)
allow_any_instance_of(ProjectCiCdSetting).to receive(:default_git_depth=).and_return(0)
end
let!(:internal_pipeline) { create(:ci_pipeline, source: :web) }
let(:pipelines) { [internal_pipeline, unknown_pipeline].map(&:id) }
let!(:unknown_pipeline) do
build(:ci_pipeline, source: :unknown)
.tap { |pipeline| pipeline.save(validate: false) }
end
subject { migration.perform(pipelines.min, pipelines.max) }
shared_examples 'no changes' do
it 'does not change the pipeline source' do
expect { subject }.not_to change { unknown_pipeline.reload.source }
end
end
context 'when unknown pipeline is external' do
before do
create(:generic_commit_status, pipeline: unknown_pipeline)
end
it 'populates the pipeline source' do
subject
expect(unknown_pipeline.reload.source).to eq('external')
end
it 'can be repeated without effect' do
subject
expect { subject }.not_to change { unknown_pipeline.reload.source }
end
end
context 'when unknown pipeline has just a build' do
before do
create(:ci_build, pipeline: unknown_pipeline)
end
it_behaves_like 'no changes'
end
context 'when unknown pipeline has no statuses' do
it_behaves_like 'no changes'
end
context 'when unknown pipeline has a build and a status' do
before do
create(:generic_commit_status, pipeline: unknown_pipeline)
create(:ci_build, pipeline: unknown_pipeline)
end
it_behaves_like 'no changes'
end
end
# rubocop:enable RSpec/FactoriesInMigrationSpecs
require 'spec_helper'
describe Gitlab::BackgroundMigration::PopulateImportState, :migration, schema: 20180502134117 do
let(:migration) { described_class.new }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:import_state) { table(:project_mirror_data) }
before do
namespaces.create(id: 1, name: 'gitlab-org', path: 'gitlab-org')
projects.create!(id: 1, namespace_id: 1, name: 'gitlab1',
path: 'gitlab1', import_error: "foo", import_status: :started,
import_url: generate(:url))
projects.create!(id: 2, namespace_id: 1, name: 'gitlab2', path: 'gitlab2',
import_status: :none, import_url: generate(:url))
projects.create!(id: 3, namespace_id: 1, name: 'gitlab3',
path: 'gitlab3', import_error: "bar", import_status: :failed,
import_url: generate(:url))
allow(BackgroundMigrationWorker).to receive(:perform_in)
end
it "creates new import_state records with project's import data" do
expect(projects.where.not(import_status: :none).count).to eq(2)
expect do
migration.perform(1, 3)
end.to change { import_state.all.count }.from(0).to(2)
expect(import_state.first.last_error).to eq("foo")
expect(import_state.last.last_error).to eq("bar")
expect(import_state.first.status).to eq("started")
expect(import_state.last.status).to eq("failed")
expect(projects.first.import_status).to eq("none")
expect(projects.last.import_status).to eq("none")
end
end
# frozen_string_literal: true
require 'rails_helper'
describe Gitlab::BackgroundMigration::PopulateMergeRequestMetricsWithEventsDataImproved, :migration, schema: 20181204154019 do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:users) { table(:users) }
let(:events) { table(:events) }
let(:user) { users.create!(email: 'test@example.com', projects_limit: 100, username: 'test') }
let(:namespace) { namespaces.create(name: 'gitlab', path: 'gitlab-org') }
let(:project) { projects.create(namespace_id: namespace.id, name: 'foo') }
let(:merge_requests) { table(:merge_requests) }
def create_merge_request(id, params = {})
params.merge!(id: id,
target_project_id: project.id,
target_branch: 'master',
source_project_id: project.id,
source_branch: 'mr name',
title: "mr name#{id}")
merge_requests.create(params)
end
def create_merge_request_event(id, params = {})
params.merge!(id: id,
project_id: project.id,
author_id: user.id,
target_type: 'MergeRequest')
events.create(params)
end
describe '#perform' do
it 'creates and updates closed and merged events' do
timestamp = Time.new('2018-01-01 12:00:00').utc
create_merge_request(1)
create_merge_request_event(1, target_id: 1, action: 3, updated_at: timestamp)
create_merge_request_event(2, target_id: 1, action: 3, updated_at: timestamp + 10.seconds)
create_merge_request_event(3, target_id: 1, action: 7, updated_at: timestamp)
create_merge_request_event(4, target_id: 1, action: 7, updated_at: timestamp + 10.seconds)
subject.perform(1, 1)
merge_request = MergeRequest.first
expect(merge_request.metrics).to have_attributes(latest_closed_by_id: user.id,
latest_closed_at: timestamp + 10.seconds,
merged_by_id: user.id)
end
end
end
require 'rails_helper'
# rubocop:disable RSpec/FactoriesInMigrationSpecs
describe Gitlab::BackgroundMigration::PopulateMergeRequestMetricsWithEventsData, :migration, schema: 20171128214150 do
# The commits_count attribute is added in a later migration
before do
allow_any_instance_of(MergeRequestDiff)
.to receive(:commits_count=).and_return(nil)
end
describe '#perform' do
let(:mr_with_event) { create(:merge_request) }
let!(:merged_event) { create(:event, :merged, target: mr_with_event) }
let!(:closed_event) { create(:event, :closed, target: mr_with_event) }
before do
# Make sure no metrics created through after_* callbacks are kept.
mr_with_event.metrics.destroy!
end
it 'inserts metrics and updates closed and merged events' do
subject.perform(mr_with_event.id, mr_with_event.id)
mr_with_event.reload
expect(mr_with_event.metrics).to have_attributes(latest_closed_by_id: closed_event.author_id,
merged_by_id: merged_event.author_id)
expect(mr_with_event.metrics.latest_closed_at.to_s).to eq(closed_event.updated_at.to_s)
end
end
describe '#insert_metrics_for_range' do
let!(:mrs_without_metrics) { create_list(:merge_request, 3) }
let!(:mrs_with_metrics) { create_list(:merge_request, 2) }
before do
# Make sure no metrics created through after_* callbacks are kept.
mrs_without_metrics.each { |m| m.metrics.destroy! }
end
it 'inserts merge_request_metrics for merge_requests without one' do
expect { subject.insert_metrics_for_range(MergeRequest.first.id, MergeRequest.last.id) }
.to change(MergeRequest::Metrics, :count).from(2).to(5)
mrs_without_metrics.each do |mr_without_metrics|
expect(mr_without_metrics.reload.metrics).to be_present
end
end
it 'does not insert merge_request_metrics for MRs out of the given range' do
expect { subject.insert_metrics_for_range(mrs_with_metrics.first.id, mrs_with_metrics.last.id) }
.not_to change(MergeRequest::Metrics, :count).from(2)
end
end
describe '#update_metrics_with_events_data' do
context 'closed events data update' do
let(:users) { create_list(:user, 3) }
let(:mrs_with_event) { create_list(:merge_request, 3) }
before do
create_list(:event, 2, :closed, author: users.first, target: mrs_with_event.first)
create_list(:event, 3, :closed, author: users.second, target: mrs_with_event.second)
create(:event, :closed, author: users.third, target: mrs_with_event.third)
end
it 'migrates multiple MR metrics with closed event data' do
mr_without_event = create(:merge_request)
create(:event, :merged)
subject.update_metrics_with_events_data(mrs_with_event.first.id, mrs_with_event.last.id)
mrs_with_event.each do |mr_with_event|
latest_event = Event.where(action: 3, target: mr_with_event).last
mr_with_event.metrics.reload
expect(mr_with_event.metrics.latest_closed_by).to eq(latest_event.author)
expect(mr_with_event.metrics.latest_closed_at.to_s).to eq(latest_event.updated_at.to_s)
end
expect(mr_without_event.metrics.reload).to have_attributes(latest_closed_by_id: nil,
latest_closed_at: nil)
end
it 'does not update metrics out of the given range' do
out_of_range_mr = create(:merge_request)
create(:event, :closed, author: users.last, target: out_of_range_mr)
expect { subject.perform(mrs_with_event.first.id, mrs_with_event.second.id) }
.not_to change { out_of_range_mr.metrics.reload.merged_by }
.from(nil)
end
end
context 'merged events data update' do
let(:users) { create_list(:user, 3) }
let(:mrs_with_event) { create_list(:merge_request, 3) }
before do
create_list(:event, 2, :merged, author: users.first, target: mrs_with_event.first)
create_list(:event, 3, :merged, author: users.second, target: mrs_with_event.second)
create(:event, :merged, author: users.third, target: mrs_with_event.third)
end
it 'migrates multiple MR metrics with merged event data' do
mr_without_event = create(:merge_request)
create(:event, :merged)
subject.update_metrics_with_events_data(mrs_with_event.first.id, mrs_with_event.last.id)
mrs_with_event.each do |mr_with_event|
latest_event = Event.where(action: Event::MERGED, target: mr_with_event).last
expect(mr_with_event.metrics.reload.merged_by).to eq(latest_event.author)
end
expect(mr_without_event.metrics.reload).to have_attributes(merged_by_id: nil)
end
it 'does not update metrics out of the given range' do
out_of_range_mr = create(:merge_request)
create(:event, :merged, author: users.last, target: out_of_range_mr)
expect { subject.perform(mrs_with_event.first.id, mrs_with_event.second.id) }
.not_to change { out_of_range_mr.metrics.reload.merged_by }
.from(nil)
end
end
end
end
# rubocop:enable RSpec/FactoriesInMigrationSpecs
require 'spec_helper'
describe Gitlab::BackgroundMigration::RedactLinks, :migration, schema: 20181014121030 do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:issues) { table(:issues) }
let(:notes) { table(:notes) }
let(:snippets) { table(:snippets) }
let(:users) { table(:users) }
let(:merge_requests) { table(:merge_requests) }
let(:namespace) { namespaces.create(name: 'gitlab', path: 'gitlab-org') }
let(:project) { projects.create(namespace_id: namespace.id, name: 'foo') }
let(:user) { users.create!(email: 'test@example.com', projects_limit: 100, username: 'test') }
def create_merge_request(id, params)
params.merge!(id: id,
target_project_id: project.id,
target_branch: 'master',
source_project_id: project.id,
source_branch: 'mr name',
title: "mr name#{id}")
merge_requests.create(params)
end
def create_issue(id, params)
params.merge!(id: id, title: "issue#{id}", project_id: project.id)
issues.create(params)
end
def create_note(id, params)
params[:id] = id
notes.create(params)
end
def create_snippet(id, params)
params.merge!(id: id, author_id: user.id)
snippets.create(params)
end
def create_resource(model, id, params)
send("create_#{model.name.underscore}", id, params)
end
shared_examples_for 'redactable resource' do
it 'updates only matching texts' do
matching_text = 'some text /sent_notifications/00000000000000000000000000000000/unsubscribe more text'
redacted_text = 'some text /sent_notifications/REDACTED/unsubscribe more text'
create_resource(model, 1, { field => matching_text })
create_resource(model, 2, { field => 'not matching text' })
create_resource(model, 3, { field => matching_text })
create_resource(model, 4, { field => redacted_text })
create_resource(model, 5, { field => matching_text })
expected = { field => 'some text /sent_notifications/REDACTED/unsubscribe more text',
"#{field}_html" => nil }
expect_any_instance_of("Gitlab::BackgroundMigration::RedactLinks::#{model}".constantize).to receive(:update_columns).with(expected).and_call_original
subject.perform(model, field, 2, 4)
expect(model.where(field => matching_text).pluck(:id)).to eq [1, 5]
expect(model.find(3).reload[field]).to eq redacted_text
end
end
context 'resource is Issue' do
it_behaves_like 'redactable resource' do
let(:model) { Issue }
let(:field) { :description }
end
end
context 'resource is Merge Request' do
it_behaves_like 'redactable resource' do
let(:model) { MergeRequest }
let(:field) { :description }
end
end
context 'resource is Note' do
it_behaves_like 'redactable resource' do
let(:model) { Note }
let(:field) { :note }
end
end
context 'resource is Snippet' do
it_behaves_like 'redactable resource' do
let(:model) { Snippet }
let(:field) { :description }
end
end
end
require 'spec_helper'
describe Gitlab::BackgroundMigration::RollbackImportStateData, :migration, schema: 20180502134117 do
let(:migration) { described_class.new }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:import_state) { table(:project_mirror_data) }
before do
namespaces.create(id: 1, name: 'gitlab-org', path: 'gitlab-org')
projects.create!(id: 1, namespace_id: 1, name: 'gitlab1', import_url: generate(:url))
projects.create!(id: 2, namespace_id: 1, name: 'gitlab2', path: 'gitlab2', import_url: generate(:url))
import_state.create!(id: 1, project_id: 1, status: :started, last_error: "foo")
import_state.create!(id: 2, project_id: 2, status: :failed)
allow(BackgroundMigrationWorker).to receive(:perform_in)
end
it "creates new import_state records with project's import data" do
migration.perform(1, 2)
expect(projects.first.import_status).to eq("started")
expect(projects.second.import_status).to eq("failed")
expect(projects.first.import_error).to eq("foo")
end
end
require 'spec_helper'
describe Gitlab::BackgroundMigration::ScheduleDiffFilesDeletion, :migration, :sidekiq, schema: 20180619121030 do
describe '#perform' do
let(:merge_request_diffs) { table(:merge_request_diffs) }
let(:merge_requests) { table(:merge_requests) }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
before do
stub_const("#{described_class.name}::DIFF_BATCH_SIZE", 3)
namespaces.create!(id: 1, name: 'gitlab', path: 'gitlab')
projects.create!(id: 1, namespace_id: 1, name: 'gitlab', path: 'gitlab')
merge_requests.create!(id: 1, target_project_id: 1, source_project_id: 1, target_branch: 'feature', source_branch: 'master', state: 'merged')
merge_request_diffs.create!(id: 1, merge_request_id: 1, state: 'collected')
merge_request_diffs.create!(id: 2, merge_request_id: 1, state: 'empty')
merge_request_diffs.create!(id: 3, merge_request_id: 1, state: 'without_files')
merge_request_diffs.create!(id: 4, merge_request_id: 1, state: 'collected')
merge_request_diffs.create!(id: 5, merge_request_id: 1, state: 'collected')
merge_request_diffs.create!(id: 6, merge_request_id: 1, state: 'collected')
merge_request_diffs.create!(id: 7, merge_request_id: 1, state: 'collected')
merge_requests.update(1, latest_merge_request_diff_id: 7)
end
it 'correctly schedules diff file deletion workers' do
Sidekiq::Testing.fake! do
Timecop.freeze do
described_class.new.perform
expect(described_class::MIGRATION).to be_scheduled_delayed_migration(5.minutes, [1, 4, 5])
expect(described_class::MIGRATION).to be_scheduled_delayed_migration(10.minutes, [6])
expect(BackgroundMigrationWorker.jobs.size).to eq(2)
end
end
end
end
end
......@@ -162,7 +162,6 @@ describe Gitlab::Danger::Helper do
'db/foo' | :database
'qa/foo' | :qa
'ee/db/foo' | :database
'ee/qa/foo' | :qa
'changelogs/foo' | :none
......
......@@ -214,6 +214,23 @@ describe Gitlab::Database::MigrationHelpers do
model.add_concurrent_foreign_key(:projects, :users, column: :user_id)
end
it 'allows the use of a custom key name' do
expect(model).to receive(:add_foreign_key).with(
:projects,
:users,
column: :user_id,
on_delete: :cascade,
name: :foo
)
model.add_concurrent_foreign_key(
:projects,
:users,
column: :user_id,
name: :foo
)
end
it 'does not create a foreign key if it exists already' do
expect(model).to receive(:foreign_key_exists?).with(:projects, :users, column: :user_id).and_return(true)
expect(model).not_to receive(:add_foreign_key)
......@@ -257,6 +274,16 @@ describe Gitlab::Database::MigrationHelpers do
model.add_concurrent_foreign_key(:projects, :users, column: :user_id)
end
it 'allows the use of a custom key name' do
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:execute).with(/statement_timeout/)
expect(model).to receive(:execute).ordered.with(/NOT VALID/)
expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT.+foo/)
expect(model).to receive(:execute).with(/RESET ALL/)
model.add_concurrent_foreign_key(:projects, :users, column: :user_id, name: :foo)
end
end
end
end
......
......@@ -20,6 +20,9 @@ issues:
- timelogs
- issue_assignees
- closed_by
- epic_issue
- epic
- designs
events:
- author
- project
......@@ -38,6 +41,7 @@ notes:
- system_note_metadata
- note_diff_file
- suggestions
- review
label_links:
- target
- label
......@@ -57,6 +61,7 @@ milestone:
- merge_requests
- participants
- events
- boards
snippets:
- author
- project
......@@ -103,6 +108,19 @@ merge_requests:
- merge_request_assignees
- suggestions
- assignees
- reviews
- approval_rules
- approvals
- approvers
- approver_users
- approver_groups
- approved_by_users
- draft_notes
- merge_train
- blocks_as_blocker
- blocks_as_blockee
- blocking_merge_requests
- blocked_merge_requests
merge_request_diff:
- merge_request
- merge_request_diff_commits
......@@ -135,6 +153,16 @@ ci_pipelines:
- deployments
- environments
- chat_data
- source_pipeline
- source_bridge
- source_job
- sourced_pipelines
- triggered_by_pipeline
- triggered_pipelines
- downstream_bridges
- job_artifacts
- vulnerabilities_occurrence_pipelines
- vulnerabilities
pipeline_variables:
- pipeline
stages:
......@@ -184,13 +212,18 @@ protected_branches:
- project
- merge_access_levels
- push_access_levels
- unprotect_access_levels
protected_tags:
- project
- create_access_levels
merge_access_levels:
- protected_branch
- user
- group
push_access_levels:
- protected_branch
- user
- group
create_access_levels:
- user
- protected_tag
......@@ -325,6 +358,45 @@ project:
- kubernetes_namespaces
- error_tracking_setting
- metrics_setting
- gitlab_slack_application_service
- github_service
- protected_environments
- mirror_user
- push_rule
- jenkins_service
- jenkins_deprecated_service
- index_status
- feature_usage
- approval_rules
- approvers
- approver_users
- pages_domains
- audit_events
- path_locks
- approver_groups
- repository_state
- source_pipelines
- sourced_pipelines
- prometheus_metrics
- vulnerabilities
- vulnerability_feedback
- vulnerability_identifiers
- vulnerability_scanners
- operations_feature_flags
- operations_feature_flags_client
- prometheus_alerts
- prometheus_alert_events
- software_license_policies
- project_registry
- packages
- package_files
- tracing_setting
- alerting_setting
- webide_pipelines
- reviews
- incident_management_setting
- merge_trains
- designs
award_emoji:
- awardable
- user
......@@ -332,6 +404,7 @@ priorities:
- label
prometheus_metrics:
- project
- prometheus_alerts
timelogs:
- issue
- merge_request
......@@ -365,3 +438,34 @@ suggestions:
- note
metrics_setting:
- project
protected_environments:
- project
- deploy_access_levels
deploy_access_levels:
- protected_environment
- user
- group
unprotect_access_levels:
- user
- protected_branch
- group
prometheus_alerts:
- project
- prometheus_alert_events
prometheus_alert_events:
- project
epic_issues:
- issue
- epic
tracing_setting:
- project
reviews:
- project
- merge_request
- author
- notes
incident_management_setting:
- project
merge_trains:
- project
- merge_request
......@@ -10,7 +10,7 @@ require 'spec_helper'
describe 'Import/Export attribute configuration' do
include ConfigurationHelper
let(:config_hash) { YAML.load_file(Gitlab::ImportExport.config_file).deep_stringify_keys }
let(:config_hash) { Gitlab::ImportExport::Config.new.to_h.deep_stringify_keys }
let(:relation_names) do
names = names_from_tree(config_hash['project_tree'])
......@@ -23,9 +23,6 @@ describe 'Import/Export attribute configuration' do
let(:safe_attributes_file) { 'spec/lib/gitlab/import_export/safe_model_attributes.yml' }
let(:safe_model_attributes) { YAML.load_file(safe_attributes_file) }
let(:ee_safe_attributes_file) { 'ee/spec/lib/gitlab/import_export/safe_model_attributes.yml' }
let(:ee_safe_model_attributes) { File.exist?(ee_safe_attributes_file) ? YAML.load_file(ee_safe_attributes_file) : {} }
it 'has no new columns' do
relation_names.each do |relation_name|
relation_class = relation_class_for_name(relation_name)
......@@ -34,10 +31,6 @@ describe 'Import/Export attribute configuration' do
current_attributes = parsed_attributes(relation_name, relation_attributes)
safe_attributes = safe_model_attributes[relation_class.to_s].dup || []
ee_safe_model_attributes[relation_class.to_s].to_a.each do |attribute|
safe_attributes << attribute
end
expect(safe_attributes).not_to be_nil, "Expected exported class #{relation_class} to exist in safe_model_attributes"
new_attributes = current_attributes - safe_attributes
......@@ -51,8 +44,7 @@ describe 'Import/Export attribute configuration' do
It looks like #{relation_class}, which is exported using the project Import/Export, has new attributes: #{new_attributes.join(',')}
Please add the attribute(s) to SAFE_MODEL_ATTRIBUTES if you consider this can be exported.
#{"If the model/associations are EE-specific, use `#{File.expand_path(ee_safe_attributes_file)}`.\n" if ee_safe_model_attributes.any?}
Otherwise, please blacklist the attribute(s) in IMPORT_EXPORT_CONFIG by adding it to its correspondent
Please blacklist the attribute(s) in IMPORT_EXPORT_CONFIG by adding it to its correspondent
model in the +excluded_attributes+ section.
SAFE_MODEL_ATTRIBUTES: #{File.expand_path(safe_attributes_file)}
......
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::ImportExport::Config do
let(:yaml_file) { described_class.new }
describe '#to_h' do
context 'when using CE' do
before do
allow(yaml_file)
.to receive(:merge?)
.and_return(false)
end
it 'just returns the parsed Hash without the EE section' do
expected = YAML.load_file(Gitlab::ImportExport.config_file)
expected.delete('ee')
expect(yaml_file.to_h).to eq(expected)
end
end
context 'when using EE' do
before do
allow(yaml_file)
.to receive(:merge?)
.and_return(true)
end
it 'merges the EE project tree into the CE project tree' do
allow(yaml_file)
.to receive(:parse_yaml)
.and_return({
'project_tree' => [
{
'issues' => [
:id,
:title,
{ 'notes' => [:id, :note, { 'author' => [:name] }] }
]
}
],
'ee' => {
'project_tree' => [
{
'issues' => [
:description,
{ 'notes' => [:date, { 'author' => [:email] }] }
]
},
{ 'foo' => [{ 'bar' => %i[baz] }] }
]
}
})
expect(yaml_file.to_h).to eq({
'project_tree' => [
{
'issues' => [
:id,
:title,
{
'notes' => [
:id,
:note,
{ 'author' => [:name, :email] },
:date
]
},
:description
]
},
{ 'foo' => [{ 'bar' => %i[baz] }] }
]
})
end
it 'merges the excluded attributes list' do
allow(yaml_file)
.to receive(:parse_yaml)
.and_return({
'project_tree' => [],
'excluded_attributes' => {
'project' => %i[id title],
'notes' => %i[id]
},
'ee' => {
'project_tree' => [],
'excluded_attributes' => {
'project' => %i[date],
'foo' => %i[bar baz]
}
}
})
expect(yaml_file.to_h).to eq({
'project_tree' => [],
'excluded_attributes' => {
'project' => %i[id title date],
'notes' => %i[id],
'foo' => %i[bar baz]
}
})
end
it 'merges the included attributes list' do
allow(yaml_file)
.to receive(:parse_yaml)
.and_return({
'project_tree' => [],
'included_attributes' => {
'project' => %i[id title],
'notes' => %i[id]
},
'ee' => {
'project_tree' => [],
'included_attributes' => {
'project' => %i[date],
'foo' => %i[bar baz]
}
}
})
expect(yaml_file.to_h).to eq({
'project_tree' => [],
'included_attributes' => {
'project' => %i[id title date],
'notes' => %i[id],
'foo' => %i[bar baz]
}
})
end
it 'merges the methods list' do
allow(yaml_file)
.to receive(:parse_yaml)
.and_return({
'project_tree' => [],
'methods' => {
'project' => %i[id title],
'notes' => %i[id]
},
'ee' => {
'project_tree' => [],
'methods' => {
'project' => %i[date],
'foo' => %i[bar baz]
}
}
})
expect(yaml_file.to_h).to eq({
'project_tree' => [],
'methods' => {
'project' => %i[id title date],
'notes' => %i[id],
'foo' => %i[bar baz]
}
})
end
end
end
end
......@@ -6,7 +6,7 @@ require 'spec_helper'
describe 'Import/Export model configuration' do
include ConfigurationHelper
let(:config_hash) { YAML.load_file(Gitlab::ImportExport.config_file).deep_stringify_keys }
let(:config_hash) { Gitlab::ImportExport::Config.new.to_h.deep_stringify_keys }
let(:model_names) do
names = names_from_tree(config_hash['project_tree'])
......@@ -16,26 +16,9 @@ describe 'Import/Export model configuration' do
# - User, Author... Models we do not care about for checking models
names.flatten.uniq - %w(milestones labels user author) + ['project']
end
let(:ce_models_yml) { 'spec/lib/gitlab/import_export/all_models.yml' }
let(:ce_models_hash) { YAML.load_file(ce_models_yml) }
let(:ee_models_yml) { 'ee/spec/lib/gitlab/import_export/all_models.yml' }
let(:ee_models_hash) { File.exist?(ee_models_yml) ? YAML.load_file(ee_models_yml) : {} }
let(:all_models_yml) { 'spec/lib/gitlab/import_export/all_models.yml' }
let(:all_models_hash) { YAML.load_file(all_models_yml) }
let(:current_models) { setup_models }
let(:all_models_hash) do
all_models_hash = ce_models_hash.dup
all_models_hash.each do |model, associations|
associations.concat(ee_models_hash[model] || [])
end
ee_models_hash.each do |model, associations|
all_models_hash[model] ||= associations
end
all_models_hash
end
it 'has no new models' do
model_names.each do |model_name|
......@@ -59,8 +42,7 @@ describe 'Import/Export model configuration' do
If you think this model should be included in the export, please add it to `#{Gitlab::ImportExport.config_file}`.
Definitely add it to `#{File.expand_path(ce_models_yml)}`
#{"or `#{File.expand_path(ee_models_yml)}` if the model/associations are EE-specific\n" if ee_models_hash.any?}
Definitely add it to `#{File.expand_path(all_models_yml)}`
to signal that you've handled this error and to prevent it from showing up in the future.
MSG
end
......
......@@ -24,6 +24,7 @@ Issue:
- weight
- time_estimate
- relative_position
- service_desk_reply_to
- last_edited_at
- last_edited_by_id
- discussion_locked
......@@ -68,6 +69,7 @@ Note:
- resolved_by_push
- discussion_id
- original_discussion_id
- review_id
LabelLink:
- id
- label_id
......@@ -144,6 +146,8 @@ ProjectMember:
- invite_accepted_at
- requested_at
- expires_at
- ldap
- override
User:
- id
- username
......@@ -316,6 +320,7 @@ CommitStatus:
- protected
- failure_reason
- scheduled_at
- upstream_pipeline_id
Ci::Variable:
- id
- project_id
......@@ -491,6 +496,17 @@ Project:
- printing_merge_request_link_enabled
- resolve_outdated_diff_discussions
- build_allow_git_fetch
- merge_requests_template
- merge_requests_rebase_enabled
- approvals_before_merge
- merge_requests_author_approval
- reset_approvals_on_push
- disable_overriding_approvers_per_merge_request
- merge_requests_ff_only_enabled
- issues_template
- repository_size_limit
- sync_time
- service_desk_enabled
- last_repository_updated_at
- ci_config_path
- delete_error
......@@ -498,7 +514,13 @@ Project:
- merge_requests_rebase_enabled
- jobs_cache_index
- external_authorization_classification_label
- external_webhook_token
- pages_https_only
- merge_requests_disable_committers_approval
- merge_requests_require_code_owner_approval
- require_password_to_approve
ProjectTracingSetting:
- external_url
Author:
- name
ProjectFeature:
......@@ -519,12 +541,24 @@ ProtectedBranch::MergeAccessLevel:
- access_level
- created_at
- updated_at
- user_id
- group_id
ProtectedBranch::PushAccessLevel:
- id
- protected_branch_id
- access_level
- created_at
- updated_at
- user_id
- group_id
ProtectedBranch::UnprotectAccessLevel:
- id
- protected_branch_id
- access_level
- created_at
- updated_at
- user_id
- group_id
ProtectedTag::CreateAccessLevel:
- id
- protected_tag_id
......@@ -587,6 +621,12 @@ PrometheusMetric:
- group
- common
- identifier
PrometheusAlert:
- threshold
- operator
- environment_id
- project_id
- prometheus_metric_id
Badge:
- id
- link_url
......@@ -598,6 +638,20 @@ Badge:
- type
ProjectCiCdSetting:
- group_runners_enabled
ProtectedEnvironment:
- id
- project_id
- name
- created_at
- updated_at
ProtectedEnvironment::DeployAccessLevel:
- id
- protected_environment_id
- access_level
- created_at
- updated_at
- user_id
- group_id
ResourceLabelEvent:
- id
- action
......
......@@ -5,8 +5,7 @@ require 'spec_helper'
describe ActiveRecord::Schema do
let(:latest_migration_timestamp) do
-    migrations_paths = %w[db ee/db]
-      .product(%w[migrate post_migrate])
+    migrations_paths = %w[db/migrate db/post_migrate]
.map { |path| Rails.root.join(*path, '*') }
migrations = Dir[*migrations_paths]
......
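Since every migration file is named `<YYYYMMDDHHMMSS>_description.rb`, the latest schema version is simply the maximum leading timestamp across both glob paths. A plain-Ruby sketch of the computation the spec sets up (paths relative to the repository root):

paths = %w[db/migrate db/post_migrate].map { |path| File.join(path, '*') }

latest_migration_timestamp = Dir[*paths]
  .map { |file| File.basename(file).split('_').first.to_i }
  .max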
# frozen_string_literal: true
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190404143330_add_unique_constraint_to_approvals_user_id_and_merge_request_id.rb')
describe AddUniqueConstraintToApprovalsUserIdAndMergeRequestId, :migration do
let(:migration) { described_class.new }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:merge_requests) { table(:merge_requests) }
let(:approvals) { table(:approvals) }
describe '#up' do
before do
namespaces.create(id: 1, name: 'ns', path: 'ns')
projects.create(id: 1, namespace_id: 1)
merge_requests.create(id: 1, target_branch: 'master', source_branch: 'feature-1', target_project_id: 1)
merge_requests.create(id: 2, target_branch: 'master', source_branch: 'feature-2', target_project_id: 1)
end
it 'deletes duplicate records and keeps the first one' do
first_approval = approvals.create(id: 1, merge_request_id: 1, user_id: 1)
approvals.create(id: 2, merge_request_id: 1, user_id: 1)
migration.up
expect(approvals.all.to_a).to contain_exactly(first_approval)
end
it 'does not delete unique records' do
unique_approvals = [
approvals.create(id: 1, merge_request_id: 1, user_id: 1),
approvals.create(id: 2, merge_request_id: 1, user_id: 2),
approvals.create(id: 3, merge_request_id: 2, user_id: 1)
]
migration.up
expect(approvals.all.to_a).to contain_exactly(*unique_approvals)
end
it 'creates unique index' do
migration.up
expect(migration.index_exists?(:approvals, [:user_id, :merge_request_id], unique: true)).to be_truthy
end
end
describe '#down' do
it 'removes unique index' do
migration.up
migration.down
expect(migration.index_exists?(:approvals, [:user_id, :merge_request_id], unique: true)).to be_falsey
end
end
end
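The spec pins down a two-step migration: purge duplicate (user_id, merge_request_id) pairs while keeping the lowest id, then add the unique index. A hedged sketch of such a migration; the real 20190404143330 file likely uses GitLab helpers such as add_concurrent_index rather than plain add_index:

class AddUniqueConstraintSketch < ActiveRecord::Migration[5.1]
  def up
    # Keep only the earliest approval for each (user_id, merge_request_id) pair.
    execute <<~SQL
      DELETE FROM approvals
      WHERE id NOT IN (
        SELECT MIN(id)
        FROM approvals
        GROUP BY user_id, merge_request_id
      )
    SQL

    add_index :approvals, [:user_id, :merge_request_id], unique: true
  end

  def down
    remove_index :approvals, column: [:user_id, :merge_request_id]
  end
end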
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20171216111734_clean_up_for_members.rb')
describe CleanUpForMembers, :migration do
before do
stub_feature_flags(enforced_sso: false)
end
let(:migration) { described_class.new }
let(:groups) { table(:namespaces) }
let!(:group_member) { create_group_member }
let!(:unbinded_group_member) { create_group_member }
let!(:invited_group_member) { create_group_member(true) }
let!(:not_valid_group_member) { create_group_member }
let!(:project_member) { create_project_member }
let!(:invited_project_member) { create_project_member(true) }
let!(:unbinded_project_member) { create_project_member }
let!(:not_valid_project_member) { create_project_member }
it 'removes members without proper user_id' do
unbinded_group_member.update_column(:user_id, nil)
not_valid_group_member.update_column(:user_id, 9999)
unbinded_project_member.update_column(:user_id, nil)
not_valid_project_member.update_column(:user_id, 9999)
migrate!
expect(Member.all).not_to include(unbinded_group_member, not_valid_group_member, unbinded_project_member, not_valid_project_member)
expect(Member.all).to include(group_member, invited_group_member, project_member, invited_project_member)
end
def create_group_member(invited = false)
fill_member(GroupMember.new(source_id: create_group.id, source_type: 'Namespace'), invited)
end
def create_project_member(invited = false)
fill_member(ProjectMember.new(project: create_project), invited)
end
def fill_member(member_object, invited)
member_object.tap do |m|
m.access_level = 40
m.notification_level = 3
if invited
m.user_id = nil
m.invite_token = 'xxx'
m.invite_email = 'email@email.com'
else
m.user_id = create_user.id
end
m.save
end
member_object
end
def create_group
name = FFaker::Lorem.characters(10)
groups.create!(type: 'Group', name: name, path: name.downcase.gsub(/\s/, '_'))
end
def create_project
name = FFaker::Lorem.characters(10)
creator = create_user
Project.create(name: name,
path: name.downcase.gsub(/\s/, '_'),
namespace: creator.namespace,
creator: creator)
end
def create_user
User.create(email: FFaker::Internet.email,
password: '12345678',
name: FFaker::Name.name,
username: FFaker::Internet.user_name,
confirmed_at: Time.now,
confirmation_token: nil)
end
end
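The behaviour under test reduces to: members that are not invites (no invite_token) and whose user_id is NULL or points at a non-existent user get deleted. A hedged SQL sketch of that cleanup, not the literal 20171216111734 implementation:

class CleanUpForMembersSketch < ActiveRecord::Migration[4.2]
  def up
    # Invited members legitimately have user_id = NULL, so only delete rows
    # without an invite token whose user reference is missing or dangling.
    execute <<~SQL
      DELETE FROM members
      WHERE invite_token IS NULL
        AND (user_id IS NULL
             OR NOT EXISTS (SELECT 1 FROM users WHERE users.id = members.user_id))
    SQL
  end

  def down
    # The deleted rows cannot be restored.
  end
end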
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20170502101023_cleanup_namespaceless_pending_delete_projects.rb')
describe CleanupNamespacelessPendingDeleteProjects, :migration, schema: 20180222043024 do
let(:projects) { table(:projects) }
before do
# Stub after_save callbacks that will fail when Project has no namespace
allow_any_instance_of(Project).to receive(:ensure_storage_path_exists).and_return(nil)
allow_any_instance_of(Project).to receive(:update_project_statistics).and_return(nil)
end
describe '#up' do
it 'only cleans up pending delete projects' do
projects.create!(name: 'gitlab', path: 'gitlab-org/gitlab-ce', namespace_id: 1)
projects.create!(name: 'gitlab', path: 'gitlab-org/gitlab-ee', namespace_id: 2, pending_delete: true)
project = Project.new(pending_delete: true, namespace_id: nil)
project.save(validate: false)
expect(NamespacelessProjectDestroyWorker).to receive(:bulk_perform_async).with([[project.id]])
described_class.new.up
end
it 'does nothing when no pending delete projects without namespace found' do
projects.create!(name: 'gitlab', path: 'gitlab-org/gitlab-ce', namespace_id: 1)
projects.create!(name: 'gitlab', path: 'gitlab-org/gitlab-ee', namespace_id: 2, pending_delete: true)
expect(NamespacelessProjectDestroyWorker).not_to receive(:bulk_perform_async)
described_class.new.up
end
end
end
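What the migration must do follows directly from the two examples: collect ids of pending-delete projects without a namespace and hand them to the worker in bulk. A sketch, assuming Project and NamespacelessProjectDestroyWorker as used in the spec:

def up
  # unscoped in case a default scope would hide pending-delete rows.
  ids = Project.unscoped
    .where(pending_delete: true, namespace_id: nil)
    .pluck(:id)

  # bulk_perform_async expects one argument array per job, hence [[id]].
  NamespacelessProjectDestroyWorker.bulk_perform_async(ids.map { |id| [id] }) if ids.any?
end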
......@@ -6,37 +6,32 @@ describe CreateMissingNamespaceForInternalUsers, :migration do
let(:namespaces) { table(:namespaces) }
let(:routes) { table(:routes) }
-  internal_user_types = [:ghost]
-  internal_user_types << :support_bot if ActiveRecord::Base.connection.column_exists?(:users, :support_bot)
-
-  internal_user_types.each do |attr|
-    context "for #{attr} user" do
-      let(:internal_user) do
-        users.create!(email: 'test@example.com', projects_limit: 100, username: 'test', attr => true)
-      end
+  context "for ghost user" do
+    let(:internal_user) do
+      users.create!(email: 'test@example.com', projects_limit: 100, username: 'test', ghost: true)
+    end

    it 'creates the missing namespace' do
      expect(namespaces.find_by(owner_id: internal_user.id)).to be_nil

      migrate!

      namespace = Namespace.find_by(type: nil, owner_id: internal_user.id)
      route = namespace.route

      expect(namespace.path).to eq(route.path)
      expect(namespace.name).to eq(route.name)
    end

    it 'sets notification email' do
      users.update(internal_user.id, notification_email: nil)

      expect(users.find(internal_user.id).notification_email).to be_nil

      migrate!

      user = users.find(internal_user.id)
      expect(user.notification_email).to eq(user.email)
    end
  end
-  end
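A hedged sketch of what the migration itself might do for the ghost user: create the missing namespace plus its route, and backfill the notification email (the model names here are illustrative, not the migration's actual helper classes):

def up
  User.where(ghost: true).find_each do |user|
    next if Namespace.exists?(owner_id: user.id)

    namespace = Namespace.create!(owner_id: user.id, name: user.username, path: user.username)
    Route.create!(source_id: namespace.id, source_type: 'Namespace',
                  name: namespace.name, path: namespace.path)

    # Internal users may lack a notification email; fall back to the primary one.
    user.update!(notification_email: user.email) if user.notification_email.blank?
  end
end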
# frozen_string_literal: true
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180723130817_delete_inconsistent_internal_id_records.rb')
describe DeleteInconsistentInternalIdRecords, :migration do
let!(:namespace) { table(:namespaces).create!(name: 'test', path: 'test') }
let!(:project1) { table(:projects).create!(namespace_id: namespace.id) }
let!(:project2) { table(:projects).create!(namespace_id: namespace.id) }
let!(:project3) { table(:projects).create!(namespace_id: namespace.id) }
let(:internal_ids) { table(:internal_ids) }
let(:internal_id_query) { ->(project) { InternalId.where(usage: InternalId.usages[scope.to_s.tableize], project_id: project.id) } }
let(:create_models) do
[project1, project2, project3].each do |project|
3.times do |i|
attributes = required_attributes.merge(project_id: project.id,
iid: i.succ)
table(scope.to_s.pluralize).create!(attributes)
end
end
end
shared_examples_for 'deleting inconsistent internal_id records' do
before do
create_models
[project1, project2, project3].each do |project|
internal_ids.create!(project_id: project.id, usage: InternalId.usages[scope.to_s.tableize], last_value: 3)
end
internal_id_query.call(project1).first.tap do |iid|
iid.last_value = iid.last_value - 2
# This is an inconsistent record
iid.save!
end
internal_id_query.call(project3).first.tap do |iid|
iid.last_value = iid.last_value + 2
# This is a consistent record
iid.save!
end
end
it "deletes inconsistent records" do
expect { migrate! }.to change { internal_id_query.call(project1).size }.from(1).to(0)
end
it "retains consistent records" do
expect { migrate! }.not_to change { internal_id_query.call(project2).size }
end
it "retains consistent records, especially those with a greater last_value" do
expect { migrate! }.not_to change { internal_id_query.call(project3).size }
end
end
context 'for issues' do
let(:scope) { :issue }
let(:required_attributes) { {} }
it_behaves_like 'deleting inconsistent internal_id records'
end
context 'for merge_requests' do
let(:scope) { :merge_request }
let(:create_models) do
[project1, project2, project3].each do |project|
3.times do |i|
table(:merge_requests).create!(
target_project_id: project.id,
source_project_id: project.id,
target_branch: 'master',
source_branch: i.to_s,
iid: i.succ
)
end
end
end
it_behaves_like 'deleting inconsistent internal_id records'
end
context 'for deployments' do
let(:scope) { :deployment }
let(:deployments) { table(:deployments) }
let(:create_models) do
3.times { |i| deployments.create!(project_id: project1.id, iid: i, environment_id: 1, ref: 'master', sha: 'a', tag: false) }
3.times { |i| deployments.create!(project_id: project2.id, iid: i, environment_id: 1, ref: 'master', sha: 'a', tag: false) }
3.times { |i| deployments.create!(project_id: project3.id, iid: i, environment_id: 1, ref: 'master', sha: 'a', tag: false) }
end
it_behaves_like 'deleting inconsistent internal_id records'
end
context 'for milestones (by project)' do
let(:scope) { :milestone }
let(:required_attributes) { { title: 'test' } }
it_behaves_like 'deleting inconsistent internal_id records'
end
context 'for ci_pipelines' do
let(:scope) { :ci_pipeline }
let(:required_attributes) { { ref: 'test' } }
it_behaves_like 'deleting inconsistent internal_id records'
end
context 'for milestones (by group)' do
# milestones (by group) is a little different than most of the other models
let(:groups) { table(:namespaces) }
let(:group1) { groups.create(name: 'Group 1', type: 'Group', path: 'group_1') }
let(:group2) { groups.create(name: 'Group 2', type: 'Group', path: 'group_2') }
let(:group3) { groups.create(name: 'Group 3', type: 'Group', path: 'group_3') }
let(:internal_id_query) { ->(group) { InternalId.where(usage: InternalId.usages['milestones'], namespace_id: group.id) } }
before do
[group1, group2, group3].each do |group|
3.times do |i|
table(:milestones).create!(
group_id: group.id,
title: 'test',
iid: i.succ
)
end
internal_ids.create!(namespace_id: group.id, usage: InternalId.usages['milestones'], last_value: 3)
end
internal_id_query.call(group1).first.tap do |iid|
iid.last_value = iid.last_value - 2
# This is an inconsistent record
iid.save!
end
internal_id_query.call(group3).first.tap do |iid|
iid.last_value = iid.last_value + 2
# This is a consistent record
iid.save!
end
end
it "deletes inconsistent records" do
expect { migrate! }.to change { internal_id_query.call(group1).size }.from(1).to(0)
end
it "retains consistent records" do
expect { migrate! }.not_to change { internal_id_query.call(group2).size }
end
it "retains consistent records, especially those with a greater last_value" do
expect { migrate! }.not_to change { internal_id_query.call(group3).size }
end
end
end
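The invariant behind these shared examples: an internal_ids row is inconsistent when its last_value lags behind the highest iid already issued, because the next allocation would then collide with an existing record. A tiny sketch of that predicate:

require 'ostruct'

def inconsistent?(internal_id, issued_iids)
  internal_id.last_value < issued_iids.max
end

inconsistent?(OpenStruct.new(last_value: 1), [1, 2, 3]) # => true,  row is deleted
inconsistent?(OpenStruct.new(last_value: 5), [1, 2, 3]) # => false, row is retained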
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180619121030_enqueue_delete_diff_files_workers.rb')
describe EnqueueDeleteDiffFilesWorkers, :migration, :sidekiq do
it 'correctly schedules diff files deletion schedulers' do
Sidekiq::Testing.fake! do
expect(BackgroundMigrationWorker)
.to receive(:perform_async)
.with(described_class::SCHEDULER)
.and_call_original
migrate!
expect(BackgroundMigrationWorker.jobs.size).to eq(1)
end
end
end
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20181014121030_enqueue_redact_links.rb')
describe EnqueueRedactLinks, :migration, :sidekiq do
let(:merge_requests) { table(:merge_requests) }
let(:issues) { table(:issues) }
let(:notes) { table(:notes) }
let(:projects) { table(:projects) }
let(:namespaces) { table(:namespaces) }
let(:snippets) { table(:snippets) }
let(:users) { table(:users) }
let(:user) { users.create!(email: 'test@example.com', projects_limit: 100, username: 'test') }
before do
stub_const("#{described_class.name}::BATCH_SIZE", 1)
text = 'some text /sent_notifications/00000000000000000000000000000000/unsubscribe more text'
group = namespaces.create!(name: 'gitlab', path: 'gitlab')
project = projects.create!(namespace_id: group.id)
merge_requests.create!(id: 1, target_project_id: project.id, source_project_id: project.id, target_branch: 'feature', source_branch: 'master', description: text)
issues.create!(id: 1, description: text)
notes.create!(id: 1, note: text)
notes.create!(id: 2, note: text)
snippets.create!(id: 1, description: text, author_id: user.id)
end
it 'correctly schedules background migrations' do
Sidekiq::Testing.fake! do
Timecop.freeze do
migrate!
expect(described_class::MIGRATION).to be_scheduled_delayed_migration(5.minutes, "Note", "note", 1, 1)
expect(described_class::MIGRATION).to be_scheduled_delayed_migration(10.minutes, "Note", "note", 2, 2)
expect(described_class::MIGRATION).to be_scheduled_delayed_migration(5.minutes, "Issue", "description", 1, 1)
expect(described_class::MIGRATION).to be_scheduled_delayed_migration(5.minutes, "MergeRequest", "description", 1, 1)
expect(described_class::MIGRATION).to be_scheduled_delayed_migration(5.minutes, "Snippet", "description", 1, 1)
expect(BackgroundMigrationWorker.jobs.size).to eq 5
end
end
end
end
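With BATCH_SIZE stubbed to 1, every row becomes its own batch and the delays grow in 5-minute steps, which is exactly what the expectations assert. A sketch of that staggered scheduling pattern, assuming GitLab's each_batch relation helper and the BackgroundMigrationWorker.perform_in(delay, class_name, args) API:

MIGRATION = 'RedactLinks' # assumed value; the spec only references described_class::MIGRATION

def schedule_redaction(model, field, batch_size:)
  model.each_batch(of: batch_size) do |batch, index|
    start_id, end_id = batch.pluck('MIN(id)', 'MAX(id)').first

    # Stagger batches five minutes apart so Sidekiq is not flooded at once.
    BackgroundMigrationWorker
      .perform_in(index * 5.minutes, MIGRATION, [model.name, field, start_id, end_id])
  end
end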
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180502134117_migrate_import_attributes_data_from_projects_to_project_mirror_data.rb')
describe MigrateImportAttributesDataFromProjectsToProjectMirrorData, :sidekiq, :migration do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:import_state) { table(:project_mirror_data) }
before do
stub_const("#{described_class}::BATCH_SIZE", 1)
namespaces.create(id: 1, name: 'gitlab-org', path: 'gitlab-org')
projects.create!(id: 1, namespace_id: 1, name: 'gitlab1',
path: 'gitlab1', import_error: "foo", import_status: :started,
import_url: generate(:url))
projects.create!(id: 2, namespace_id: 1, name: 'gitlab2',
path: 'gitlab2', import_error: "bar", import_status: :failed,
import_url: generate(:url))
projects.create!(id: 3, namespace_id: 1, name: 'gitlab3', path: 'gitlab3', import_status: :none, import_url: generate(:url))
end
it 'schedules delayed background migrations in batches in bulk' do
Sidekiq::Testing.fake! do
Timecop.freeze do
expect(projects.where.not(import_status: :none).count).to eq(2)
subject.up
expect(BackgroundMigrationWorker.jobs.size).to eq 2
expect(described_class::UP_MIGRATION).to be_scheduled_delayed_migration(5.minutes, 1, 1)
expect(described_class::UP_MIGRATION).to be_scheduled_delayed_migration(10.minutes, 2, 2)
end
end
end
describe '#down' do
before do
import_state.create!(id: 1, project_id: 1, status: :started)
import_state.create!(id: 2, project_id: 2, status: :started)
end
it 'schedules delayed background migrations in batches in bulk for rollback' do
Sidekiq::Testing.fake! do
Timecop.freeze do
expect(import_state.where.not(status: :none).count).to eq(2)
subject.down
expect(BackgroundMigrationWorker.jobs.size).to eq 2
expect(described_class::DOWN_MIGRATION).to be_scheduled_delayed_migration(5.minutes, 1, 1)
expect(described_class::DOWN_MIGRATION).to be_scheduled_delayed_migration(10.minutes, 2, 2)
end
end
end
end
end
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180521162137_migrate_remaining_mr_metrics_populating_background_migration.rb')
describe MigrateRemainingMrMetricsPopulatingBackgroundMigration, :migration, :sidekiq do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:mrs) { table(:merge_requests) }
before do
namespaces.create!(id: 1, name: 'foo', path: 'foo')
projects.create!(id: 123, name: 'gitlab1', path: 'gitlab1', namespace_id: 1)
projects.create!(id: 456, name: 'gitlab2', path: 'gitlab2', namespace_id: 1)
projects.create!(id: 789, name: 'gitlab3', path: 'gitlab3', namespace_id: 1)
mrs.create!(title: 'foo', target_branch: 'target', source_branch: 'source', target_project_id: 123)
mrs.create!(title: 'bar', target_branch: 'target', source_branch: 'source', target_project_id: 456)
mrs.create!(title: 'kux', target_branch: 'target', source_branch: 'source', target_project_id: 789)
end
it 'correctly schedules background migrations' do
stub_const("#{described_class.name}::BATCH_SIZE", 2)
Sidekiq::Testing.fake! do
Timecop.freeze do
migrate!
expect(described_class::MIGRATION)
.to be_scheduled_delayed_migration(10.minutes, mrs.first.id, mrs.second.id)
expect(described_class::MIGRATION)
.to be_scheduled_delayed_migration(20.minutes, mrs.third.id, mrs.third.id)
expect(BackgroundMigrationWorker.jobs.size).to eq(2)
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20181204154019_populate_mr_metrics_with_events_data.rb')
describe PopulateMrMetricsWithEventsData, :migration, :sidekiq do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:namespace) { namespaces.create(name: 'gitlab', path: 'gitlab-org') }
let(:project) { projects.create(namespace_id: namespace.id, name: 'foo') }
let(:merge_requests) { table(:merge_requests) }
def create_merge_request(id)
params = {
id: id,
target_project_id: project.id,
target_branch: 'master',
source_project_id: project.id,
source_branch: 'mr name',
title: "mr name#{id}"
}
merge_requests.create!(params)
end
it 'correctly schedules background migrations' do
create_merge_request(1)
create_merge_request(2)
create_merge_request(3)
stub_const("#{described_class.name}::BATCH_SIZE", 2)
Sidekiq::Testing.fake! do
Timecop.freeze do
migrate!
expect(described_class::MIGRATION)
.to be_scheduled_delayed_migration(8.minutes, 1, 2)
expect(described_class::MIGRATION)
.to be_scheduled_delayed_migration(16.minutes, 3, 3)
expect(BackgroundMigrationWorker.jobs.size).to eq(2)
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190418132125_populate_project_statistics_packages_size.rb')
describe PopulateProjectStatisticsPackagesSize, :migration do
let(:project_statistics) { table(:project_statistics) }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:packages) { table(:packages_packages) }
let(:package_files) { table(:packages_package_files) }
let(:file_size) { 1.kilobyte }
let(:repo_size) { 2.megabytes }
let(:lfs_size) { 3.gigabytes }
let(:artifacts_size) { 4.terabytes }
let(:storage_size) { repo_size + lfs_size + artifacts_size }
let(:namespace) { namespaces.create(name: 'foo', path: 'foo') }
let(:package) { packages.create!(project_id: project.id, name: 'a package', package_type: 1) }
let(:project) { projects.create!(namespace_id: namespace.id) }
let!(:statistics) { project_statistics.create!(project_id: project.id, namespace_id: namespace.id, storage_size: storage_size, repository_size: repo_size, lfs_objects_size: lfs_size, build_artifacts_size: artifacts_size) }
let!(:package_file) { package_files.create!(package_id: package.id, file: 'a file.txt', file_name: 'a file.txt', size: file_size) }
it 'backfills ProjectStatistics packages_size' do
expect { migrate! }
.to change { statistics.reload.packages_size }
.from(nil).to(file_size)
end
it 'updates ProjectStatistics storage_size' do
expect { migrate! }
.to change { statistics.reload.storage_size }
.by(file_size)
end
end
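Both expectations describe one backfill: sum the package file sizes per project, write the total into packages_size, and bump storage_size by the same amount. A hedged PostgreSQL sketch (the real 20190418132125 migration presumably batches this):

execute <<~SQL
  UPDATE project_statistics
  SET packages_size = totals.size,
      storage_size  = storage_size + totals.size
  FROM (
    SELECT packages_packages.project_id AS project_id,
           SUM(packages_package_files.size) AS size
    FROM packages_package_files
    JOIN packages_packages ON packages_packages.id = packages_package_files.package_id
    GROUP BY packages_packages.project_id
  ) AS totals
  WHERE project_statistics.project_id = totals.project_id
SQL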
# frozen_string_literal: true
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190520201748_populate_rule_type_on_approval_merge_request_rules.rb')
describe PopulateRuleTypeOnApprovalMergeRequestRules, :migration do
let(:migration) { described_class.new }
describe '#up' do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:merge_requests) { table(:merge_requests) }
let(:approval_rules) { table(:approval_merge_request_rules) }
# We use integers here since at the time of writing CE does not yet have the
# appropriate models and enum definitions.
let(:regular_rule_type) { 1 }
let(:code_owner_rule_type) { 2 }
before do
namespaces.create!(id: 11, name: 'gitlab', path: 'gitlab')
projects.create!(id: 101, namespace_id: 11, name: 'gitlab', path: 'gitlab')
merge_requests.create!(id: 1, target_project_id: 101, source_project_id: 101, target_branch: 'feature', source_branch: 'master')
approval_rules.create!(id: 1, merge_request_id: 1, name: "Default", code_owner: false, rule_type: regular_rule_type)
approval_rules.create!(id: 2, merge_request_id: 1, name: "Code Owners", code_owner: true, rule_type: regular_rule_type)
end
it 'backfills ApprovalMergeRequestRules code_owner rule_type' do
expect(approval_rules.where(rule_type: regular_rule_type).pluck(:id)).to contain_exactly(1, 2)
expect(approval_rules.where(rule_type: code_owner_rule_type).pluck(:id)).to be_empty
migrate!
expect(approval_rules.where(rule_type: regular_rule_type).pluck(:id)).to contain_exactly(1)
expect(approval_rules.where(rule_type: code_owner_rule_type).pluck(:id)).to contain_exactly(2)
end
end
end
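Given the integer enum values noted in the spec, the backfill reduces to a single predicate on the legacy code_owner boolean. A minimal sketch (the real migration likely updates in batches):

class PopulateRuleTypeSketch < ActiveRecord::Migration[5.1]
  CODE_OWNER_RULE_TYPE = 2

  def up
    # Rows flagged as code-owner rules get the code_owner rule_type enum value.
    execute <<~SQL
      UPDATE approval_merge_request_rules
      SET rule_type = #{CODE_OWNER_RULE_TYPE}
      WHERE code_owner = TRUE
    SQL
  end
end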
# frozen_string_literal: true
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20180906051323_remove_orphaned_label_links.rb')
describe RemoveOrphanedLabelLinks, :migration do
let(:label_links) { table(:label_links) }
let(:labels) { table(:labels) }
let(:project) { create(:project) } # rubocop:disable RSpec/FactoriesInMigrationSpecs
let(:label) { create_label }
before do
# This migration was created before we introduced ProjectCiCdSetting#default_git_depth
allow_any_instance_of(ProjectCiCdSetting).to receive(:default_git_depth).and_return(nil)
allow_any_instance_of(ProjectCiCdSetting).to receive(:default_git_depth=).and_return(0)
end
context 'add foreign key on label_id' do
let!(:label_link_with_label) { create_label_link(label_id: label.id) }
let!(:label_link_without_label) { create_label_link(label_id: nil) }
it 'removes orphaned labels without corresponding label' do
expect { migrate! }.to change { LabelLink.count }.from(2).to(1)
end
it 'does not remove entries with valid label_id' do
expect { migrate! }.not_to change { label_link_with_label.reload }
end
end
def create_label(**opts)
labels.create!(
project_id: project.id,
**opts
)
end
def create_label_link(**opts)
label_links.create!(
target_id: 1,
target_type: 'Issue',
**opts
)
end
end
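The setup mirrors a delete-then-constrain pattern: orphaned label_links rows must go before a foreign key on label_id can be enforced. A hedged sketch of the up step (whether the real migration also adds the key in the same file is an assumption):

def up
  # Drop rows whose label_id is NULL or points at a label that no longer exists.
  execute <<~SQL
    DELETE FROM label_links
    WHERE label_id IS NULL
       OR NOT EXISTS (SELECT 1 FROM labels WHERE labels.id = label_links.label_id)
  SQL

  add_foreign_key :label_links, :labels, column: :label_id, on_delete: :cascade
end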
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20171207150343_remove_soft_removed_objects.rb')
describe RemoveSoftRemovedObjects, :migration do
describe '#up' do
let!(:groups) do
table(:namespaces).tap do |t|
t.inheritance_column = nil
end
end
let!(:routes) do
table(:routes).tap do |t|
t.inheritance_column = nil
end
end
it 'removes various soft removed objects' do
5.times do
create_with_deleted_at(:issue)
end
regular_issue = create(:issue) # rubocop:disable RSpec/FactoriesInMigrationSpecs
run_migration
expect(Issue.count).to eq(1)
expect(Issue.first).to eq(regular_issue)
end
it 'removes the temporary indexes once soft removed data has been removed' do
migration = described_class.new
run_migration
disable_migrations_output do
expect(migration.temporary_index_exists?(Issue)).to eq(false)
end
end
it 'removes routes of soft removed personal namespaces' do
namespace = create_with_deleted_at(:namespace)
group = groups.create!(name: 'group', path: 'group_path', type: 'Group')
routes.create!(source_id: group.id, source_type: 'Group', name: 'group', path: 'group_path')
expect(routes.where(source_id: namespace.id).exists?).to eq(true)
expect(routes.where(source_id: group.id).exists?).to eq(true)
run_migration
expect(routes.where(source_id: namespace.id).exists?).to eq(false)
expect(routes.where(source_id: group.id).exists?).to eq(true)
end
it 'schedules the removal of soft removed groups' do
group = create_deleted_group
admin = create(:user, admin: true) # rubocop:disable RSpec/FactoriesInMigrationSpecs
expect_any_instance_of(GroupDestroyWorker)
.to receive(:perform)
.with(group.id, admin.id)
run_migration
end
it 'does not remove soft removed groups when no admin user could be found' do
create_deleted_group
expect_any_instance_of(GroupDestroyWorker)
.not_to receive(:perform)
run_migration
end
end
def run_migration
disable_migrations_output do
migrate!
end
end
def create_with_deleted_at(*args)
row = create(*args) # rubocop:disable RSpec/FactoriesInMigrationSpecs
# We set "deleted_at" this way so we don't run into any column cache issues.
row.class.where(id: row.id).update_all(deleted_at: 1.year.ago)
row
end
def create_deleted_group
group = groups.create!(name: 'group', path: 'group_path', type: 'Group')
routes.create!(source_id: group.id, source_type: 'Group', name: 'group', path: 'group_path')
groups.where(id: group.id).update_all(deleted_at: 1.year.ago)
group
end
end
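The "temporary index" idea tested above is a common pattern for large purges: add a partial index on deleted_at, delete the soft-removed rows it covers, then drop the index again. A generic sketch of that pattern, not the literal migration:

def remove_soft_removed_rows(table)
  index_name = "index_#{table}_on_deleted_at_temp"

  # Partial index so only soft-removed rows are indexed and the scan stays cheap.
  add_index table, :deleted_at, where: 'deleted_at IS NOT NULL', name: index_name

  execute "DELETE FROM #{table} WHERE deleted_at IS NOT NULL"

  remove_index table, name: index_name
end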
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20171005130944_schedule_create_gpg_key_subkeys_from_gpg_keys')
describe ScheduleCreateGpgKeySubkeysFromGpgKeys, :migration, :sidekiq do
before do
create(:gpg_key, id: 1, key: GpgHelpers::User1.public_key) # rubocop:disable RSpec/FactoriesInMigrationSpecs
create(:gpg_key, id: 2, key: GpgHelpers::User3.public_key) # rubocop:disable RSpec/FactoriesInMigrationSpecs
# Delete all subkeys so they can be recreated
GpgKeySubkey.destroy_all # rubocop: disable DestroyAll
end
it 'correctly schedules background migrations' do
Sidekiq::Testing.fake! do
migrate!
expect(described_class::MIGRATION).to be_scheduled_migration(1)
expect(described_class::MIGRATION).to be_scheduled_migration(2)
expect(BackgroundMigrationWorker.jobs.size).to eq(2)
end
end
it 'schedules background migrations' do
perform_enqueued_jobs do
expect(GpgKeySubkey.count).to eq(0)
migrate!
expect(GpgKeySubkey.count).to eq(3)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190402224749_schedule_merge_request_assignees_migration_progress_check.rb')
describe ScheduleMergeRequestAssigneesMigrationProgressCheck do
describe '#up' do
it 'schedules MergeRequestAssigneesMigrationProgressCheck background job' do
expect(BackgroundMigrationWorker).to receive(:perform_async)
.with(described_class::MIGRATION)
.and_call_original
subject.up
end
end
end
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20170703130158_schedule_merge_request_diff_migrations')
describe ScheduleMergeRequestDiffMigrations, :migration, :sidekiq do
let(:merge_request_diffs) { table(:merge_request_diffs) }
let(:merge_requests) { table(:merge_requests) }
let(:projects) { table(:projects) }
before do
stub_const("#{described_class.name}::BATCH_SIZE", 1)
projects.create!(id: 1, name: 'gitlab', path: 'gitlab')
merge_requests.create!(id: 1, target_project_id: 1, source_project_id: 1, target_branch: 'feature', source_branch: 'master')
merge_request_diffs.create!(id: 1, merge_request_id: 1, st_commits: YAML.dump([]), st_diffs: nil)
merge_request_diffs.create!(id: 2, merge_request_id: 1, st_commits: nil, st_diffs: YAML.dump([]))
merge_request_diffs.create!(id: 3, merge_request_id: 1, st_commits: nil, st_diffs: nil)
merge_request_diffs.create!(id: 4, merge_request_id: 1, st_commits: YAML.dump([]), st_diffs: YAML.dump([]))
end
it 'correctly schedules background migrations' do
Sidekiq::Testing.fake! do
Timecop.freeze do
migrate!
expect(described_class::MIGRATION).to be_scheduled_delayed_migration(5.minutes, 1, 1)
expect(described_class::MIGRATION).to be_scheduled_delayed_migration(10.minutes, 2, 2)
expect(described_class::MIGRATION).to be_scheduled_delayed_migration(15.minutes, 4, 4)
expect(BackgroundMigrationWorker.jobs.size).to eq 3
end
end
end
it 'schedules background migrations' do
perform_enqueued_jobs do
non_empty = 'st_commits IS NOT NULL OR st_diffs IS NOT NULL'
expect(merge_request_diffs.where(non_empty).count).to eq 3
migrate!
expect(merge_request_diffs.where(non_empty).count).to eq 0
end
end
end
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20170926150348_schedule_merge_request_diff_migrations_take_two')
describe ScheduleMergeRequestDiffMigrationsTakeTwo, :migration, :sidekiq do
let(:merge_request_diffs) { table(:merge_request_diffs) }
let(:merge_requests) { table(:merge_requests) }
let(:projects) { table(:projects) }
before do
stub_const("#{described_class.name}::BATCH_SIZE", 1)
projects.create!(id: 1, name: 'gitlab', path: 'gitlab')
merge_requests.create!(id: 1, target_project_id: 1, source_project_id: 1, target_branch: 'feature', source_branch: 'master')
merge_request_diffs.create!(id: 1, merge_request_id: 1, st_commits: YAML.dump([]), st_diffs: nil)
merge_request_diffs.create!(id: 2, merge_request_id: 1, st_commits: nil, st_diffs: YAML.dump([]))
merge_request_diffs.create!(id: 3, merge_request_id: 1, st_commits: nil, st_diffs: nil)
merge_request_diffs.create!(id: 4, merge_request_id: 1, st_commits: YAML.dump([]), st_diffs: YAML.dump([]))
end
it 'correctly schedules background migrations' do
Sidekiq::Testing.fake! do
Timecop.freeze do
migrate!
expect(described_class::MIGRATION).to be_scheduled_delayed_migration(10.minutes, 1, 1)
expect(described_class::MIGRATION).to be_scheduled_delayed_migration(20.minutes, 2, 2)
expect(described_class::MIGRATION).to be_scheduled_delayed_migration(30.minutes, 4, 4)
expect(BackgroundMigrationWorker.jobs.size).to eq 3
end
end
end
it 'migrates the data' do
perform_enqueued_jobs do
non_empty = 'st_commits IS NOT NULL OR st_diffs IS NOT NULL'
expect(merge_request_diffs.where(non_empty).count).to eq 3
migrate!
expect(merge_request_diffs.where(non_empty).count).to eq 0
end
end
end
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20171128214150_schedule_populate_merge_request_metrics_with_events_data.rb')
describe SchedulePopulateMergeRequestMetricsWithEventsData, :migration, :sidekiq do
# commits_count attribute is added in a next migration
before do
allow_any_instance_of(MergeRequestDiff)
.to receive(:commits_count=).and_return(nil)
end
let!(:mrs) { create_list(:merge_request, 3) } # rubocop:disable RSpec/FactoriesInMigrationSpecs
it 'correctly schedules background migrations' do
stub_const("#{described_class.name}::BATCH_SIZE", 2)
Sidekiq::Testing.fake! do
Timecop.freeze do
migrate!
expect(described_class::MIGRATION)
.to be_scheduled_delayed_migration(10.minutes, mrs.first.id, mrs.second.id)
expect(described_class::MIGRATION)
.to be_scheduled_delayed_migration(20.minutes, mrs.third.id, mrs.third.id)
expect(BackgroundMigrationWorker.jobs.size).to eq(2)
end
end
end
end
......@@ -38,7 +38,7 @@ describe Ci::RetryBuildService do
runner_id tag_taggings taggings tags trigger_request_id
user_id auto_canceled_by_id retried failure_reason
sourced_pipelines artifacts_file_store artifacts_metadata_store
-     metadata runner_session trace_chunks
+     metadata runner_session trace_chunks upstream_pipeline_id
artifacts_file artifacts_metadata artifacts_size].freeze
shared_examples 'build duplication' do
......