Commit 615bbe30 authored by James Fargher's avatar James Fargher Committed by Alex Pooley

Extract warnings for backup tasks

Previously the db task was unique in that it had special warnings.
Encode these warnings as part of the task interface so that warning
handling can be centralised. This means the db task can be handled in
the exact same way as every other task.

A spec had to be removed because all the methods that would output the
time have been stubbed. This had only worked before because db was
handled separately.
parent e6a8b5b9
...@@ -5,6 +5,7 @@ require 'spec_helper' ...@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Backup::Repositories do RSpec.describe Backup::Repositories do
let(:progress) { spy(:stdout) } let(:progress) { spy(:stdout) }
let(:strategy) { spy(:strategy) } let(:strategy) { spy(:strategy) }
let(:destination) { 'repositories' }
subject { described_class.new(progress, max_concurrency: 1, max_storage_concurrency: 1, strategy: strategy) } subject { described_class.new(progress, max_concurrency: 1, max_storage_concurrency: 1, strategy: strategy) }
...@@ -16,9 +17,9 @@ RSpec.describe Backup::Repositories do ...@@ -16,9 +17,9 @@ RSpec.describe Backup::Repositories do
it 'calls enqueue for each repository type', :aggregate_failures do it 'calls enqueue for each repository type', :aggregate_failures do
create(:wiki_page, container: group) create(:wiki_page, container: group)
subject.dump subject.dump(destination)
expect(strategy).to have_received(:start).with(:create) expect(strategy).to have_received(:start).with(:create, destination)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(group, Gitlab::GlRepository::WIKI) expect(strategy).to have_received(:enqueue).with(group, Gitlab::GlRepository::WIKI)
expect(strategy).to have_received(:finish!) expect(strategy).to have_received(:finish!)
...@@ -31,38 +32,38 @@ RSpec.describe Backup::Repositories do ...@@ -31,38 +32,38 @@ RSpec.describe Backup::Repositories do
it 'creates the expected number of threads' do it 'creates the expected number of threads' do
expect(Thread).not_to receive(:new) expect(Thread).not_to receive(:new)
expect(strategy).to receive(:start).with(:create) expect(strategy).to receive(:start).with(:create, destination)
groups.each do |group| groups.each do |group|
expect(strategy).to receive(:enqueue).with(group, Gitlab::GlRepository::WIKI) expect(strategy).to receive(:enqueue).with(group, Gitlab::GlRepository::WIKI)
end end
expect(strategy).to receive(:finish!) expect(strategy).to receive(:finish!)
subject.dump subject.dump(destination)
end end
describe 'command failure' do describe 'command failure' do
it 'enqueue_group raises an error' do it 'enqueue_group raises an error' do
allow(strategy).to receive(:enqueue).with(anything, Gitlab::GlRepository::WIKI).and_raise(IOError) allow(strategy).to receive(:enqueue).with(anything, Gitlab::GlRepository::WIKI).and_raise(IOError)
expect { subject.dump }.to raise_error(IOError) expect { subject.dump(destination) }.to raise_error(IOError)
end end
it 'group query raises an error' do it 'group query raises an error' do
allow(Group).to receive_message_chain(:includes, :find_each).and_raise(ActiveRecord::StatementTimeout) allow(Group).to receive_message_chain(:includes, :find_each).and_raise(ActiveRecord::StatementTimeout)
expect { subject.dump }.to raise_error(ActiveRecord::StatementTimeout) expect { subject.dump(destination) }.to raise_error(ActiveRecord::StatementTimeout)
end end
end end
it 'avoids N+1 database queries' do it 'avoids N+1 database queries' do
control_count = ActiveRecord::QueryRecorder.new do control_count = ActiveRecord::QueryRecorder.new do
subject.dump subject.dump(destination)
end.count end.count
create_list(:group, 2, :wiki_repo) create_list(:group, 2, :wiki_repo)
expect do expect do
subject.dump subject.dump(destination)
end.not_to exceed_query_limit(control_count) end.not_to exceed_query_limit(control_count)
end end
end end
...@@ -73,9 +74,9 @@ RSpec.describe Backup::Repositories do ...@@ -73,9 +74,9 @@ RSpec.describe Backup::Repositories do
let_it_be(:group) { create(:group) } let_it_be(:group) { create(:group) }
it 'calls enqueue for each repository type', :aggregate_failures do it 'calls enqueue for each repository type', :aggregate_failures do
subject.restore subject.restore(destination)
expect(strategy).to have_received(:start).with(:restore) expect(strategy).to have_received(:start).with(:restore, destination)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(group, Gitlab::GlRepository::WIKI) expect(strategy).to have_received(:enqueue).with(group, Gitlab::GlRepository::WIKI)
expect(strategy).to have_received(:finish!) expect(strategy).to have_received(:finish!)
......
...@@ -2,14 +2,11 @@ ...@@ -2,14 +2,11 @@
module Backup module Backup
class Artifacts < Backup::Files class Artifacts < Backup::Files
attr_reader :progress
def initialize(progress) def initialize(progress)
@progress = progress super(progress, 'artifacts', JobArtifactUploader.root, excludes: ['tmp'])
super('artifacts', JobArtifactUploader.root, excludes: ['tmp'])
end end
override :human_name
def human_name def human_name
_('artifacts') _('artifacts')
end end
......
...@@ -2,14 +2,11 @@ ...@@ -2,14 +2,11 @@
module Backup module Backup
class Builds < Backup::Files class Builds < Backup::Files
attr_reader :progress
def initialize(progress) def initialize(progress)
@progress = progress super(progress, 'builds', Settings.gitlab_ci.builds_path)
super('builds', Settings.gitlab_ci.builds_path)
end end
override :human_name
def human_name def human_name
_('builds') _('builds')
end end
......
...@@ -3,10 +3,10 @@ ...@@ -3,10 +3,10 @@
require 'yaml' require 'yaml'
module Backup module Backup
class Database class Database < Task
extend ::Gitlab::Utils::Override
include Backup::Helper include Backup::Helper
attr_reader :progress attr_reader :force, :config
attr_reader :config, :db_file_name
IGNORED_ERRORS = [ IGNORED_ERRORS = [
# Ignore warnings # Ignore warnings
...@@ -18,13 +18,14 @@ module Backup ...@@ -18,13 +18,14 @@ module Backup
].freeze ].freeze
IGNORED_ERRORS_REGEXP = Regexp.union(IGNORED_ERRORS).freeze IGNORED_ERRORS_REGEXP = Regexp.union(IGNORED_ERRORS).freeze
def initialize(progress, filename: nil) def initialize(progress, force:)
@progress = progress super(progress)
@config = ActiveRecord::Base.configurations.find_db_config(Rails.env).configuration_hash @config = ActiveRecord::Base.configurations.find_db_config(Rails.env).configuration_hash
@db_file_name = filename || File.join(Gitlab.config.backup.path, 'db', 'database.sql.gz') @force = force
end end
def dump override :dump
def dump(db_file_name)
FileUtils.mkdir_p(File.dirname(db_file_name)) FileUtils.mkdir_p(File.dirname(db_file_name))
FileUtils.rm_f(db_file_name) FileUtils.rm_f(db_file_name)
compress_rd, compress_wr = IO.pipe compress_rd, compress_wr = IO.pipe
...@@ -64,12 +65,24 @@ module Backup ...@@ -64,12 +65,24 @@ module Backup
raise DatabaseBackupError.new(config, db_file_name) unless success raise DatabaseBackupError.new(config, db_file_name) unless success
end end
def restore override :restore
def restore(db_file_name)
unless force
progress.puts 'Removing all tables. Press `Ctrl-C` within 5 seconds to abort'.color(:yellow)
sleep(5)
end
# Drop all tables Load the schema to ensure we don't have any newer tables
# hanging out from a failed upgrade
puts_time 'Cleaning the database ... '.color(:blue)
Rake::Task['gitlab:db:drop_tables'].invoke
puts_time 'done'.color(:green)
decompress_rd, decompress_wr = IO.pipe decompress_rd, decompress_wr = IO.pipe
decompress_pid = spawn(*%w(gzip -cd), out: decompress_wr, in: db_file_name) decompress_pid = spawn(*%w(gzip -cd), out: decompress_wr, in: db_file_name)
decompress_wr.close decompress_wr.close
status, errors = status, @errors =
case config[:adapter] case config[:adapter]
when "postgresql" then when "postgresql" then
progress.print "Restoring PostgreSQL database #{database} ... " progress.print "Restoring PostgreSQL database #{database} ... "
...@@ -81,33 +94,47 @@ module Backup ...@@ -81,33 +94,47 @@ module Backup
Process.waitpid(decompress_pid) Process.waitpid(decompress_pid)
success = $?.success? && status.success? success = $?.success? && status.success?
if errors.present? if @errors.present?
progress.print "------ BEGIN ERRORS -----\n".color(:yellow) progress.print "------ BEGIN ERRORS -----\n".color(:yellow)
progress.print errors.join.color(:yellow) progress.print @errors.join.color(:yellow)
progress.print "------ END ERRORS -------\n".color(:yellow) progress.print "------ END ERRORS -------\n".color(:yellow)
end end
report_success(success) report_success(success)
raise Backup::Error, 'Restore failed' unless success raise Backup::Error, 'Restore failed' unless success
end
if errors.present? override :pre_restore_warning
warning = <<~MSG def pre_restore_warning
There were errors in restoring the schema. This may cause return if force
issues if this results in missing indexes, constraints, or
columns. Please record the errors above and contact GitLab <<-MSG.strip_heredoc
Support if you have questions: Be sure to stop Puma, Sidekiq, and any other process that
https://about.gitlab.com/support/ connects to the database before proceeding. For Omnibus
MSG installs, see the following link for more information:
https://docs.gitlab.com/ee/raketasks/backup_restore.html#restore-for-omnibus-gitlab-installations
warn warning.color(:red)
Gitlab::TaskHelpers.ask_to_continue Before restoring the database, we will remove all existing
end tables to avoid future upgrade problems. Be aware that if you have
custom tables in the GitLab database these tables and all data will be
removed.
MSG
end end
def enabled override :post_restore_warning
true def post_restore_warning
return unless @errors.present?
<<-MSG.strip_heredoc
There were errors in restoring the schema. This may cause
issues if this results in missing indexes, constraints, or
columns. Please record the errors above and contact GitLab
Support if you have questions:
https://about.gitlab.com/support/
MSG
end end
override :human_name
def human_name def human_name
_('database') _('database')
end end
......
# frozen_string_literal: true # frozen_string_literal: true
require 'open3' require 'open3'
require_relative 'helper'
module Backup module Backup
class Files class Files < Task
extend ::Gitlab::Utils::Override
include Backup::Helper include Backup::Helper
DEFAULT_EXCLUDE = 'lost+found' DEFAULT_EXCLUDE = 'lost+found'
attr_reader :name, :backup_tarball, :excludes attr_reader :name, :excludes
def initialize(progress, name, app_files_dir, excludes: [])
super(progress)
def initialize(name, app_files_dir, excludes: [])
@name = name @name = name
@app_files_dir = app_files_dir @app_files_dir = app_files_dir
@backup_tarball = File.join(Gitlab.config.backup.path, name + '.tar.gz')
@excludes = [DEFAULT_EXCLUDE].concat(excludes) @excludes = [DEFAULT_EXCLUDE].concat(excludes)
end end
# Copy files from public/files to backup/files # Copy files from public/files to backup/files
def dump override :dump
def dump(backup_tarball)
FileUtils.mkdir_p(Gitlab.config.backup.path) FileUtils.mkdir_p(Gitlab.config.backup.path)
FileUtils.rm_f(backup_tarball) FileUtils.rm_f(backup_tarball)
...@@ -35,7 +37,7 @@ module Backup ...@@ -35,7 +37,7 @@ module Backup
unless status == 0 unless status == 0
puts output puts output
raise_custom_error raise_custom_error(backup_tarball)
end end
tar_cmd = [tar, exclude_dirs(:tar), %W[-C #{backup_files_realpath} -cf - .]].flatten tar_cmd = [tar, exclude_dirs(:tar), %W[-C #{backup_files_realpath} -cf - .]].flatten
...@@ -47,11 +49,12 @@ module Backup ...@@ -47,11 +49,12 @@ module Backup
end end
unless pipeline_succeeded?(tar_status: status_list[0], gzip_status: status_list[1], output: output) unless pipeline_succeeded?(tar_status: status_list[0], gzip_status: status_list[1], output: output)
raise_custom_error raise_custom_error(backup_tarball)
end end
end end
def restore override :restore
def restore(backup_tarball)
backup_existing_files_dir backup_existing_files_dir
cmd_list = [%w[gzip -cd], %W[#{tar} --unlink-first --recursive-unlink -C #{app_files_realpath} -xf -]] cmd_list = [%w[gzip -cd], %W[#{tar} --unlink-first --recursive-unlink -C #{app_files_realpath} -xf -]]
...@@ -61,10 +64,6 @@ module Backup ...@@ -61,10 +64,6 @@ module Backup
end end
end end
def enabled
true
end
def tar def tar
if system(*%w[gtar --version], out: '/dev/null') if system(*%w[gtar --version], out: '/dev/null')
# It looks like we can get GNU tar by running 'gtar' # It looks like we can get GNU tar by running 'gtar'
...@@ -146,7 +145,7 @@ module Backup ...@@ -146,7 +145,7 @@ module Backup
end end
end end
def raise_custom_error def raise_custom_error(backup_tarball)
raise FileBackupError.new(app_files_realpath, backup_tarball) raise FileBackupError.new(app_files_realpath, backup_tarball)
end end
......
...@@ -15,7 +15,7 @@ module Backup ...@@ -15,7 +15,7 @@ module Backup
@storage_parallelism = storage_parallelism @storage_parallelism = storage_parallelism
end end
def start(type) def start(type, backup_repos_path)
raise Error, 'already started' if started? raise Error, 'already started' if started?
command = case type command = case type
...@@ -93,10 +93,6 @@ module Backup ...@@ -93,10 +93,6 @@ module Backup
@thread.present? @thread.present?
end end
def backup_repos_path
File.absolute_path(File.join(Gitlab.config.backup.path, 'repositories'))
end
def bin_path def bin_path
File.absolute_path(Gitlab.config.backup.gitaly_backup_path) File.absolute_path(Gitlab.config.backup.gitaly_backup_path)
end end
......
...@@ -7,10 +7,11 @@ module Backup ...@@ -7,10 +7,11 @@ module Backup
@progress = progress @progress = progress
end end
def start(type) def start(type, backup_repos_path)
raise Error, 'already started' if @type raise Error, 'already started' if @type
@type = type @type = type
@backup_repos_path = backup_repos_path
case type case type
when :create when :create
FileUtils.rm_rf(backup_repos_path) FileUtils.rm_rf(backup_repos_path)
...@@ -31,7 +32,7 @@ module Backup ...@@ -31,7 +32,7 @@ module Backup
backup_restore = BackupRestore.new( backup_restore = BackupRestore.new(
progress, progress,
repository_type.repository_for(container), repository_type.repository_for(container),
backup_repos_path @backup_repos_path
) )
case @type case @type
...@@ -52,10 +53,6 @@ module Backup ...@@ -52,10 +53,6 @@ module Backup
attr_reader :progress attr_reader :progress
def backup_repos_path
@backup_repos_path ||= File.join(Gitlab.config.backup.path, 'repositories')
end
class BackupRestore class BackupRestore
attr_accessor :progress, :repository, :backup_repos_path attr_accessor :progress, :repository, :backup_repos_path
......
...@@ -2,14 +2,11 @@ ...@@ -2,14 +2,11 @@
module Backup module Backup
class Lfs < Backup::Files class Lfs < Backup::Files
attr_reader :progress
def initialize(progress) def initialize(progress)
@progress = progress super(progress, 'lfs', Settings.lfs.storage_path)
super('lfs', Settings.lfs.storage_path)
end end
override :human_name
def human_name def human_name
_('lfs objects') _('lfs objects')
end end
......
...@@ -2,37 +2,77 @@ ...@@ -2,37 +2,77 @@
module Backup module Backup
class Manager class Manager
ARCHIVES_TO_BACKUP = %w[uploads builds artifacts pages lfs terraform_state registry packages].freeze
FOLDERS_TO_BACKUP = %w[repositories db].freeze
FILE_NAME_SUFFIX = '_gitlab_backup.tar' FILE_NAME_SUFFIX = '_gitlab_backup.tar'
MANIFEST_NAME = 'backup_information.yml'
TaskDefinition = Struct.new(
:destination_path, # Where the task should put its backup file/dir.
:destination_optional, # `true` if the destination might not exist on a successful backup.
:cleanup_path, # Path to remove after a successful backup. Uses `destination_path` when not specified.
:task,
keyword_init: true
)
attr_reader :progress attr_reader :progress
def initialize(progress) def initialize(progress, definitions: nil)
@progress = progress @progress = progress
max_concurrency = ENV.fetch('GITLAB_BACKUP_MAX_CONCURRENCY', 1).to_i max_concurrency = ENV.fetch('GITLAB_BACKUP_MAX_CONCURRENCY', 1).to_i
max_storage_concurrency = ENV.fetch('GITLAB_BACKUP_MAX_STORAGE_CONCURRENCY', 1).to_i max_storage_concurrency = ENV.fetch('GITLAB_BACKUP_MAX_STORAGE_CONCURRENCY', 1).to_i
force = ENV['force'] == 'yes'
@tasks = {
'db' => Database.new(progress), @definitions = definitions || {
'repositories' => Repositories.new(progress, 'db' => TaskDefinition.new(
strategy: repository_backup_strategy, destination_path: 'db/database.sql.gz',
max_concurrency: max_concurrency, cleanup_path: 'db',
max_storage_concurrency: max_storage_concurrency), task: Database.new(progress, force: force)
'uploads' => Uploads.new(progress), ),
'builds' => Builds.new(progress), 'repositories' => TaskDefinition.new(
'artifacts' => Artifacts.new(progress), destination_path: 'repositories',
'pages' => Pages.new(progress), destination_optional: true,
'lfs' => Lfs.new(progress), task: Repositories.new(progress,
'terraform_state' => TerraformState.new(progress), strategy: repository_backup_strategy,
'registry' => Registry.new(progress), max_concurrency: max_concurrency,
'packages' => Packages.new(progress) max_storage_concurrency: max_storage_concurrency)
),
'uploads' => TaskDefinition.new(
destination_path: 'uploads.tar.gz',
task: Uploads.new(progress)
),
'builds' => TaskDefinition.new(
destination_path: 'builds.tar.gz',
task: Builds.new(progress)
),
'artifacts' => TaskDefinition.new(
destination_path: 'artifacts.tar.gz',
task: Artifacts.new(progress)
),
'pages' => TaskDefinition.new(
destination_path: 'pages.tar.gz',
task: Pages.new(progress)
),
'lfs' => TaskDefinition.new(
destination_path: 'lfs.tar.gz',
task: Lfs.new(progress)
),
'terraform_state' => TaskDefinition.new(
destination_path: 'terraform_state.tar.gz',
task: TerraformState.new(progress)
),
'registry' => TaskDefinition.new(
destination_path: 'registry.tar.gz',
task: Registry.new(progress)
),
'packages' => TaskDefinition.new(
destination_path: 'packages.tar.gz',
task: Packages.new(progress)
)
}.freeze }.freeze
end end
def create def create
@tasks.keys.each do |task_name| @definitions.keys.each do |task_name|
run_create_task(task_name) run_create_task(task_name)
end end
...@@ -54,11 +94,11 @@ module Backup ...@@ -54,11 +94,11 @@ module Backup
end end
def run_create_task(task_name) def run_create_task(task_name)
task = @tasks[task_name] definition = @definitions[task_name]
puts_time "Dumping #{task.human_name} ... ".color(:blue) puts_time "Dumping #{definition.task.human_name} ... ".color(:blue)
unless task.enabled unless definition.task.enabled
puts_time "[DISABLED]".color(:cyan) puts_time "[DISABLED]".color(:cyan)
return return
end end
...@@ -68,7 +108,8 @@ module Backup ...@@ -68,7 +108,8 @@ module Backup
return return
end end
task.dump definition.task.dump(File.join(Gitlab.config.backup.path, definition.destination_path))
puts_time "done".color(:green) puts_time "done".color(:green)
rescue Backup::DatabaseBackupError, Backup::FileBackupError => e rescue Backup::DatabaseBackupError, Backup::FileBackupError => e
...@@ -79,39 +120,7 @@ module Backup ...@@ -79,39 +120,7 @@ module Backup
cleanup_required = unpack cleanup_required = unpack
verify_backup_version verify_backup_version
unless skipped?('db') @definitions.keys.each do |task_name|
begin
unless ENV['force'] == 'yes'
warning = <<-MSG.strip_heredoc
Be sure to stop Puma, Sidekiq, and any other process that
connects to the database before proceeding. For Omnibus
installs, see the following link for more information:
https://docs.gitlab.com/ee/raketasks/backup_restore.html#restore-for-omnibus-gitlab-installations
Before restoring the database, we will remove all existing
tables to avoid future upgrade problems. Be aware that if you have
custom tables in the GitLab database these tables and all data will be
removed.
MSG
puts warning.color(:red)
Gitlab::TaskHelpers.ask_to_continue
puts 'Removing all tables. Press `Ctrl-C` within 5 seconds to abort'.color(:yellow)
sleep(5)
end
# Drop all tables Load the schema to ensure we don't have any newer tables
# hanging out from a failed upgrade
puts_time 'Cleaning the database ... '.color(:blue)
Rake::Task['gitlab:db:drop_tables'].invoke
puts_time 'done'.color(:green)
run_restore_task('db')
rescue Gitlab::TaskAbortedByUserError
puts "Quitting...".color(:red)
exit 1
end
end
@tasks.except('db').keys.each do |task_name|
run_restore_task(task_name) unless skipped?(task_name) run_restore_task(task_name) unless skipped?(task_name)
end end
...@@ -130,25 +139,44 @@ module Backup ...@@ -130,25 +139,44 @@ module Backup
end end
def run_restore_task(task_name) def run_restore_task(task_name)
task = @tasks[task_name] definition = @definitions[task_name]
puts_time "Restoring #{task.human_name} ... ".color(:blue) puts_time "Restoring #{definition.task.human_name} ... ".color(:blue)
unless task.enabled unless definition.task.enabled
puts_time "[DISABLED]".color(:cyan) puts_time "[DISABLED]".color(:cyan)
return return
end end
task.restore warning = definition.task.pre_restore_warning
if warning.present?
puts_time warning.color(:red)
Gitlab::TaskHelpers.ask_to_continue
end
definition.task.restore(File.join(Gitlab.config.backup.path, definition.destination_path))
puts_time "done".color(:green) puts_time "done".color(:green)
warning = definition.task.post_restore_warning
if warning.present?
puts_time warning.color(:red)
Gitlab::TaskHelpers.ask_to_continue
end
rescue Gitlab::TaskAbortedByUserError
puts_time "Quitting...".color(:red)
exit 1
end end
private
def write_info def write_info
# Make sure there is a connection # Make sure there is a connection
ActiveRecord::Base.connection.reconnect! ActiveRecord::Base.connection.reconnect!
Dir.chdir(backup_path) do Dir.chdir(backup_path) do
File.open("#{backup_path}/backup_information.yml", "w+") do |file| File.open("#{backup_path}/#{MANIFEST_NAME}", "w+") do |file|
file << backup_information.to_yaml.gsub(/^---\n/, '') file << backup_information.to_yaml.gsub(/^---\n/, '')
end end
end end
...@@ -182,8 +210,11 @@ module Backup ...@@ -182,8 +210,11 @@ module Backup
upload = directory.files.create(create_attributes) upload = directory.files.create(create_attributes)
if upload if upload
progress.puts "done".color(:green) if upload.respond_to?(:encryption) && upload.encryption
upload progress.puts "done (encrypted with #{upload.encryption})".color(:green)
else
progress.puts "done".color(:green)
end
else else
puts "uploading backup to #{remote_directory} failed".color(:red) puts "uploading backup to #{remote_directory} failed".color(:red)
raise Backup::Error, 'Backup failed' raise Backup::Error, 'Backup failed'
...@@ -193,18 +224,19 @@ module Backup ...@@ -193,18 +224,19 @@ module Backup
def cleanup def cleanup
progress.print "Deleting tmp directories ... " progress.print "Deleting tmp directories ... "
backup_contents.each do |dir| remove_backup_path(MANIFEST_NAME)
next unless File.exist?(File.join(backup_path, dir)) @definitions.each do |_, definition|
remove_backup_path(definition.cleanup_path || definition.destination_path)
if FileUtils.rm_rf(File.join(backup_path, dir))
progress.puts "done".color(:green)
else
puts "deleting tmp directory '#{dir}' failed".color(:red)
raise Backup::Error, 'Backup failed'
end
end end
end end
def remove_backup_path(path)
return unless File.exist?(File.join(backup_path, path))
FileUtils.rm_rf(File.join(backup_path, path))
progress.puts "done".color(:green)
end
def remove_tmp def remove_tmp
# delete tmp inside backups # delete tmp inside backups
progress.print "Deleting backups/tmp ... " progress.print "Deleting backups/tmp ... "
...@@ -322,10 +354,8 @@ module Backup ...@@ -322,10 +354,8 @@ module Backup
settings[:skipped] && settings[:skipped].include?(item) || !enabled_task?(item) settings[:skipped] && settings[:skipped].include?(item) || !enabled_task?(item)
end end
private
def enabled_task?(task_name) def enabled_task?(task_name)
@tasks[task_name].enabled @definitions[task_name].task.enabled
end end
def backup_file?(file) def backup_file?(file)
...@@ -333,7 +363,7 @@ module Backup ...@@ -333,7 +363,7 @@ module Backup
end end
def non_tarred_backup? def non_tarred_backup?
File.exist?(File.join(backup_path, 'backup_information.yml')) File.exist?(File.join(backup_path, MANIFEST_NAME))
end end
def backup_path def backup_path
...@@ -380,19 +410,14 @@ module Backup ...@@ -380,19 +410,14 @@ module Backup
end end
def backup_contents def backup_contents
folders_to_backup + archives_to_backup + ["backup_information.yml"] [MANIFEST_NAME] + @definitions.reject do |name, definition|
end skipped?(name) ||
(definition.destination_optional && !File.exist?(File.join(backup_path, definition.destination_path)))
def archives_to_backup end.values.map(&:destination_path)
ARCHIVES_TO_BACKUP.map { |name| (name + ".tar.gz") unless skipped?(name) }.compact
end
def folders_to_backup
FOLDERS_TO_BACKUP.select { |name| !skipped?(name) && Dir.exist?(File.join(backup_path, name)) }
end end
def settings def settings
@settings ||= YAML.load_file("backup_information.yml") @settings ||= YAML.load_file(MANIFEST_NAME)
end end
def tar_file def tar_file
......
...@@ -2,14 +2,11 @@ ...@@ -2,14 +2,11 @@
module Backup module Backup
class Packages < Backup::Files class Packages < Backup::Files
attr_reader :progress
def initialize(progress) def initialize(progress)
@progress = progress super(progress, 'packages', Settings.packages.storage_path, excludes: ['tmp'])
super('packages', Settings.packages.storage_path, excludes: ['tmp'])
end end
override :human_name
def human_name def human_name
_('packages') _('packages')
end end
......
...@@ -6,14 +6,11 @@ module Backup ...@@ -6,14 +6,11 @@ module Backup
# if some of these files are still there, we don't need them in the backup # if some of these files are still there, we don't need them in the backup
LEGACY_PAGES_TMP_PATH = '@pages.tmp' LEGACY_PAGES_TMP_PATH = '@pages.tmp'
attr_reader :progress
def initialize(progress) def initialize(progress)
@progress = progress super(progress, 'pages', Gitlab.config.pages.path, excludes: [LEGACY_PAGES_TMP_PATH])
super('pages', Gitlab.config.pages.path, excludes: [LEGACY_PAGES_TMP_PATH])
end end
override :human_name
def human_name def human_name
_('pages') _('pages')
end end
......
...@@ -2,18 +2,16 @@ ...@@ -2,18 +2,16 @@
module Backup module Backup
class Registry < Backup::Files class Registry < Backup::Files
attr_reader :progress
def initialize(progress) def initialize(progress)
@progress = progress super(progress, 'registry', Settings.registry.path)
super('registry', Settings.registry.path)
end end
override :human_name
def human_name def human_name
_('container registry images') _('container registry images')
end end
override :enabled
def enabled def enabled
Gitlab.config.registry.enabled Gitlab.config.registry.enabled
end end
......
...@@ -3,16 +3,20 @@ ...@@ -3,16 +3,20 @@
require 'yaml' require 'yaml'
module Backup module Backup
class Repositories class Repositories < Task
extend ::Gitlab::Utils::Override
def initialize(progress, strategy:, max_concurrency: 1, max_storage_concurrency: 1) def initialize(progress, strategy:, max_concurrency: 1, max_storage_concurrency: 1)
@progress = progress super(progress)
@strategy = strategy @strategy = strategy
@max_concurrency = max_concurrency @max_concurrency = max_concurrency
@max_storage_concurrency = max_storage_concurrency @max_storage_concurrency = max_storage_concurrency
end end
def dump override :dump
strategy.start(:create) def dump(path)
strategy.start(:create, path)
# gitaly-backup is designed to handle concurrency on its own. So we want # gitaly-backup is designed to handle concurrency on its own. So we want
# to avoid entering the buggy concurrency code here when gitaly-backup # to avoid entering the buggy concurrency code here when gitaly-backup
...@@ -50,8 +54,9 @@ module Backup ...@@ -50,8 +54,9 @@ module Backup
strategy.finish! strategy.finish!
end end
def restore override :restore
strategy.start(:restore) def restore(path)
strategy.start(:restore, path)
enqueue_consecutive enqueue_consecutive
ensure ensure
...@@ -61,17 +66,14 @@ module Backup ...@@ -61,17 +66,14 @@ module Backup
restore_object_pools restore_object_pools
end end
def enabled override :human_name
true
end
def human_name def human_name
_('repositories') _('repositories')
end end
private private
attr_reader :progress, :strategy, :max_concurrency, :max_storage_concurrency attr_reader :strategy, :max_concurrency, :max_storage_concurrency
def check_valid_storages! def check_valid_storages!
repository_storage_klasses.each do |klass| repository_storage_klasses.each do |klass|
......
# frozen_string_literal: true
module Backup
class Task
def initialize(progress)
@progress = progress
end
# human readable task name used for logging
def human_name
raise NotImplementedError
end
# dump task backup to `path`
def dump(path)
raise NotImplementedError
end
# restore task backup from `path`
def restore(path)
raise NotImplementedError
end
# a string returned here will be displayed to the user before calling #restore
def pre_restore_warning
end
# a string returned here will be displayed to the user after calling #restore
def post_restore_warning
end
# returns `true` when the task should be used
def enabled
true
end
private
attr_reader :progress
def puts_time(msg)
progress.puts "#{Time.zone.now} -- #{msg}"
Gitlab::BackupLogger.info(message: "#{Rainbow.uncolor(msg)}")
end
end
end
...@@ -2,14 +2,11 @@ ...@@ -2,14 +2,11 @@
module Backup module Backup
class TerraformState < Backup::Files class TerraformState < Backup::Files
attr_reader :progress
def initialize(progress) def initialize(progress)
@progress = progress super(progress, 'terraform_state', Settings.terraform_state.storage_path, excludes: ['tmp'])
super('terraform_state', Settings.terraform_state.storage_path, excludes: ['tmp'])
end end
override :human_name
def human_name def human_name
_('terraform states') _('terraform states')
end end
......
...@@ -2,14 +2,11 @@ ...@@ -2,14 +2,11 @@
module Backup module Backup
class Uploads < Backup::Files class Uploads < Backup::Files
attr_reader :progress
def initialize(progress) def initialize(progress)
@progress = progress super(progress, 'uploads', File.join(Gitlab.config.uploads.storage_path, "uploads"), excludes: ['tmp'])
super('uploads', File.join(Gitlab.config.uploads.storage_path, "uploads"), excludes: ['tmp'])
end end
override :human_name
def human_name def human_name
_('uploads') _('uploads')
end end
......
...@@ -18,7 +18,7 @@ RSpec.describe Backup::Artifacts do ...@@ -18,7 +18,7 @@ RSpec.describe Backup::Artifacts do
expect(backup).to receive(:tar).and_return('blabla-tar') expect(backup).to receive(:tar).and_return('blabla-tar')
expect(backup).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found --exclude=./tmp -C /var/gitlab-artifacts -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], '']) expect(backup).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found --exclude=./tmp -C /var/gitlab-artifacts -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], ''])
expect(backup).to receive(:pipeline_succeeded?).and_return(true) expect(backup).to receive(:pipeline_succeeded?).and_return(true)
backup.dump backup.dump('artifacts.tar.gz')
end end
end end
end end
...@@ -6,25 +6,49 @@ RSpec.describe Backup::Database do ...@@ -6,25 +6,49 @@ RSpec.describe Backup::Database do
let(:progress) { StringIO.new } let(:progress) { StringIO.new }
let(:output) { progress.string } let(:output) { progress.string }
before do before(:all) do
allow(Gitlab::TaskHelpers).to receive(:ask_to_continue) Rake.application.rake_require 'active_record/railties/databases'
Rake.application.rake_require 'tasks/gitlab/backup'
Rake.application.rake_require 'tasks/gitlab/shell'
Rake.application.rake_require 'tasks/gitlab/db'
Rake.application.rake_require 'tasks/cache'
end end
describe '#restore' do describe '#restore' do
let(:cmd) { %W[#{Gem.ruby} -e $stdout.puts(1)] } let(:cmd) { %W[#{Gem.ruby} -e $stdout.puts(1)] }
let(:data) { Rails.root.join("spec/fixtures/pages_empty.tar.gz").to_s } let(:data) { Rails.root.join("spec/fixtures/pages_empty.tar.gz").to_s }
let(:force) { true }
subject { described_class.new(progress, filename: data) } subject { described_class.new(progress, force: force) }
before do before do
allow(subject).to receive(:pg_restore_cmd).and_return(cmd) allow(subject).to receive(:pg_restore_cmd).and_return(cmd)
end end
context 'when not forced' do
let(:force) { false }
it 'warns the user and waits' do
expect(subject).to receive(:sleep)
expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
subject.restore(data)
expect(output).to include('Removing all tables. Press `Ctrl-C` within 5 seconds to abort')
end
it 'has a pre restore warning' do
expect(subject.pre_restore_warning).not_to be_nil
end
end
context 'with an empty .gz file' do context 'with an empty .gz file' do
let(:data) { Rails.root.join("spec/fixtures/pages_empty.tar.gz").to_s } let(:data) { Rails.root.join("spec/fixtures/pages_empty.tar.gz").to_s }
it 'returns successfully' do it 'returns successfully' do
subject.restore expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
subject.restore(data)
expect(output).to include("Restoring PostgreSQL database") expect(output).to include("Restoring PostgreSQL database")
expect(output).to include("[DONE]") expect(output).to include("[DONE]")
...@@ -36,7 +60,9 @@ RSpec.describe Backup::Database do ...@@ -36,7 +60,9 @@ RSpec.describe Backup::Database do
let(:data) { Rails.root.join("spec/fixtures/big-image.png").to_s } let(:data) { Rails.root.join("spec/fixtures/big-image.png").to_s }
it 'raises a backup error' do it 'raises a backup error' do
expect { subject.restore }.to raise_error(Backup::Error) expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
expect { subject.restore(data) }.to raise_error(Backup::Error)
end end
end end
...@@ -45,12 +71,15 @@ RSpec.describe Backup::Database do ...@@ -45,12 +71,15 @@ RSpec.describe Backup::Database do
let(:noise) { "Table projects does not exist\nmust be owner of extension pg_trgm\nWARNING: no privileges could be revoked for public\n" } let(:noise) { "Table projects does not exist\nmust be owner of extension pg_trgm\nWARNING: no privileges could be revoked for public\n" }
let(:cmd) { %W[#{Gem.ruby} -e $stderr.write("#{noise}#{visible_error}")] } let(:cmd) { %W[#{Gem.ruby} -e $stderr.write("#{noise}#{visible_error}")] }
it 'filters out noise from errors' do it 'filters out noise from errors and has a post restore warning' do
subject.restore expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
subject.restore(data)
expect(output).to include("ERRORS") expect(output).to include("ERRORS")
expect(output).not_to include(noise) expect(output).not_to include(noise)
expect(output).to include(visible_error) expect(output).to include(visible_error)
expect(subject.post_restore_warning).not_to be_nil
end end
end end
...@@ -66,7 +95,9 @@ RSpec.describe Backup::Database do ...@@ -66,7 +95,9 @@ RSpec.describe Backup::Database do
end end
it 'overrides default config values' do it 'overrides default config values' do
subject.restore expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
subject.restore(data)
expect(output).to include(%("PGHOST"=>"test.example.com")) expect(output).to include(%("PGHOST"=>"test.example.com"))
expect(output).to include(%("PGPASSWORD"=>"donotchange")) expect(output).to include(%("PGPASSWORD"=>"donotchange"))
......
...@@ -39,7 +39,7 @@ RSpec.describe Backup::Files do ...@@ -39,7 +39,7 @@ RSpec.describe Backup::Files do
end end
describe '#restore' do describe '#restore' do
subject { described_class.new('registry', '/var/gitlab-registry') } subject { described_class.new(progress, 'registry', '/var/gitlab-registry') }
let(:timestamp) { Time.utc(2017, 3, 22) } let(:timestamp) { Time.utc(2017, 3, 22) }
...@@ -58,11 +58,11 @@ RSpec.describe Backup::Files do ...@@ -58,11 +58,11 @@ RSpec.describe Backup::Files do
it 'moves all necessary files' do it 'moves all necessary files' do
allow(subject).to receive(:backup_existing_files).and_call_original allow(subject).to receive(:backup_existing_files).and_call_original
expect(FileUtils).to receive(:mv).with(["/var/gitlab-registry/sample1"], File.join(Gitlab.config.backup.path, "tmp", "registry.#{Time.now.to_i}")) expect(FileUtils).to receive(:mv).with(["/var/gitlab-registry/sample1"], File.join(Gitlab.config.backup.path, "tmp", "registry.#{Time.now.to_i}"))
subject.restore subject.restore('registry.tar.gz')
end end
it 'raises no errors' do it 'raises no errors' do
expect { subject.restore }.not_to raise_error expect { subject.restore('registry.tar.gz') }.not_to raise_error
end end
it 'calls tar command with unlink' do it 'calls tar command with unlink' do
...@@ -70,13 +70,13 @@ RSpec.describe Backup::Files do ...@@ -70,13 +70,13 @@ RSpec.describe Backup::Files do
expect(subject).to receive(:run_pipeline!).with([%w(gzip -cd), %w(blabla-tar --unlink-first --recursive-unlink -C /var/gitlab-registry -xf -)], any_args) expect(subject).to receive(:run_pipeline!).with([%w(gzip -cd), %w(blabla-tar --unlink-first --recursive-unlink -C /var/gitlab-registry -xf -)], any_args)
expect(subject).to receive(:pipeline_succeeded?).and_return(true) expect(subject).to receive(:pipeline_succeeded?).and_return(true)
subject.restore subject.restore('registry.tar.gz')
end end
it 'raises an error on failure' do it 'raises an error on failure' do
expect(subject).to receive(:pipeline_succeeded?).and_return(false) expect(subject).to receive(:pipeline_succeeded?).and_return(false)
expect { subject.restore }.to raise_error(/Restore operation failed:/) expect { subject.restore('registry.tar.gz') }.to raise_error(/Restore operation failed:/)
end end
end end
...@@ -89,7 +89,7 @@ RSpec.describe Backup::Files do ...@@ -89,7 +89,7 @@ RSpec.describe Backup::Files do
it 'shows error message' do it 'shows error message' do
expect(subject).to receive(:access_denied_error).with("/var/gitlab-registry") expect(subject).to receive(:access_denied_error).with("/var/gitlab-registry")
subject.restore subject.restore('registry.tar.gz')
end end
end end
...@@ -104,13 +104,13 @@ RSpec.describe Backup::Files do ...@@ -104,13 +104,13 @@ RSpec.describe Backup::Files do
expect(subject).to receive(:resource_busy_error).with("/var/gitlab-registry") expect(subject).to receive(:resource_busy_error).with("/var/gitlab-registry")
.and_call_original .and_call_original
expect { subject.restore }.to raise_error(/is a mountpoint/) expect { subject.restore('registry.tar.gz') }.to raise_error(/is a mountpoint/)
end end
end end
end end
describe '#dump' do describe '#dump' do
subject { described_class.new('pages', '/var/gitlab-pages', excludes: ['@pages.tmp']) } subject { described_class.new(progress, 'pages', '/var/gitlab-pages', excludes: ['@pages.tmp']) }
before do before do
allow(subject).to receive(:run_pipeline!).and_return([[true, true], '']) allow(subject).to receive(:run_pipeline!).and_return([[true, true], ''])
...@@ -118,14 +118,14 @@ RSpec.describe Backup::Files do ...@@ -118,14 +118,14 @@ RSpec.describe Backup::Files do
end end
it 'raises no errors' do it 'raises no errors' do
expect { subject.dump }.not_to raise_error expect { subject.dump('registry.tar.gz') }.not_to raise_error
end end
it 'excludes tmp dirs from archive' do it 'excludes tmp dirs from archive' do
expect(subject).to receive(:tar).and_return('blabla-tar') expect(subject).to receive(:tar).and_return('blabla-tar')
expect(subject).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found --exclude=./@pages.tmp -C /var/gitlab-pages -cf - .), 'gzip -c -1'], any_args) expect(subject).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found --exclude=./@pages.tmp -C /var/gitlab-pages -cf - .), 'gzip -c -1'], any_args)
subject.dump subject.dump('registry.tar.gz')
end end
it 'raises an error on failure' do it 'raises an error on failure' do
...@@ -133,7 +133,7 @@ RSpec.describe Backup::Files do ...@@ -133,7 +133,7 @@ RSpec.describe Backup::Files do
expect(subject).to receive(:pipeline_succeeded?).and_return(false) expect(subject).to receive(:pipeline_succeeded?).and_return(false)
expect do expect do
subject.dump subject.dump('registry.tar.gz')
end.to raise_error(/Failed to create compressed file/) end.to raise_error(/Failed to create compressed file/)
end end
...@@ -149,7 +149,7 @@ RSpec.describe Backup::Files do ...@@ -149,7 +149,7 @@ RSpec.describe Backup::Files do
.with(%w(rsync -a --delete --exclude=lost+found --exclude=/gitlab-pages/@pages.tmp /var/gitlab-pages /var/gitlab-backup)) .with(%w(rsync -a --delete --exclude=lost+found --exclude=/gitlab-pages/@pages.tmp /var/gitlab-pages /var/gitlab-backup))
.and_return(['', 0]) .and_return(['', 0])
subject.dump subject.dump('registry.tar.gz')
end end
it 'retries if rsync fails due to vanishing files' do it 'retries if rsync fails due to vanishing files' do
...@@ -158,7 +158,7 @@ RSpec.describe Backup::Files do ...@@ -158,7 +158,7 @@ RSpec.describe Backup::Files do
.and_return(['rsync failed', 24], ['', 0]) .and_return(['rsync failed', 24], ['', 0])
expect do expect do
subject.dump subject.dump('registry.tar.gz')
end.to output(/files vanished during rsync, retrying/).to_stdout end.to output(/files vanished during rsync, retrying/).to_stdout
end end
...@@ -168,7 +168,7 @@ RSpec.describe Backup::Files do ...@@ -168,7 +168,7 @@ RSpec.describe Backup::Files do
.and_return(['rsync failed', 1]) .and_return(['rsync failed', 1])
expect do expect do
subject.dump subject.dump('registry.tar.gz')
end.to output(/rsync failed/).to_stdout end.to output(/rsync failed/).to_stdout
.and raise_error(/Failed to create compressed file/) .and raise_error(/Failed to create compressed file/)
end end
...@@ -176,7 +176,7 @@ RSpec.describe Backup::Files do ...@@ -176,7 +176,7 @@ RSpec.describe Backup::Files do
end end
describe '#exclude_dirs' do describe '#exclude_dirs' do
subject { described_class.new('pages', '/var/gitlab-pages', excludes: ['@pages.tmp']) } subject { described_class.new(progress, 'pages', '/var/gitlab-pages', excludes: ['@pages.tmp']) }
it 'prepends a leading dot slash to tar excludes' do it 'prepends a leading dot slash to tar excludes' do
expect(subject.exclude_dirs(:tar)).to eq(['--exclude=lost+found', '--exclude=./@pages.tmp']) expect(subject.exclude_dirs(:tar)).to eq(['--exclude=lost+found', '--exclude=./@pages.tmp'])
...@@ -188,7 +188,7 @@ RSpec.describe Backup::Files do ...@@ -188,7 +188,7 @@ RSpec.describe Backup::Files do
end end
describe '#run_pipeline!' do describe '#run_pipeline!' do
subject { described_class.new('registry', '/var/gitlab-registry') } subject { described_class.new(progress, 'registry', '/var/gitlab-registry') }
it 'executes an Open3.pipeline for cmd_list' do it 'executes an Open3.pipeline for cmd_list' do
expect(Open3).to receive(:pipeline).with(%w[whew command], %w[another cmd], any_args) expect(Open3).to receive(:pipeline).with(%w[whew command], %w[another cmd], any_args)
...@@ -222,7 +222,7 @@ RSpec.describe Backup::Files do ...@@ -222,7 +222,7 @@ RSpec.describe Backup::Files do
end end
describe '#pipeline_succeeded?' do describe '#pipeline_succeeded?' do
subject { described_class.new('registry', '/var/gitlab-registry') } subject { described_class.new(progress, 'registry', '/var/gitlab-registry') }
it 'returns true if both tar and gzip succeeeded' do it 'returns true if both tar and gzip succeeeded' do
expect( expect(
...@@ -262,7 +262,7 @@ RSpec.describe Backup::Files do ...@@ -262,7 +262,7 @@ RSpec.describe Backup::Files do
end end
describe '#tar_ignore_non_success?' do describe '#tar_ignore_non_success?' do
subject { described_class.new('registry', '/var/gitlab-registry') } subject { described_class.new(progress, 'registry', '/var/gitlab-registry') }
context 'if `tar` command exits with 1 exitstatus' do context 'if `tar` command exits with 1 exitstatus' do
it 'returns true' do it 'returns true' do
...@@ -310,7 +310,7 @@ RSpec.describe Backup::Files do ...@@ -310,7 +310,7 @@ RSpec.describe Backup::Files do
end end
describe '#noncritical_warning?' do describe '#noncritical_warning?' do
subject { described_class.new('registry', '/var/gitlab-registry') } subject { described_class.new(progress, 'registry', '/var/gitlab-registry') }
it 'returns true if given text matches noncritical warnings list' do it 'returns true if given text matches noncritical warnings list' do
expect( expect(
......
...@@ -5,6 +5,7 @@ require 'spec_helper' ...@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Backup::GitalyBackup do RSpec.describe Backup::GitalyBackup do
let(:max_parallelism) { nil } let(:max_parallelism) { nil }
let(:storage_parallelism) { nil } let(:storage_parallelism) { nil }
let(:destination) { File.join(Gitlab.config.backup.path, 'repositories') }
let(:progress) do let(:progress) do
Tempfile.new('progress').tap do |progress| Tempfile.new('progress').tap do |progress|
...@@ -27,7 +28,7 @@ RSpec.describe Backup::GitalyBackup do ...@@ -27,7 +28,7 @@ RSpec.describe Backup::GitalyBackup do
context 'unknown' do context 'unknown' do
it 'fails to start unknown' do it 'fails to start unknown' do
expect { subject.start(:unknown) }.to raise_error(::Backup::Error, 'unknown backup type: unknown') expect { subject.start(:unknown, destination) }.to raise_error(::Backup::Error, 'unknown backup type: unknown')
end end
end end
...@@ -42,7 +43,7 @@ RSpec.describe Backup::GitalyBackup do ...@@ -42,7 +43,7 @@ RSpec.describe Backup::GitalyBackup do
expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything).and_call_original expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything).and_call_original
subject.start(:create) subject.start(:create, destination)
subject.enqueue(project, Gitlab::GlRepository::PROJECT) subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.enqueue(project, Gitlab::GlRepository::WIKI) subject.enqueue(project, Gitlab::GlRepository::WIKI)
subject.enqueue(project, Gitlab::GlRepository::DESIGN) subject.enqueue(project, Gitlab::GlRepository::DESIGN)
...@@ -50,11 +51,11 @@ RSpec.describe Backup::GitalyBackup do ...@@ -50,11 +51,11 @@ RSpec.describe Backup::GitalyBackup do
subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET) subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET)
subject.finish! subject.finish!
expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.bundle')) expect(File).to exist(File.join(destination, project.disk_path + '.bundle'))
expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.wiki.bundle')) expect(File).to exist(File.join(destination, project.disk_path + '.wiki.bundle'))
expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.design.bundle')) expect(File).to exist(File.join(destination, project.disk_path + '.design.bundle'))
expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', personal_snippet.disk_path + '.bundle')) expect(File).to exist(File.join(destination, personal_snippet.disk_path + '.bundle'))
expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project_snippet.disk_path + '.bundle')) expect(File).to exist(File.join(destination, project_snippet.disk_path + '.bundle'))
end end
context 'parallel option set' do context 'parallel option set' do
...@@ -63,7 +64,7 @@ RSpec.describe Backup::GitalyBackup do ...@@ -63,7 +64,7 @@ RSpec.describe Backup::GitalyBackup do
it 'passes parallel option through' do it 'passes parallel option through' do
expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything, '-parallel', '3').and_call_original expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything, '-parallel', '3').and_call_original
subject.start(:create) subject.start(:create, destination)
subject.finish! subject.finish!
end end
end end
...@@ -74,7 +75,7 @@ RSpec.describe Backup::GitalyBackup do ...@@ -74,7 +75,7 @@ RSpec.describe Backup::GitalyBackup do
it 'passes parallel option through' do it 'passes parallel option through' do
expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything, '-parallel-storage', '3').and_call_original expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything, '-parallel-storage', '3').and_call_original
subject.start(:create) subject.start(:create, destination)
subject.finish! subject.finish!
end end
end end
...@@ -82,7 +83,7 @@ RSpec.describe Backup::GitalyBackup do ...@@ -82,7 +83,7 @@ RSpec.describe Backup::GitalyBackup do
it 'raises when the exit code not zero' do it 'raises when the exit code not zero' do
expect(subject).to receive(:bin_path).and_return(Gitlab::Utils.which('false')) expect(subject).to receive(:bin_path).and_return(Gitlab::Utils.which('false'))
subject.start(:create) subject.start(:create, destination)
expect { subject.finish! }.to raise_error(::Backup::Error, 'gitaly-backup exit status 1') expect { subject.finish! }.to raise_error(::Backup::Error, 'gitaly-backup exit status 1')
end end
end end
...@@ -114,7 +115,7 @@ RSpec.describe Backup::GitalyBackup do ...@@ -114,7 +115,7 @@ RSpec.describe Backup::GitalyBackup do
it 'passes through SSL envs' do it 'passes through SSL envs' do
expect(Open3).to receive(:popen2).with(ssl_env, anything, 'create', '-path', anything).and_call_original expect(Open3).to receive(:popen2).with(ssl_env, anything, 'create', '-path', anything).and_call_original
subject.start(:create) subject.start(:create, destination)
subject.finish! subject.finish!
end end
end end
...@@ -139,7 +140,7 @@ RSpec.describe Backup::GitalyBackup do ...@@ -139,7 +140,7 @@ RSpec.describe Backup::GitalyBackup do
expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything).and_call_original expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything).and_call_original
subject.start(:restore) subject.start(:restore, destination)
subject.enqueue(project, Gitlab::GlRepository::PROJECT) subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.enqueue(project, Gitlab::GlRepository::WIKI) subject.enqueue(project, Gitlab::GlRepository::WIKI)
subject.enqueue(project, Gitlab::GlRepository::DESIGN) subject.enqueue(project, Gitlab::GlRepository::DESIGN)
...@@ -162,7 +163,7 @@ RSpec.describe Backup::GitalyBackup do ...@@ -162,7 +163,7 @@ RSpec.describe Backup::GitalyBackup do
it 'passes parallel option through' do it 'passes parallel option through' do
expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything, '-parallel', '3').and_call_original expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything, '-parallel', '3').and_call_original
subject.start(:restore) subject.start(:restore, destination)
subject.finish! subject.finish!
end end
end end
...@@ -173,7 +174,7 @@ RSpec.describe Backup::GitalyBackup do ...@@ -173,7 +174,7 @@ RSpec.describe Backup::GitalyBackup do
it 'passes parallel option through' do it 'passes parallel option through' do
expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything, '-parallel-storage', '3').and_call_original expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything, '-parallel-storage', '3').and_call_original
subject.start(:restore) subject.start(:restore, destination)
subject.finish! subject.finish!
end end
end end
...@@ -181,7 +182,7 @@ RSpec.describe Backup::GitalyBackup do ...@@ -181,7 +182,7 @@ RSpec.describe Backup::GitalyBackup do
it 'raises when the exit code not zero' do it 'raises when the exit code not zero' do
expect(subject).to receive(:bin_path).and_return(Gitlab::Utils.which('false')) expect(subject).to receive(:bin_path).and_return(Gitlab::Utils.which('false'))
subject.start(:restore) subject.start(:restore, destination)
expect { subject.finish! }.to raise_error(::Backup::Error, 'gitaly-backup exit status 1') expect { subject.finish! }.to raise_error(::Backup::Error, 'gitaly-backup exit status 1')
end end
end end
......
...@@ -4,6 +4,7 @@ require 'spec_helper' ...@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Backup::GitalyRpcBackup do RSpec.describe Backup::GitalyRpcBackup do
let(:progress) { spy(:stdout) } let(:progress) { spy(:stdout) }
let(:destination) { File.join(Gitlab.config.backup.path, 'repositories') }
subject { described_class.new(progress) } subject { described_class.new(progress) }
...@@ -14,7 +15,7 @@ RSpec.describe Backup::GitalyRpcBackup do ...@@ -14,7 +15,7 @@ RSpec.describe Backup::GitalyRpcBackup do
context 'unknown' do context 'unknown' do
it 'fails to start unknown' do it 'fails to start unknown' do
expect { subject.start(:unknown) }.to raise_error(::Backup::Error, 'unknown backup type: unknown') expect { subject.start(:unknown, destination) }.to raise_error(::Backup::Error, 'unknown backup type: unknown')
end end
end end
...@@ -27,7 +28,7 @@ RSpec.describe Backup::GitalyRpcBackup do ...@@ -27,7 +28,7 @@ RSpec.describe Backup::GitalyRpcBackup do
project_snippet = create(:project_snippet, :repository, project: project) project_snippet = create(:project_snippet, :repository, project: project)
personal_snippet = create(:personal_snippet, :repository, author: project.first_owner) personal_snippet = create(:personal_snippet, :repository, author: project.first_owner)
subject.start(:create) subject.start(:create, destination)
subject.enqueue(project, Gitlab::GlRepository::PROJECT) subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.enqueue(project, Gitlab::GlRepository::WIKI) subject.enqueue(project, Gitlab::GlRepository::WIKI)
subject.enqueue(project, Gitlab::GlRepository::DESIGN) subject.enqueue(project, Gitlab::GlRepository::DESIGN)
...@@ -35,11 +36,11 @@ RSpec.describe Backup::GitalyRpcBackup do ...@@ -35,11 +36,11 @@ RSpec.describe Backup::GitalyRpcBackup do
subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET) subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET)
subject.finish! subject.finish!
expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.bundle')) expect(File).to exist(File.join(destination, project.disk_path + '.bundle'))
expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.wiki.bundle')) expect(File).to exist(File.join(destination, project.disk_path + '.wiki.bundle'))
expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.design.bundle')) expect(File).to exist(File.join(destination, project.disk_path + '.design.bundle'))
expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', personal_snippet.disk_path + '.bundle')) expect(File).to exist(File.join(destination, personal_snippet.disk_path + '.bundle'))
expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project_snippet.disk_path + '.bundle')) expect(File).to exist(File.join(destination, project_snippet.disk_path + '.bundle'))
end end
context 'failure' do context 'failure' do
...@@ -50,7 +51,7 @@ RSpec.describe Backup::GitalyRpcBackup do ...@@ -50,7 +51,7 @@ RSpec.describe Backup::GitalyRpcBackup do
end end
it 'logs an appropriate message', :aggregate_failures do it 'logs an appropriate message', :aggregate_failures do
subject.start(:create) subject.start(:create, destination)
subject.enqueue(project, Gitlab::GlRepository::PROJECT) subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.finish! subject.finish!
...@@ -90,7 +91,7 @@ RSpec.describe Backup::GitalyRpcBackup do ...@@ -90,7 +91,7 @@ RSpec.describe Backup::GitalyRpcBackup do
copy_bundle_to_backup_path('personal_snippet_repo.bundle', personal_snippet.disk_path + '.bundle') copy_bundle_to_backup_path('personal_snippet_repo.bundle', personal_snippet.disk_path + '.bundle')
copy_bundle_to_backup_path('project_snippet_repo.bundle', project_snippet.disk_path + '.bundle') copy_bundle_to_backup_path('project_snippet_repo.bundle', project_snippet.disk_path + '.bundle')
subject.start(:restore) subject.start(:restore, destination)
subject.enqueue(project, Gitlab::GlRepository::PROJECT) subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.enqueue(project, Gitlab::GlRepository::WIKI) subject.enqueue(project, Gitlab::GlRepository::WIKI)
subject.enqueue(project, Gitlab::GlRepository::DESIGN) subject.enqueue(project, Gitlab::GlRepository::DESIGN)
...@@ -123,7 +124,7 @@ RSpec.describe Backup::GitalyRpcBackup do ...@@ -123,7 +124,7 @@ RSpec.describe Backup::GitalyRpcBackup do
repository repository
end end
subject.start(:restore) subject.start(:restore, destination)
subject.enqueue(project, Gitlab::GlRepository::PROJECT) subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.enqueue(project, Gitlab::GlRepository::WIKI) subject.enqueue(project, Gitlab::GlRepository::WIKI)
subject.enqueue(project, Gitlab::GlRepository::DESIGN) subject.enqueue(project, Gitlab::GlRepository::DESIGN)
...@@ -141,7 +142,7 @@ RSpec.describe Backup::GitalyRpcBackup do ...@@ -141,7 +142,7 @@ RSpec.describe Backup::GitalyRpcBackup do
end end
it 'logs an appropriate message', :aggregate_failures do it 'logs an appropriate message', :aggregate_failures do
subject.start(:restore) subject.start(:restore, destination)
subject.enqueue(project, Gitlab::GlRepository::PROJECT) subject.enqueue(project, Gitlab::GlRepository::PROJECT)
subject.finish! subject.finish!
......
...@@ -20,7 +20,7 @@ RSpec.describe Backup::Lfs do ...@@ -20,7 +20,7 @@ RSpec.describe Backup::Lfs do
expect(backup).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found -C /var/lfs-objects -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], '']) expect(backup).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found -C /var/lfs-objects -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], ''])
expect(backup).to receive(:pipeline_succeeded?).and_return(true) expect(backup).to receive(:pipeline_succeeded?).and_return(true)
backup.dump backup.dump('lfs.tar.gz')
end end
end end
end end
...@@ -6,16 +6,149 @@ RSpec.describe Backup::Manager do ...@@ -6,16 +6,149 @@ RSpec.describe Backup::Manager do
include StubENV include StubENV
let(:progress) { StringIO.new } let(:progress) { StringIO.new }
let(:definitions) { nil }
subject { described_class.new(progress) } subject { described_class.new(progress, definitions: definitions) }
before do before do
# Rspec fails with `uninitialized constant RSpec::Support::Differ` when it
# is trying to display a diff and `File.exist?` is stubbed. Adding a
# default stub fixes this.
allow(File).to receive(:exist?).and_call_original
allow(progress).to receive(:puts) allow(progress).to receive(:puts)
allow(progress).to receive(:print) allow(progress).to receive(:print)
end end
describe '#pack' do describe '#run_create_task' do
let(:expected_backup_contents) { %w(repositories db uploads.tar.gz builds.tar.gz artifacts.tar.gz pages.tar.gz lfs.tar.gz terraform_state.tar.gz packages.tar.gz backup_information.yml) } let(:enabled) { true }
let(:task) { instance_double(Backup::Task, human_name: 'my task', enabled: enabled) }
let(:definitions) { { 'my_task' => Backup::Manager::TaskDefinition.new(task: task, destination_path: 'my_task.tar.gz') } }
it 'calls the named task' do
expect(task).to receive(:dump)
expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Dumping my task ... ')
expect(Gitlab::BackupLogger).to receive(:info).with(message: 'done')
subject.run_create_task('my_task')
end
describe 'disabled' do
let(:enabled) { false }
it 'informs the user' do
expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Dumping my task ... ')
expect(Gitlab::BackupLogger).to receive(:info).with(message: '[DISABLED]')
subject.run_create_task('my_task')
end
end
describe 'skipped' do
it 'informs the user' do
stub_env('SKIP', 'my_task')
expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Dumping my task ... ')
expect(Gitlab::BackupLogger).to receive(:info).with(message: '[SKIPPED]')
subject.run_create_task('my_task')
end
end
end
describe '#run_restore_task' do
  # Defaults for the task double; individual contexts override these lets.
  let(:enabled) { true }
  let(:pre_restore_warning) { nil }
  let(:post_restore_warning) { nil }
  let(:definitions) { { 'my_task' => Backup::Manager::TaskDefinition.new(task: task, destination_path: 'my_task.tar.gz') } }
  # Stubbed contents of backup_information.yml; an empty hash suffices here.
  let(:backup_information) { {} }

  # Double for the Backup::Task interface, driven by the lets above.
  let(:task) do
    instance_double(Backup::Task,
      human_name: 'my task',
      enabled: enabled,
      pre_restore_warning: pre_restore_warning,
      post_restore_warning: post_restore_warning)
  end

  before do
    # run_restore_task reads backup_information.yml; stub the file load.
    allow(YAML).to receive(:load_file).with('backup_information.yml')
      .and_return(backup_information)
  end

  it 'calls the named task' do
    expect(task).to receive(:restore)
    # .ordered: the "Restoring" line must be logged before "done".
    expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... ').ordered
    expect(Gitlab::BackupLogger).to receive(:info).with(message: 'done').ordered

    subject.run_restore_task('my_task')
  end

  describe 'disabled' do
    let(:enabled) { false }

    it 'informs the user' do
      expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... ').ordered
      expect(Gitlab::BackupLogger).to receive(:info).with(message: '[DISABLED]').ordered

      subject.run_restore_task('my_task')
    end
  end

  describe 'pre_restore_warning' do
    let(:pre_restore_warning) { 'Watch out!' }

    it 'displays and waits for the user' do
      # Warning is shown before the restore runs, hence before 'done'.
      expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... ').ordered
      expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Watch out!').ordered
      expect(Gitlab::BackupLogger).to receive(:info).with(message: 'done').ordered
      expect(Gitlab::TaskHelpers).to receive(:ask_to_continue)
      expect(task).to receive(:restore)

      subject.run_restore_task('my_task')
    end

    it 'does not continue when the user quits' do
      expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... ').ordered
      expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Watch out!').ordered
      expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Quitting...').ordered
      # Aborting at the pre-restore prompt exits before task.restore is called.
      expect(Gitlab::TaskHelpers).to receive(:ask_to_continue).and_raise(Gitlab::TaskAbortedByUserError)

      expect do
        subject.run_restore_task('my_task')
      end.to raise_error(SystemExit)
    end
  end

  describe 'post_restore_warning' do
    let(:post_restore_warning) { 'Watch out!' }

    it 'displays and waits for the user' do
      # Post-restore warning appears after 'done'.
      expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... ').ordered
      expect(Gitlab::BackupLogger).to receive(:info).with(message: 'done').ordered
      expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Watch out!').ordered
      expect(Gitlab::TaskHelpers).to receive(:ask_to_continue)
      expect(task).to receive(:restore)

      subject.run_restore_task('my_task')
    end

    it 'does not continue when the user quits' do
      expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring my task ... ').ordered
      expect(Gitlab::BackupLogger).to receive(:info).with(message: 'done').ordered
      expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Watch out!').ordered
      expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Quitting...').ordered
      # Restore has already happened; quitting at the post-restore prompt still exits.
      expect(task).to receive(:restore)
      expect(Gitlab::TaskHelpers).to receive(:ask_to_continue).and_raise(Gitlab::TaskAbortedByUserError)

      expect do
        subject.run_restore_task('my_task')
      end.to raise_error(SystemExit)
    end
  end
end
describe '#create' do
let(:expected_backup_contents) { %w{backup_information.yml task1.tar.gz task2.tar.gz} }
let(:tar_file) { '1546300800_2019_01_01_12.3_gitlab_backup.tar' } let(:tar_file) { '1546300800_2019_01_01_12.3_gitlab_backup.tar' }
let(:tar_system_options) { { out: [tar_file, 'w', Gitlab.config.backup.archive_permissions] } } let(:tar_system_options) { { out: [tar_file, 'w', Gitlab.config.backup.archive_permissions] } }
let(:tar_cmdline) { ['tar', '-cf', '-', *expected_backup_contents, tar_system_options] } let(:tar_cmdline) { ['tar', '-cf', '-', *expected_backup_contents, tar_system_options] }
...@@ -26,21 +159,27 @@ RSpec.describe Backup::Manager do ...@@ -26,21 +159,27 @@ RSpec.describe Backup::Manager do
} }
end end
let(:task1) { instance_double(Backup::Task, human_name: 'task 1', enabled: true) }
let(:task2) { instance_double(Backup::Task, human_name: 'task 2', enabled: true) }
let(:definitions) do
{
'task1' => Backup::Manager::TaskDefinition.new(task: task1, destination_path: 'task1.tar.gz'),
'task2' => Backup::Manager::TaskDefinition.new(task: task2, destination_path: 'task2.tar.gz')
}
end
before do before do
allow(ActiveRecord::Base.connection).to receive(:reconnect!) allow(ActiveRecord::Base.connection).to receive(:reconnect!)
allow(Kernel).to receive(:system).and_return(true) allow(Kernel).to receive(:system).and_return(true)
allow(YAML).to receive(:load_file).and_return(backup_information) allow(YAML).to receive(:load_file).and_return(backup_information)
::Backup::Manager::FOLDERS_TO_BACKUP.each do |folder|
allow(Dir).to receive(:exist?).with(File.join(Gitlab.config.backup.path, folder)).and_return(true)
end
allow(subject).to receive(:backup_information).and_return(backup_information) allow(subject).to receive(:backup_information).and_return(backup_information)
allow(subject).to receive(:upload) allow(task1).to receive(:dump).with(File.join(Gitlab.config.backup.path, 'task1.tar.gz'))
allow(task2).to receive(:dump).with(File.join(Gitlab.config.backup.path, 'task2.tar.gz'))
end end
it 'executes tar' do it 'executes tar' do
subject.pack subject.create # rubocop:disable Rails/SaveBang
expect(Kernel).to have_received(:system).with(*tar_cmdline) expect(Kernel).to have_received(:system).with(*tar_cmdline)
end end
...@@ -50,247 +189,400 @@ RSpec.describe Backup::Manager do ...@@ -50,247 +189,400 @@ RSpec.describe Backup::Manager do
it 'uses the given value as tar file name' do it 'uses the given value as tar file name' do
stub_env('BACKUP', '/ignored/path/custom') stub_env('BACKUP', '/ignored/path/custom')
subject.pack subject.create # rubocop:disable Rails/SaveBang
expect(Kernel).to have_received(:system).with(*tar_cmdline) expect(Kernel).to have_received(:system).with(*tar_cmdline)
end end
end end
context 'when skipped is set in backup_information.yml' do context 'when skipped is set in backup_information.yml' do
let(:expected_backup_contents) { %w{db uploads.tar.gz builds.tar.gz artifacts.tar.gz pages.tar.gz lfs.tar.gz terraform_state.tar.gz packages.tar.gz backup_information.yml} } let(:expected_backup_contents) { %w{backup_information.yml task1.tar.gz} }
let(:backup_information) do let(:backup_information) do
{ {
backup_created_at: Time.zone.parse('2019-01-01'), backup_created_at: Time.zone.parse('2019-01-01'),
gitlab_version: '12.3', gitlab_version: '12.3',
skipped: ['repositories'] skipped: ['task2']
} }
end end
it 'executes tar' do it 'executes tar' do
subject.pack subject.create # rubocop:disable Rails/SaveBang
expect(Kernel).to have_received(:system).with(*tar_cmdline) expect(Kernel).to have_received(:system).with(*tar_cmdline)
end end
end end
context 'when a directory does not exist' do context 'when the destination is optional' do
let(:expected_backup_contents) { %w{db uploads.tar.gz builds.tar.gz artifacts.tar.gz pages.tar.gz lfs.tar.gz terraform_state.tar.gz packages.tar.gz backup_information.yml} } let(:expected_backup_contents) { %w{backup_information.yml task1.tar.gz} }
let(:definitions) do
before do {
expect(Dir).to receive(:exist?).with(File.join(Gitlab.config.backup.path, 'repositories')).and_return(false) 'task1' => Backup::Manager::TaskDefinition.new(task: task1, destination_path: 'task1.tar.gz'),
'task2' => Backup::Manager::TaskDefinition.new(task: task2, destination_path: 'task2.tar.gz', destination_optional: true)
}
end end
it 'executes tar' do it 'executes tar' do
subject.pack expect(File).to receive(:exist?).with(File.join(Gitlab.config.backup.path, 'task2.tar.gz')).and_return(false)
subject.create # rubocop:disable Rails/SaveBang
expect(Kernel).to have_received(:system).with(*tar_cmdline) expect(Kernel).to have_received(:system).with(*tar_cmdline)
end end
end end
end
describe '#remove_tmp' do context 'many backup files' do
let(:path) { File.join(Gitlab.config.backup.path, 'tmp') } let(:files) do
[
'1451606400_2016_01_01_1.2.3_gitlab_backup.tar',
'1451520000_2015_12_31_4.5.6_gitlab_backup.tar',
'1451520000_2015_12_31_4.5.6-pre_gitlab_backup.tar',
'1451520000_2015_12_31_4.5.6-rc1_gitlab_backup.tar',
'1451520000_2015_12_31_4.5.6-pre-ee_gitlab_backup.tar',
'1451510000_2015_12_30_gitlab_backup.tar',
'1450742400_2015_12_22_gitlab_backup.tar',
'1449878400_gitlab_backup.tar',
'1449014400_gitlab_backup.tar',
'manual_gitlab_backup.tar'
]
end
before do before do
allow(FileUtils).to receive(:rm_rf).and_return(true) allow(Dir).to receive(:chdir).and_yield
end allow(Dir).to receive(:glob).and_return(files)
allow(FileUtils).to receive(:rm)
allow(Time).to receive(:now).and_return(Time.utc(2016))
end
it 'removes backups/tmp dir' do context 'when keep_time is zero' do
subject.remove_tmp before do
allow(Gitlab.config.backup).to receive(:keep_time).and_return(0)
expect(FileUtils).to have_received(:rm_rf).with(path) subject.create # rubocop:disable Rails/SaveBang
end end
it 'prints running task with a done confirmation' do it 'removes no files' do
subject.remove_tmp expect(FileUtils).not_to have_received(:rm)
end
expect(progress).to have_received(:print).with('Deleting backups/tmp ... ') it 'prints a skipped message' do
expect(progress).to have_received(:puts).with('done') expect(progress).to have_received(:puts).with('skipping')
end end
end end
describe '#remove_old' do context 'when no valid file is found' do
let(:files) do let(:files) do
[ [
'1451606400_2016_01_01_1.2.3_gitlab_backup.tar', '14516064000_2016_01_01_1.2.3_gitlab_backup.tar',
'1451520000_2015_12_31_4.5.6_gitlab_backup.tar', 'foo_1451520000_2015_12_31_4.5.6_gitlab_backup.tar',
'1451520000_2015_12_31_4.5.6-pre_gitlab_backup.tar', '1451520000_2015_12_31_4.5.6-foo_gitlab_backup.tar'
'1451520000_2015_12_31_4.5.6-rc1_gitlab_backup.tar', ]
'1451520000_2015_12_31_4.5.6-pre-ee_gitlab_backup.tar', end
'1451510000_2015_12_30_gitlab_backup.tar',
'1450742400_2015_12_22_gitlab_backup.tar',
'1449878400_gitlab_backup.tar',
'1449014400_gitlab_backup.tar',
'manual_gitlab_backup.tar'
]
end
before do before do
allow(Dir).to receive(:chdir).and_yield allow(Gitlab.config.backup).to receive(:keep_time).and_return(1)
allow(Dir).to receive(:glob).and_return(files)
allow(FileUtils).to receive(:rm)
allow(Time).to receive(:now).and_return(Time.utc(2016))
end
context 'when keep_time is zero' do subject.create # rubocop:disable Rails/SaveBang
before do end
allow(Gitlab.config.backup).to receive(:keep_time).and_return(0)
subject.remove_old it 'removes no files' do
end expect(FileUtils).not_to have_received(:rm)
end
it 'removes no files' do it 'prints a done message' do
expect(FileUtils).not_to have_received(:rm) expect(progress).to have_received(:puts).with('done. (0 removed)')
end
end end
it 'prints a skipped message' do context 'when there are no files older than keep_time' do
expect(progress).to have_received(:puts).with('skipping') before do
end # Set to 30 days
end allow(Gitlab.config.backup).to receive(:keep_time).and_return(2592000)
context 'when no valid file is found' do subject.create # rubocop:disable Rails/SaveBang
let(:files) do end
[
'14516064000_2016_01_01_1.2.3_gitlab_backup.tar',
'foo_1451520000_2015_12_31_4.5.6_gitlab_backup.tar',
'1451520000_2015_12_31_4.5.6-foo_gitlab_backup.tar'
]
end
before do it 'removes no files' do
allow(Gitlab.config.backup).to receive(:keep_time).and_return(1) expect(FileUtils).not_to have_received(:rm)
end
subject.remove_old it 'prints a done message' do
expect(progress).to have_received(:puts).with('done. (0 removed)')
end
end end
it 'removes no files' do context 'when keep_time is set to remove files' do
expect(FileUtils).not_to have_received(:rm) before do
end # Set to 1 second
allow(Gitlab.config.backup).to receive(:keep_time).and_return(1)
it 'prints a done message' do subject.create # rubocop:disable Rails/SaveBang
expect(progress).to have_received(:puts).with('done. (0 removed)') end
end
end
context 'when there are no files older than keep_time' do it 'removes matching files with a human-readable versioned timestamp' do
before do expect(FileUtils).to have_received(:rm).with(files[1])
# Set to 30 days expect(FileUtils).to have_received(:rm).with(files[2])
allow(Gitlab.config.backup).to receive(:keep_time).and_return(2592000) expect(FileUtils).to have_received(:rm).with(files[3])
end
subject.remove_old it 'removes matching files with a human-readable versioned timestamp with tagged EE' do
end expect(FileUtils).to have_received(:rm).with(files[4])
end
it 'removes no files' do it 'removes matching files with a human-readable non-versioned timestamp' do
expect(FileUtils).not_to have_received(:rm) expect(FileUtils).to have_received(:rm).with(files[5])
expect(FileUtils).to have_received(:rm).with(files[6])
end
it 'removes matching files without a human-readable timestamp' do
expect(FileUtils).to have_received(:rm).with(files[7])
expect(FileUtils).to have_received(:rm).with(files[8])
end
it 'does not remove files that are not old enough' do
expect(FileUtils).not_to have_received(:rm).with(files[0])
end
it 'does not remove non-matching files' do
expect(FileUtils).not_to have_received(:rm).with(files[9])
end
it 'prints a done message' do
expect(progress).to have_received(:puts).with('done. (8 removed)')
end
end end
it 'prints a done message' do context 'when removing a file fails' do
expect(progress).to have_received(:puts).with('done. (0 removed)') let(:file) { files[1] }
let(:message) { "Permission denied @ unlink_internal - #{file}" }
before do
allow(Gitlab.config.backup).to receive(:keep_time).and_return(1)
allow(FileUtils).to receive(:rm).with(file).and_raise(Errno::EACCES, message)
subject.create # rubocop:disable Rails/SaveBang
end
it 'removes the remaining expected files' do
expect(FileUtils).to have_received(:rm).with(files[4])
expect(FileUtils).to have_received(:rm).with(files[5])
expect(FileUtils).to have_received(:rm).with(files[6])
expect(FileUtils).to have_received(:rm).with(files[7])
expect(FileUtils).to have_received(:rm).with(files[8])
end
it 'sets the correct removed count' do
expect(progress).to have_received(:puts).with('done. (7 removed)')
end
it 'prints the error from file that could not be removed' do
expect(progress).to have_received(:puts).with(a_string_matching(message))
end
end end
end end
context 'when keep_time is set to remove files' do describe 'cloud storage' do
let(:backup_file) { Tempfile.new('backup', Gitlab.config.backup.path) }
let(:backup_filename) { File.basename(backup_file.path) }
before do before do
# Set to 1 second allow(subject).to receive(:tar_file).and_return(backup_filename)
allow(Gitlab.config.backup).to receive(:keep_time).and_return(1)
subject.remove_old stub_backup_setting(
end upload: {
connection: {
provider: 'AWS',
aws_access_key_id: 'id',
aws_secret_access_key: 'secret'
},
remote_directory: 'directory',
multipart_chunk_size: 104857600,
encryption: nil,
encryption_key: nil,
storage_class: nil
}
)
it 'removes matching files with a human-readable versioned timestamp' do Fog.mock!
expect(FileUtils).to have_received(:rm).with(files[1])
expect(FileUtils).to have_received(:rm).with(files[2])
expect(FileUtils).to have_received(:rm).with(files[3])
end
it 'removes matching files with a human-readable versioned timestamp with tagged EE' do # the Fog mock only knows about directories we create explicitly
expect(FileUtils).to have_received(:rm).with(files[4]) connection = ::Fog::Storage.new(Gitlab.config.backup.upload.connection.symbolize_keys)
connection.directories.create(key: Gitlab.config.backup.upload.remote_directory) # rubocop:disable Rails/SaveBang
end end
it 'removes matching files with a human-readable non-versioned timestamp' do context 'target path' do
expect(FileUtils).to have_received(:rm).with(files[5]) it 'uses the tar filename by default' do
expect(FileUtils).to have_received(:rm).with(files[6]) expect_any_instance_of(Fog::Collection).to receive(:create)
end .with(hash_including(key: backup_filename, public: false))
.and_call_original
it 'removes matching files without a human-readable timestamp' do subject.create # rubocop:disable Rails/SaveBang
expect(FileUtils).to have_received(:rm).with(files[7]) end
expect(FileUtils).to have_received(:rm).with(files[8])
end
it 'does not remove files that are not old enough' do it 'adds the DIRECTORY environment variable if present' do
expect(FileUtils).not_to have_received(:rm).with(files[0]) stub_env('DIRECTORY', 'daily')
end
it 'does not remove non-matching files' do expect_any_instance_of(Fog::Collection).to receive(:create)
expect(FileUtils).not_to have_received(:rm).with(files[9]) .with(hash_including(key: "daily/#{backup_filename}", public: false))
end .and_call_original
it 'prints a done message' do subject.create # rubocop:disable Rails/SaveBang
expect(progress).to have_received(:puts).with('done. (8 removed)') end
end end
end
context 'when removing a file fails' do context 'with AWS with server side encryption' do
let(:file) { files[1] } let(:connection) { ::Fog::Storage.new(Gitlab.config.backup.upload.connection.symbolize_keys) }
let(:message) { "Permission denied @ unlink_internal - #{file}" } let(:encryption_key) { nil }
let(:encryption) { nil }
let(:storage_options) { nil }
before do
stub_backup_setting(
upload: {
connection: {
provider: 'AWS',
aws_access_key_id: 'AWS_ACCESS_KEY_ID',
aws_secret_access_key: 'AWS_SECRET_ACCESS_KEY'
},
remote_directory: 'directory',
multipart_chunk_size: Gitlab.config.backup.upload.multipart_chunk_size,
encryption: encryption,
encryption_key: encryption_key,
storage_options: storage_options,
storage_class: nil
}
)
connection.directories.create(key: Gitlab.config.backup.upload.remote_directory) # rubocop:disable Rails/SaveBang
end
before do context 'with SSE-S3 without using storage_options' do
allow(Gitlab.config.backup).to receive(:keep_time).and_return(1) let(:encryption) { 'AES256' }
allow(FileUtils).to receive(:rm).with(file).and_raise(Errno::EACCES, message)
subject.remove_old it 'sets encryption attributes' do
end subject.create # rubocop:disable Rails/SaveBang
expect(progress).to have_received(:puts).with("done (encrypted with AES256)")
end
end
context 'with SSE-C (customer-provided keys) options' do
let(:encryption) { 'AES256' }
let(:encryption_key) { SecureRandom.hex }
it 'sets encryption attributes' do
subject.create # rubocop:disable Rails/SaveBang
it 'removes the remaining expected files' do expect(progress).to have_received(:puts).with("done (encrypted with AES256)")
expect(FileUtils).to have_received(:rm).with(files[4]) end
expect(FileUtils).to have_received(:rm).with(files[5]) end
expect(FileUtils).to have_received(:rm).with(files[6])
expect(FileUtils).to have_received(:rm).with(files[7]) context 'with SSE-KMS options' do
expect(FileUtils).to have_received(:rm).with(files[8]) let(:storage_options) do
{
server_side_encryption: 'aws:kms',
server_side_encryption_kms_key_id: 'arn:aws:kms:12345'
}
end
it 'sets encryption attributes' do
subject.create # rubocop:disable Rails/SaveBang
expect(progress).to have_received(:puts).with("done (encrypted with aws:kms)")
end
end
end end
it 'sets the correct removed count' do context 'with Google provider' do
expect(progress).to have_received(:puts).with('done. (7 removed)') before do
stub_backup_setting(
upload: {
connection: {
provider: 'Google',
google_storage_access_key_id: 'test-access-id',
google_storage_secret_access_key: 'secret'
},
remote_directory: 'directory',
multipart_chunk_size: Gitlab.config.backup.upload.multipart_chunk_size,
encryption: nil,
encryption_key: nil,
storage_class: nil
}
)
connection = ::Fog::Storage.new(Gitlab.config.backup.upload.connection.symbolize_keys)
connection.directories.create(key: Gitlab.config.backup.upload.remote_directory) # rubocop:disable Rails/SaveBang
end
it 'does not attempt to set ACL' do
expect_any_instance_of(Fog::Collection).to receive(:create)
.with(hash_excluding(public: false))
.and_call_original
subject.create # rubocop:disable Rails/SaveBang
end
end end
it 'prints the error from file that could not be removed' do context 'with AzureRM provider' do
expect(progress).to have_received(:puts).with(a_string_matching(message)) before do
stub_backup_setting(
upload: {
connection: {
provider: 'AzureRM',
azure_storage_account_name: 'test-access-id',
azure_storage_access_key: 'secret'
},
remote_directory: 'directory',
multipart_chunk_size: nil,
encryption: nil,
encryption_key: nil,
storage_class: nil
}
)
end
it 'loads the provider' do
expect { subject.create }.not_to raise_error # rubocop:disable Rails/SaveBang
end
end end
end end
end end
describe 'verify_backup_version' do describe '#restore' do
context 'on version mismatch' do let(:task1) { instance_double(Backup::Task, human_name: 'task 1', enabled: true, pre_restore_warning: nil, post_restore_warning: nil) }
let(:gitlab_version) { Gitlab::VERSION } let(:task2) { instance_double(Backup::Task, human_name: 'task 2', enabled: true, pre_restore_warning: nil, post_restore_warning: nil) }
let(:definitions) do
it 'stops the process' do {
allow(YAML).to receive(:load_file) 'task1' => Backup::Manager::TaskDefinition.new(task: task1, destination_path: 'task1.tar.gz'),
.and_return({ gitlab_version: "not #{gitlab_version}" }) 'task2' => Backup::Manager::TaskDefinition.new(task: task2, destination_path: 'task2.tar.gz')
}
expect { subject.verify_backup_version }.to raise_error SystemExit
end
end end
context 'on version match' do let(:gitlab_version) { Gitlab::VERSION }
let(:gitlab_version) { Gitlab::VERSION } let(:backup_information) do
{
backup_created_at: Time.zone.parse('2019-01-01'),
gitlab_version: gitlab_version
}
end
it 'does nothing' do before do
allow(YAML).to receive(:load_file) Rake.application.rake_require 'tasks/gitlab/shell'
.and_return({ gitlab_version: "#{gitlab_version}" }) Rake.application.rake_require 'tasks/cache'
expect { subject.verify_backup_version }.not_to raise_error allow(task1).to receive(:restore).with(File.join(Gitlab.config.backup.path, 'task1.tar.gz'))
end allow(task2).to receive(:restore).with(File.join(Gitlab.config.backup.path, 'task2.tar.gz'))
allow(YAML).to receive(:load_file).and_return(backup_information)
allow(Rake::Task['gitlab:shell:setup']).to receive(:invoke)
allow(Rake::Task['cache:clear']).to receive(:invoke)
end end
end
describe '#unpack' do
context 'when there are no backup files in the directory' do context 'when there are no backup files in the directory' do
before do before do
allow(Dir).to receive(:glob).and_return([]) allow(Dir).to receive(:glob).and_return([])
end end
it 'fails the operation and prints an error' do it 'fails the operation and prints an error' do
expect { subject.unpack }.to raise_error SystemExit expect { subject.restore }.to raise_error SystemExit
expect(progress).to have_received(:puts) expect(progress).to have_received(:puts)
.with(a_string_matching('No backups found')) .with(a_string_matching('No backups found'))
end end
...@@ -307,13 +599,13 @@ RSpec.describe Backup::Manager do ...@@ -307,13 +599,13 @@ RSpec.describe Backup::Manager do
end end
it 'prints the list of available backups' do it 'prints the list of available backups' do
expect { subject.unpack }.to raise_error SystemExit expect { subject.restore }.to raise_error SystemExit
expect(progress).to have_received(:puts) expect(progress).to have_received(:puts)
.with(a_string_matching('1451606400_2016_01_01_1.2.3\n 1451520000_2015_12_31')) .with(a_string_matching('1451606400_2016_01_01_1.2.3\n 1451520000_2015_12_31'))
end end
it 'fails the operation and prints an error' do it 'fails the operation and prints an error' do
expect { subject.unpack }.to raise_error SystemExit expect { subject.restore }.to raise_error SystemExit
expect(progress).to have_received(:puts) expect(progress).to have_received(:puts)
.with(a_string_matching('Found more than one backup')) .with(a_string_matching('Found more than one backup'))
end end
...@@ -332,7 +624,7 @@ RSpec.describe Backup::Manager do ...@@ -332,7 +624,7 @@ RSpec.describe Backup::Manager do
end end
it 'fails the operation and prints an error' do it 'fails the operation and prints an error' do
expect { subject.unpack }.to raise_error SystemExit expect { subject.restore }.to raise_error SystemExit
expect(File).to have_received(:exist?).with('wrong_gitlab_backup.tar') expect(File).to have_received(:exist?).with('wrong_gitlab_backup.tar')
expect(progress).to have_received(:puts) expect(progress).to have_received(:puts)
.with(a_string_matching('The backup file wrong_gitlab_backup.tar does not exist')) .with(a_string_matching('The backup file wrong_gitlab_backup.tar does not exist'))
...@@ -348,17 +640,46 @@ RSpec.describe Backup::Manager do ...@@ -348,17 +640,46 @@ RSpec.describe Backup::Manager do
) )
allow(File).to receive(:exist?).and_return(true) allow(File).to receive(:exist?).and_return(true)
allow(Kernel).to receive(:system).and_return(true) allow(Kernel).to receive(:system).and_return(true)
allow(YAML).to receive(:load_file).and_return(gitlab_version: Gitlab::VERSION)
stub_env('BACKUP', '/ignored/path/1451606400_2016_01_01_1.2.3') stub_env('BACKUP', '/ignored/path/1451606400_2016_01_01_1.2.3')
end end
it 'unpacks the file' do it 'unpacks the file' do
subject.unpack subject.restore
expect(Kernel).to have_received(:system) expect(Kernel).to have_received(:system)
.with("tar", "-xf", "1451606400_2016_01_01_1.2.3_gitlab_backup.tar") .with("tar", "-xf", "1451606400_2016_01_01_1.2.3_gitlab_backup.tar")
expect(progress).to have_received(:puts).with(a_string_matching('done')) end
context 'on version mismatch' do
let(:backup_information) do
{
backup_created_at: Time.zone.parse('2019-01-01'),
gitlab_version: "not #{gitlab_version}"
}
end
it 'stops the process' do
expect { subject.restore }.to raise_error SystemExit
expect(progress).to have_received(:puts)
.with(a_string_matching('GitLab version mismatch'))
end
end
describe 'tmp files' do
let(:path) { File.join(Gitlab.config.backup.path, 'tmp') }
before do
allow(FileUtils).to receive(:rm_rf).and_call_original
end
it 'removes backups/tmp dir' do
expect(FileUtils).to receive(:rm_rf).with(path).and_call_original
subject.restore
expect(progress).to have_received(:print).with('Deleting backups/tmp ... ')
end
end end
end end
...@@ -375,184 +696,41 @@ RSpec.describe Backup::Manager do ...@@ -375,184 +696,41 @@ RSpec.describe Backup::Manager do
it 'selects the non-tarred backup to restore from' do it 'selects the non-tarred backup to restore from' do
expect(Kernel).not_to receive(:system) expect(Kernel).not_to receive(:system)
subject.unpack subject.restore
expect(progress).to have_received(:puts) expect(progress).to have_received(:puts)
.with(a_string_matching('Non tarred backup found ')) .with(a_string_matching('Non tarred backup found '))
end end
end
end
describe '#upload' do
let(:backup_file) { Tempfile.new('backup', Gitlab.config.backup.path) }
let(:backup_filename) { File.basename(backup_file.path) }
before do
allow(subject).to receive(:tar_file).and_return(backup_filename)
stub_backup_setting(
upload: {
connection: {
provider: 'AWS',
aws_access_key_id: 'id',
aws_secret_access_key: 'secret'
},
remote_directory: 'directory',
multipart_chunk_size: 104857600,
encryption: nil,
encryption_key: nil,
storage_class: nil
}
)
Fog.mock!
# the Fog mock only knows about directories we create explicitly
connection = ::Fog::Storage.new(Gitlab.config.backup.upload.connection.symbolize_keys)
connection.directories.create(key: Gitlab.config.backup.upload.remote_directory) # rubocop:disable Rails/SaveBang
end
context 'target path' do context 'on version mismatch' do
it 'uses the tar filename by default' do let(:backup_information) do
expect_any_instance_of(Fog::Collection).to receive(:create)
.with(hash_including(key: backup_filename, public: false))
.and_return(true)
subject.upload
end
it 'adds the DIRECTORY environment variable if present' do
stub_env('DIRECTORY', 'daily')
expect_any_instance_of(Fog::Collection).to receive(:create)
.with(hash_including(key: "daily/#{backup_filename}", public: false))
.and_return(true)
subject.upload
end
end
context 'with AWS with server side encryption' do
let(:connection) { ::Fog::Storage.new(Gitlab.config.backup.upload.connection.symbolize_keys) }
let(:encryption_key) { nil }
let(:encryption) { nil }
let(:storage_options) { nil }
before do
stub_backup_setting(
upload: {
connection: {
provider: 'AWS',
aws_access_key_id: 'AWS_ACCESS_KEY_ID',
aws_secret_access_key: 'AWS_SECRET_ACCESS_KEY'
},
remote_directory: 'directory',
multipart_chunk_size: Gitlab.config.backup.upload.multipart_chunk_size,
encryption: encryption,
encryption_key: encryption_key,
storage_options: storage_options,
storage_class: nil
}
)
connection.directories.create(key: Gitlab.config.backup.upload.remote_directory) # rubocop:disable Rails/SaveBang
end
context 'with SSE-S3 without using storage_options' do
let(:encryption) { 'AES256' }
it 'sets encryption attributes' do
result = subject.upload
expect(result.key).to be_present
expect(result.encryption).to eq('AES256')
expect(result.encryption_key).to be_nil
expect(result.kms_key_id).to be_nil
end
end
context 'with SSE-C (customer-provided keys) options' do
let(:encryption) { 'AES256' }
let(:encryption_key) { SecureRandom.hex }
it 'sets encryption attributes' do
result = subject.upload
expect(result.key).to be_present
expect(result.encryption).to eq(encryption)
expect(result.encryption_key).to eq(encryption_key)
expect(result.kms_key_id).to be_nil
end
end
context 'with SSE-KMS options' do
let(:storage_options) do
{ {
server_side_encryption: 'aws:kms', backup_created_at: Time.zone.parse('2019-01-01'),
server_side_encryption_kms_key_id: 'arn:aws:kms:12345' gitlab_version: "not #{gitlab_version}"
} }
end end
it 'sets encryption attributes' do it 'stops the process' do
result = subject.upload expect { subject.restore }.to raise_error SystemExit
expect(progress).to have_received(:puts)
expect(result.key).to be_present .with(a_string_matching('GitLab version mismatch'))
expect(result.encryption).to eq('aws:kms')
expect(result.kms_key_id).to eq('arn:aws:kms:12345')
end end
end end
end
context 'with Google provider' do describe 'tmp files' do
before do let(:path) { File.join(Gitlab.config.backup.path, 'tmp') }
stub_backup_setting(
upload: {
connection: {
provider: 'Google',
google_storage_access_key_id: 'test-access-id',
google_storage_secret_access_key: 'secret'
},
remote_directory: 'directory',
multipart_chunk_size: Gitlab.config.backup.upload.multipart_chunk_size,
encryption: nil,
encryption_key: nil,
storage_class: nil
}
)
connection = ::Fog::Storage.new(Gitlab.config.backup.upload.connection.symbolize_keys)
connection.directories.create(key: Gitlab.config.backup.upload.remote_directory) # rubocop:disable Rails/SaveBang
end
it 'does not attempt to set ACL' do before do
expect_any_instance_of(Fog::Collection).to receive(:create) allow(FileUtils).to receive(:rm_rf).and_call_original
.with(hash_excluding(public: false)) end
.and_return(true)
subject.upload it 'removes backups/tmp dir' do
end expect(FileUtils).to receive(:rm_rf).with(path).and_call_original
end
context 'with AzureRM provider' do subject.restore
before do
stub_backup_setting(
upload: {
connection: {
provider: 'AzureRM',
azure_storage_account_name: 'test-access-id',
azure_storage_access_key: 'secret'
},
remote_directory: 'directory',
multipart_chunk_size: nil,
encryption: nil,
encryption_key: nil,
storage_class: nil
}
)
end
it 'loads the provider' do expect(progress).to have_received(:print).with('Deleting backups/tmp ... ')
expect { subject.upload }.not_to raise_error end
end end
end end
end end
......
...@@ -21,7 +21,7 @@ RSpec.shared_examples 'backup object' do |setting| ...@@ -21,7 +21,7 @@ RSpec.shared_examples 'backup object' do |setting|
expect(backup).to receive(:run_pipeline!).with([%W(blabla-tar --exclude=lost+found --exclude=./tmp -C #{backup_path} -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], '']) expect(backup).to receive(:run_pipeline!).with([%W(blabla-tar --exclude=lost+found --exclude=./tmp -C #{backup_path} -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], ''])
expect(backup).to receive(:pipeline_succeeded?).and_return(true) expect(backup).to receive(:pipeline_succeeded?).and_return(true)
backup.dump backup.dump('backup_object.tar.gz')
end end
end end
end end
......
...@@ -19,7 +19,7 @@ RSpec.describe Backup::Pages do ...@@ -19,7 +19,7 @@ RSpec.describe Backup::Pages do
expect(subject).to receive(:tar).and_return('blabla-tar') expect(subject).to receive(:tar).and_return('blabla-tar')
expect(subject).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found --exclude=./@pages.tmp -C /var/gitlab-pages -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], '']) expect(subject).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found --exclude=./@pages.tmp -C /var/gitlab-pages -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], ''])
expect(subject).to receive(:pipeline_succeeded?).and_return(true) expect(subject).to receive(:pipeline_succeeded?).and_return(true)
subject.dump subject.dump('pages.tar.gz')
end end
end end
end end
...@@ -8,6 +8,7 @@ RSpec.describe Backup::Repositories do ...@@ -8,6 +8,7 @@ RSpec.describe Backup::Repositories do
let(:strategy) { spy(:strategy, parallel_enqueue?: parallel_enqueue) } let(:strategy) { spy(:strategy, parallel_enqueue?: parallel_enqueue) }
let(:max_concurrency) { 1 } let(:max_concurrency) { 1 }
let(:max_storage_concurrency) { 1 } let(:max_storage_concurrency) { 1 }
let(:destination) { 'repositories' }
subject do subject do
described_class.new( described_class.new(
...@@ -26,9 +27,9 @@ RSpec.describe Backup::Repositories do ...@@ -26,9 +27,9 @@ RSpec.describe Backup::Repositories do
project_snippet = create(:project_snippet, :repository, project: project) project_snippet = create(:project_snippet, :repository, project: project)
personal_snippet = create(:personal_snippet, :repository, author: project.first_owner) personal_snippet = create(:personal_snippet, :repository, author: project.first_owner)
subject.dump subject.dump(destination)
expect(strategy).to have_received(:start).with(:create) expect(strategy).to have_received(:start).with(:create, destination)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
...@@ -54,38 +55,38 @@ RSpec.describe Backup::Repositories do ...@@ -54,38 +55,38 @@ RSpec.describe Backup::Repositories do
it 'creates the expected number of threads' do it 'creates the expected number of threads' do
expect(Thread).not_to receive(:new) expect(Thread).not_to receive(:new)
expect(strategy).to receive(:start).with(:create) expect(strategy).to receive(:start).with(:create, destination)
projects.each do |project| projects.each do |project|
expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT) expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
end end
expect(strategy).to receive(:finish!) expect(strategy).to receive(:finish!)
subject.dump subject.dump(destination)
end end
describe 'command failure' do describe 'command failure' do
it 'enqueue_project raises an error' do it 'enqueue_project raises an error' do
allow(strategy).to receive(:enqueue).with(anything, Gitlab::GlRepository::PROJECT).and_raise(IOError) allow(strategy).to receive(:enqueue).with(anything, Gitlab::GlRepository::PROJECT).and_raise(IOError)
expect { subject.dump }.to raise_error(IOError) expect { subject.dump(destination) }.to raise_error(IOError)
end end
it 'project query raises an error' do it 'project query raises an error' do
allow(Project).to receive_message_chain(:includes, :find_each).and_raise(ActiveRecord::StatementTimeout) allow(Project).to receive_message_chain(:includes, :find_each).and_raise(ActiveRecord::StatementTimeout)
expect { subject.dump }.to raise_error(ActiveRecord::StatementTimeout) expect { subject.dump(destination) }.to raise_error(ActiveRecord::StatementTimeout)
end end
end end
it 'avoids N+1 database queries' do it 'avoids N+1 database queries' do
control_count = ActiveRecord::QueryRecorder.new do control_count = ActiveRecord::QueryRecorder.new do
subject.dump subject.dump(destination)
end.count end.count
create_list(:project, 2, :repository) create_list(:project, 2, :repository)
expect do expect do
subject.dump subject.dump(destination)
end.not_to exceed_query_limit(control_count) end.not_to exceed_query_limit(control_count)
end end
end end
...@@ -98,13 +99,13 @@ RSpec.describe Backup::Repositories do ...@@ -98,13 +99,13 @@ RSpec.describe Backup::Repositories do
it 'enqueues all projects sequentially' do it 'enqueues all projects sequentially' do
expect(Thread).not_to receive(:new) expect(Thread).not_to receive(:new)
expect(strategy).to receive(:start).with(:create) expect(strategy).to receive(:start).with(:create, destination)
projects.each do |project| projects.each do |project|
expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT) expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
end end
expect(strategy).to receive(:finish!) expect(strategy).to receive(:finish!)
subject.dump subject.dump(destination)
end end
end end
...@@ -122,13 +123,13 @@ RSpec.describe Backup::Repositories do ...@@ -122,13 +123,13 @@ RSpec.describe Backup::Repositories do
.exactly(storage_keys.length * (max_storage_concurrency + 1)).times .exactly(storage_keys.length * (max_storage_concurrency + 1)).times
.and_call_original .and_call_original
expect(strategy).to receive(:start).with(:create) expect(strategy).to receive(:start).with(:create, destination)
projects.each do |project| projects.each do |project|
expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT) expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
end end
expect(strategy).to receive(:finish!) expect(strategy).to receive(:finish!)
subject.dump subject.dump(destination)
end end
context 'with extra max concurrency' do context 'with extra max concurrency' do
...@@ -139,13 +140,13 @@ RSpec.describe Backup::Repositories do ...@@ -139,13 +140,13 @@ RSpec.describe Backup::Repositories do
.exactly(storage_keys.length * (max_storage_concurrency + 1)).times .exactly(storage_keys.length * (max_storage_concurrency + 1)).times
.and_call_original .and_call_original
expect(strategy).to receive(:start).with(:create) expect(strategy).to receive(:start).with(:create, destination)
projects.each do |project| projects.each do |project|
expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT) expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
end end
expect(strategy).to receive(:finish!) expect(strategy).to receive(:finish!)
subject.dump subject.dump(destination)
end end
end end
...@@ -153,33 +154,33 @@ RSpec.describe Backup::Repositories do ...@@ -153,33 +154,33 @@ RSpec.describe Backup::Repositories do
it 'enqueue_project raises an error' do it 'enqueue_project raises an error' do
allow(strategy).to receive(:enqueue).and_raise(IOError) allow(strategy).to receive(:enqueue).and_raise(IOError)
expect { subject.dump }.to raise_error(IOError) expect { subject.dump(destination) }.to raise_error(IOError)
end end
it 'project query raises an error' do it 'project query raises an error' do
allow(Project).to receive_message_chain(:for_repository_storage, :includes, :find_each).and_raise(ActiveRecord::StatementTimeout) allow(Project).to receive_message_chain(:for_repository_storage, :includes, :find_each).and_raise(ActiveRecord::StatementTimeout)
expect { subject.dump }.to raise_error(ActiveRecord::StatementTimeout) expect { subject.dump(destination) }.to raise_error(ActiveRecord::StatementTimeout)
end end
context 'misconfigured storages' do context 'misconfigured storages' do
let(:storage_keys) { %w[test_second_storage] } let(:storage_keys) { %w[test_second_storage] }
it 'raises an error' do it 'raises an error' do
expect { subject.dump }.to raise_error(Backup::Error, 'repositories.storages in gitlab.yml is misconfigured') expect { subject.dump(destination) }.to raise_error(Backup::Error, 'repositories.storages in gitlab.yml is misconfigured')
end end
end end
end end
it 'avoids N+1 database queries' do it 'avoids N+1 database queries' do
control_count = ActiveRecord::QueryRecorder.new do control_count = ActiveRecord::QueryRecorder.new do
subject.dump subject.dump(destination)
end.count end.count
create_list(:project, 2, :repository) create_list(:project, 2, :repository)
expect do expect do
subject.dump subject.dump(destination)
end.not_to exceed_query_limit(control_count) end.not_to exceed_query_limit(control_count)
end end
end end
...@@ -192,9 +193,9 @@ RSpec.describe Backup::Repositories do ...@@ -192,9 +193,9 @@ RSpec.describe Backup::Repositories do
let_it_be(:project_snippet) { create(:project_snippet, project: project, author: project.first_owner) } let_it_be(:project_snippet) { create(:project_snippet, project: project, author: project.first_owner) }
it 'calls enqueue for each repository type', :aggregate_failures do it 'calls enqueue for each repository type', :aggregate_failures do
subject.restore subject.restore(destination)
expect(strategy).to have_received(:start).with(:restore) expect(strategy).to have_received(:start).with(:restore, destination)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN) expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::DESIGN)
...@@ -208,7 +209,7 @@ RSpec.describe Backup::Repositories do ...@@ -208,7 +209,7 @@ RSpec.describe Backup::Repositories do
pool_repository = create(:pool_repository, :failed) pool_repository = create(:pool_repository, :failed)
pool_repository.delete_object_pool pool_repository.delete_object_pool
subject.restore subject.restore(destination)
pool_repository.reload pool_repository.reload
expect(pool_repository).not_to be_failed expect(pool_repository).not_to be_failed
...@@ -219,7 +220,7 @@ RSpec.describe Backup::Repositories do ...@@ -219,7 +220,7 @@ RSpec.describe Backup::Repositories do
pool_repository = create(:pool_repository, state: :obsolete) pool_repository = create(:pool_repository, state: :obsolete)
pool_repository.update_column(:source_project_id, nil) pool_repository.update_column(:source_project_id, nil)
subject.restore subject.restore(destination)
pool_repository.reload pool_repository.reload
expect(pool_repository).to be_obsolete expect(pool_repository).to be_obsolete
...@@ -236,14 +237,14 @@ RSpec.describe Backup::Repositories do ...@@ -236,14 +237,14 @@ RSpec.describe Backup::Repositories do
end end
it 'shows the appropriate error' do it 'shows the appropriate error' do
subject.restore subject.restore(destination)
expect(progress).to have_received(:puts).with("Snippet #{personal_snippet.full_path} can't be restored: Repository has more than one branch") expect(progress).to have_received(:puts).with("Snippet #{personal_snippet.full_path} can't be restored: Repository has more than one branch")
expect(progress).to have_received(:puts).with("Snippet #{project_snippet.full_path} can't be restored: Repository has more than one branch") expect(progress).to have_received(:puts).with("Snippet #{project_snippet.full_path} can't be restored: Repository has more than one branch")
end end
it 'removes the snippets from the DB' do it 'removes the snippets from the DB' do
expect { subject.restore }.to change(PersonalSnippet, :count).by(-1) expect { subject.restore(destination) }.to change(PersonalSnippet, :count).by(-1)
.and change(ProjectSnippet, :count).by(-1) .and change(ProjectSnippet, :count).by(-1)
.and change(SnippetRepository, :count).by(-2) .and change(SnippetRepository, :count).by(-2)
end end
...@@ -253,7 +254,7 @@ RSpec.describe Backup::Repositories do ...@@ -253,7 +254,7 @@ RSpec.describe Backup::Repositories do
shard_name = personal_snippet.repository.shard shard_name = personal_snippet.repository.shard
path = personal_snippet.disk_path + '.git' path = personal_snippet.disk_path + '.git'
subject.restore subject.restore(destination)
expect(gitlab_shell.repository_exists?(shard_name, path)).to eq false expect(gitlab_shell.repository_exists?(shard_name, path)).to eq false
end end
......
# frozen_string_literal: true
require 'spec_helper'
# Specs for the abstract base class of all backup tasks (db, repositories,
# uploads, pages, ...). Backup::Task defines the task interface that
# Backup::Manager drives; each template method below must be overridden by
# a concrete subclass, so calling it on the base class raises
# NotImplementedError.
RSpec.describe Backup::Task do
  # StringIO stands in for the progress/output stream normally passed by
  # the backup rake tasks; nothing is asserted on it here.
  let(:progress) { StringIO.new }

  subject { described_class.new(progress) }

  describe '#human_name' do
    # Human-readable task name used in warnings/log messages.
    it 'must be implemented by the subclass' do
      expect { subject.human_name }.to raise_error(NotImplementedError)
    end
  end

  describe '#dump' do
    # Subclasses write their backup artifact to the given destination path.
    it 'must be implemented by the subclass' do
      expect { subject.dump('some/path') }.to raise_error(NotImplementedError)
    end
  end

  describe '#restore' do
    # Subclasses restore their data from the given backup path.
    it 'must be implemented by the subclass' do
      expect { subject.restore('some/path') }.to raise_error(NotImplementedError)
    end
  end
end
...@@ -19,7 +19,7 @@ RSpec.describe Backup::Uploads do ...@@ -19,7 +19,7 @@ RSpec.describe Backup::Uploads do
expect(backup).to receive(:tar).and_return('blabla-tar') expect(backup).to receive(:tar).and_return('blabla-tar')
expect(backup).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found --exclude=./tmp -C /var/uploads -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], '']) expect(backup).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found --exclude=./tmp -C /var/uploads -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], ''])
expect(backup).to receive(:pipeline_succeeded?).and_return(true) expect(backup).to receive(:pipeline_succeeded?).and_return(true)
backup.dump backup.dump('uploads.tar.gz')
end end
end end
end end
...@@ -72,7 +72,6 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do ...@@ -72,7 +72,6 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
before do before do
allow(YAML).to receive(:load_file) allow(YAML).to receive(:load_file)
.and_return({ gitlab_version: gitlab_version }) .and_return({ gitlab_version: gitlab_version })
expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
expect_next_instance_of(::Backup::Manager) do |instance| expect_next_instance_of(::Backup::Manager) do |instance|
backup_types.each do |subtask| backup_types.each do |subtask|
expect(instance).to receive(:run_restore_task).with(subtask).ordered expect(instance).to receive(:run_restore_task).with(subtask).ordered
...@@ -85,10 +84,6 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do ...@@ -85,10 +84,6 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
it 'invokes restoration on match' do it 'invokes restoration on match' do
expect { run_rake_task('gitlab:backup:restore') }.to output.to_stdout_from_any_process expect { run_rake_task('gitlab:backup:restore') }.to output.to_stdout_from_any_process
end end
it 'prints timestamps on messages' do
expect { run_rake_task('gitlab:backup:restore') }.to output(/.*\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\s[-+]\d{4}\s--\s.*/).to_stdout_from_any_process
end
end end
end end
...@@ -131,8 +126,6 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do ...@@ -131,8 +126,6 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
allow(YAML).to receive(:load_file) allow(YAML).to receive(:load_file)
.and_return({ gitlab_version: Gitlab::VERSION }) .and_return({ gitlab_version: Gitlab::VERSION })
expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
expect_next_instance_of(::Backup::Manager) do |instance| expect_next_instance_of(::Backup::Manager) do |instance|
backup_types.each do |subtask| backup_types.each do |subtask|
expect(instance).to receive(:run_restore_task).with(subtask).ordered expect(instance).to receive(:run_restore_task).with(subtask).ordered
...@@ -486,7 +479,6 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do ...@@ -486,7 +479,6 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
allow(Rake::Task['gitlab:shell:setup']) allow(Rake::Task['gitlab:shell:setup'])
.to receive(:invoke).and_return(true) .to receive(:invoke).and_return(true)
expect(Rake::Task['gitlab:db:drop_tables']).to receive :invoke
expect_next_instance_of(::Backup::Manager) do |instance| expect_next_instance_of(::Backup::Manager) do |instance|
(backup_types - %w{repositories uploads}).each do |subtask| (backup_types - %w{repositories uploads}).each do |subtask|
expect(instance).to receive(:run_restore_task).with(subtask).ordered expect(instance).to receive(:run_restore_task).with(subtask).ordered
...@@ -531,7 +523,6 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do ...@@ -531,7 +523,6 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
allow(Rake::Task['gitlab:shell:setup']) allow(Rake::Task['gitlab:shell:setup'])
.to receive(:invoke).and_return(true) .to receive(:invoke).and_return(true)
expect(Rake::Task['gitlab:db:drop_tables']).to receive :invoke
expect_next_instance_of(::Backup::Manager) do |instance| expect_next_instance_of(::Backup::Manager) do |instance|
backup_types.each do |subtask| backup_types.each do |subtask|
expect(instance).to receive(:run_restore_task).with(subtask).ordered expect(instance).to receive(:run_restore_task).with(subtask).ordered
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment