Commit e95885f3 authored by bescoto's avatar bescoto

Major refactoring - avoid use of 'from XX import *' in favor of more

normal 'import XXX' syntax.  The previous way was an artifact from
earlier versions where the whole program fit in one file.


git-svn-id: http://svn.savannah.nongnu.org/svn/rdiff-backup@252 2b77aa54-bcbc-44c9-a7ec-4f6cf2b41109
parent 7cfed788
...@@ -27,9 +27,7 @@ them over the usual 255 character limit. ...@@ -27,9 +27,7 @@ them over the usual 255 character limit.
""" """
import re import re
from log import * import Globals, log
from robust import *
import Globals
max_filename_length = 255 max_filename_length = 255
...@@ -55,8 +53,8 @@ def set_init_quote_vals_local(): ...@@ -55,8 +53,8 @@ def set_init_quote_vals_local():
global chars_to_quote, quoting_char global chars_to_quote, quoting_char
chars_to_quote = Globals.chars_to_quote chars_to_quote = Globals.chars_to_quote
if len(Globals.quoting_char) != 1: if len(Globals.quoting_char) != 1:
Log.FatalError("Expected single character for quoting char," log.Log.FatalError("Expected single character for quoting char,"
"got '%s' instead" % (Globals.quoting_char,)) "got '%s' instead" % (Globals.quoting_char,))
quoting_char = Globals.quoting_char quoting_char = Globals.quoting_char
init_quoting_regexps() init_quoting_regexps()
...@@ -68,8 +66,8 @@ def init_quoting_regexps(): ...@@ -68,8 +66,8 @@ def init_quoting_regexps():
re.compile("[%s%s]" % (chars_to_quote, quoting_char), re.S) re.compile("[%s%s]" % (chars_to_quote, quoting_char), re.S)
unquoting_regexp = re.compile("%s[0-9]{3}" % quoting_char, re.S) unquoting_regexp = re.compile("%s[0-9]{3}" % quoting_char, re.S)
except re.error: except re.error:
Log.FatalError("Error '%s' when processing char quote list %s" % log.Log.FatalError("Error '%s' when processing char quote list %s" %
(re.error, chars_to_quote)) (re.error, chars_to_quote))
def quote(path): def quote(path):
"""Return quoted version of given path """Return quoted version of given path
...@@ -95,18 +93,4 @@ def unquote_single(match): ...@@ -95,18 +93,4 @@ def unquote_single(match):
assert len(match.group()) == 4 assert len(match.group()) == 4
return chr(int(match.group()[1:])) return chr(int(match.group()[1:]))
def get_quoted_dir_children(rpath):
"""For rpath directory, return list of quoted children in dir"""
if not rpath.isdir(): return []
dir_pairs = [(unquote(filename), filename)
for filename in Robust.listrp(rpath)]
dir_pairs.sort() # sort by real index, not quoted part
child_list = []
for unquoted, filename in dir_pairs:
childrp = rpath.append(unquoted)
childrp.quote_path()
child_list.append(childrp)
return child_list
...@@ -246,7 +246,7 @@ def postset_regexp_local(name, re_string, flags): ...@@ -246,7 +246,7 @@ def postset_regexp_local(name, re_string, flags):
if flags: globals()[name] = re.compile(re_string, flags) if flags: globals()[name] = re.compile(re_string, flags)
else: globals()[name] = re.compile(re_string) else: globals()[name] = re.compile(re_string)
def set_select(source, rpath, tuplelist, quote_mode, *filelists): def set_select(source, Sel_Obj, rpath, tuplelist, quote_mode, *filelists):
"""Initialize select object using tuplelist """Initialize select object using tuplelist
Note that each list in filelists must each be passed as Note that each list in filelists must each be passed as
...@@ -256,12 +256,8 @@ def set_select(source, rpath, tuplelist, quote_mode, *filelists): ...@@ -256,12 +256,8 @@ def set_select(source, rpath, tuplelist, quote_mode, *filelists):
""" """
global select_source, select_mirror global select_source, select_mirror
sel = Select(rpath, quote_mode) sel = Sel_Obj(rpath, quote_mode)
sel.ParseArgs(tuplelist, filelists) sel.ParseArgs(tuplelist, filelists)
if source: select_source = sel if source: select_source = sel
else: select_mirror = sel else: select_mirror = sel
from rpath import * # kludge to avoid circularity - not needed in this module
from log import * # another kludge
from selection import *
...@@ -32,7 +32,7 @@ side. The source side should only transmit inode information. ...@@ -32,7 +32,7 @@ side. The source side should only transmit inode information.
from __future__ import generators from __future__ import generators
import cPickle import cPickle
import Globals, Time, TempFile, rpath, log, robust
# In all of these lists of indicies are the values. The keys in # In all of these lists of indicies are the values. The keys in
# _inode_ ones are (inode, devloc) pairs. # _inode_ ones are (inode, devloc) pairs.
...@@ -138,8 +138,8 @@ def restore_link(index, rpath): ...@@ -138,8 +138,8 @@ def restore_link(index, rpath):
for linked_index in _src_index_indicies[index]: for linked_index in _src_index_indicies[index]:
if linked_index in _restore_index_path: if linked_index in _restore_index_path:
srcpath = _restore_index_path[linked_index] srcpath = _restore_index_path[linked_index]
Log("Restoring %s by hard linking to %s" % log.Log("Restoring %s by hard linking to %s" %
(rpath.path, srcpath), 6) (rpath.path, srcpath), 6)
rpath.hardlink(srcpath) rpath.hardlink(srcpath)
return 1 return 1
_restore_index_path[index] = rpath.path _restore_index_path[index] = rpath.path
...@@ -148,8 +148,8 @@ def restore_link(index, rpath): ...@@ -148,8 +148,8 @@ def restore_link(index, rpath):
def link_rp(src_rorp, dest_rpath, dest_root = None): def link_rp(src_rorp, dest_rpath, dest_root = None):
"""Make dest_rpath into a link analogous to that of src_rorp""" """Make dest_rpath into a link analogous to that of src_rorp"""
if not dest_root: dest_root = dest_rpath # use base of dest_rpath if not dest_root: dest_root = dest_rpath # use base of dest_rpath
dest_link_rpath = RPath(dest_root.conn, dest_root.base, dest_link_rpath = rpath.RPath(dest_root.conn, dest_root.base,
get_indicies(src_rorp, 1)[0]) get_indicies(src_rorp, 1)[0])
dest_rpath.hardlink(dest_link_rpath.path) dest_rpath.hardlink(dest_link_rpath.path)
def write_linkdict(rpath, dict, compress = None): def write_linkdict(rpath, dict, compress = None):
...@@ -161,13 +161,13 @@ def write_linkdict(rpath, dict, compress = None): ...@@ -161,13 +161,13 @@ def write_linkdict(rpath, dict, compress = None):
""" """
assert (Globals.isbackup_writer and assert (Globals.isbackup_writer and
rpath.conn is Globals.local_connection) rpath.conn is Globals.local_connection)
tf = TempFileManager.new(rpath) tf = TempFile.new(rpath)
def init(): def init():
fp = tf.open("wb", compress) fp = tf.open("wb", compress)
cPickle.dump(dict, fp) cPickle.dump(dict, fp)
assert not fp.close() assert not fp.close()
tf.setdata() tf.setdata()
Robust.make_tf_robustaction(init, (tf,), (rpath,)).execute() robust.make_tf_robustaction(init, (tf,), (rpath,)).execute()
def get_linkrp(data_rpath, time, prefix): def get_linkrp(data_rpath, time, prefix):
"""Return RPath of linkdata, or None if cannot find""" """Return RPath of linkdata, or None if cannot find"""
...@@ -191,7 +191,7 @@ def final_writedata(): ...@@ -191,7 +191,7 @@ def final_writedata():
"""Write final checkpoint data to rbdir after successful backup""" """Write final checkpoint data to rbdir after successful backup"""
global final_inc global final_inc
if _src_index_indicies: if _src_index_indicies:
Log("Writing hard link data", 6) log.Log("Writing hard link data", 6)
if Globals.compression: if Globals.compression:
final_inc = Globals.rbdir.append("hardlink_data.%s.data.gz" % final_inc = Globals.rbdir.append("hardlink_data.%s.data.gz" %
Time.curtimestr) Time.curtimestr)
...@@ -218,7 +218,7 @@ def final_checkpoint(data_rpath): ...@@ -218,7 +218,7 @@ def final_checkpoint(data_rpath):
after every 20 seconds or whatever, but just at the end. after every 20 seconds or whatever, but just at the end.
""" """
Log("Writing intermediate hard link data to disk", 2) log.Log("Writing intermediate hard link data to disk", 2)
src_inode_rp = data_rpath.append("hardlink_source_inode_checkpoint." src_inode_rp = data_rpath.append("hardlink_source_inode_checkpoint."
"%s.data" % Time.curtimestr) "%s.data" % Time.curtimestr)
src_index_rp = data_rpath.append("hardlink_source_index_checkpoint." src_index_rp = data_rpath.append("hardlink_source_index_checkpoint."
...@@ -251,7 +251,7 @@ def retrieve_checkpoint(data_rpath, time): ...@@ -251,7 +251,7 @@ def retrieve_checkpoint(data_rpath, time):
dest_index = get_linkdata(data_rpath, time, dest_index = get_linkdata(data_rpath, time,
"hardlink_dest_index_checkpoint") "hardlink_dest_index_checkpoint")
except cPickle.UnpicklingError: except cPickle.UnpicklingError:
Log("Unpickling Error", 2) log.Log("Unpickling Error", 2)
return None return None
if (src_inode is None or src_index is None or if (src_inode is None or src_index is None or
dest_inode is None or dest_index is None): return None dest_inode is None or dest_index is None): return None
...@@ -271,7 +271,3 @@ def remove_all_checkpoints(): ...@@ -271,7 +271,3 @@ def remove_all_checkpoints():
rp.delete() rp.delete()
from log import *
from robust import *
from rpath import *
import Globals, Time
...@@ -20,16 +20,10 @@ ...@@ -20,16 +20,10 @@
"""Start (and end) here - read arguments, set global settings, etc.""" """Start (and end) here - read arguments, set global settings, etc."""
from __future__ import generators from __future__ import generators
import getopt, sys, re import getopt, sys, re, os
from log import * from log import Log
from lazy import * import Globals, Time, SetConnections, selection, robust, rpath, \
from connection import * manage, highlevel, connection, restore, FilenameMapping, Security
from rpath import *
from robust import *
from restore import *
from highlevel import *
from manage import *
import Globals, Time, SetConnections
action = None action = None
...@@ -164,7 +158,7 @@ def set_action(): ...@@ -164,7 +158,7 @@ def set_action():
if l == 0: commandline_error("No arguments given") if l == 0: commandline_error("No arguments given")
elif l == 1: action = "restore" elif l == 1: action = "restore"
elif l == 2: elif l == 2:
if RPath(Globals.local_connection, args[0]).isincfile(): if rpath.RPath(Globals.local_connection, args[0]).isincfile():
action = "restore" action = "restore"
else: action = "backup" else: action = "backup"
else: commandline_error("Too many arguments given") else: commandline_error("Too many arguments given")
...@@ -207,13 +201,14 @@ def misc_setup(rps): ...@@ -207,13 +201,14 @@ def misc_setup(rps):
Globals.postset_regexp('no_compression_regexp', Globals.postset_regexp('no_compression_regexp',
Globals.no_compression_regexp_string) Globals.no_compression_regexp_string)
for conn in Globals.connections: Robust.install_signal_handlers() for conn in Globals.connections: robust.install_signal_handlers()
def take_action(rps): def take_action(rps):
"""Do whatever action says""" """Do whatever action says"""
if action == "server": PipeConnection(sys.stdin, sys.stdout).Server() if action == "server":
connection.PipeConnection(sys.stdin, sys.stdout).Server()
elif action == "backup": Backup(rps[0], rps[1]) elif action == "backup": Backup(rps[0], rps[1])
elif action == "restore": restore(*rps) elif action == "restore": Restore(*rps)
elif action == "restore-as-of": RestoreAsOf(rps[0], rps[1]) elif action == "restore-as-of": RestoreAsOf(rps[0], rps[1])
elif action == "test-server": SetConnections.TestConnections() elif action == "test-server": SetConnections.TestConnections()
elif action == "list-changed-since": ListChangedSince(rps[0]) elif action == "list-changed-since": ListChangedSince(rps[0])
...@@ -247,14 +242,16 @@ def Backup(rpin, rpout): ...@@ -247,14 +242,16 @@ def Backup(rpin, rpout):
backup_init_dirs(rpin, rpout) backup_init_dirs(rpin, rpout)
if prevtime: if prevtime:
Time.setprevtime(prevtime) Time.setprevtime(prevtime)
HighLevel.Mirror_and_increment(rpin, rpout, incdir) highlevel.HighLevel.Mirror_and_increment(rpin, rpout, incdir)
else: HighLevel.Mirror(rpin, rpout, incdir) else: highlevel.HighLevel.Mirror(rpin, rpout, incdir)
rpout.conn.Main.backup_touch_curmirror_local(rpin, rpout) rpout.conn.Main.backup_touch_curmirror_local(rpin, rpout)
def backup_init_select(rpin, rpout): def backup_init_select(rpin, rpout):
"""Create Select objects on source and dest connections""" """Create Select objects on source and dest connections"""
rpin.conn.Globals.set_select(1, rpin, select_opts, None, *select_files) rpin.conn.Globals.set_select(1, selection.Select,
rpout.conn.Globals.set_select(0, rpout, select_mirror_opts, 1) rpin, select_opts, None, *select_files)
rpout.conn.Globals.set_select(0, selection.Select,
rpout, select_mirror_opts, 1)
def backup_init_dirs(rpin, rpout): def backup_init_dirs(rpin, rpout):
"""Make sure rpin and rpout are valid, init data dir and logging""" """Make sure rpin and rpout are valid, init data dir and logging"""
...@@ -273,7 +270,7 @@ def backup_init_dirs(rpin, rpout): ...@@ -273,7 +270,7 @@ def backup_init_dirs(rpin, rpout):
datadir = rpout.append("rdiff-backup-data") datadir = rpout.append("rdiff-backup-data")
SetConnections.UpdateGlobal('rbdir', datadir) SetConnections.UpdateGlobal('rbdir', datadir)
incdir = RPath(rpout.conn, os.path.join(datadir.path, "increments")) incdir = rpath.RPath(rpout.conn, os.path.join(datadir.path, "increments"))
prevtime = backup_get_mirrortime() prevtime = backup_get_mirrortime()
if rpout.lstat(): if rpout.lstat():
...@@ -336,14 +333,14 @@ def backup_touch_curmirror_local(rpin, rpout): ...@@ -336,14 +333,14 @@ def backup_touch_curmirror_local(rpin, rpout):
""" """
datadir = Globals.rbdir datadir = Globals.rbdir
map(RPath.delete, backup_get_mirrorrps()) map(rpath.RPath.delete, backup_get_mirrorrps())
mirrorrp = datadir.append("current_mirror.%s.%s" % (Time.curtimestr, mirrorrp = datadir.append("current_mirror.%s.%s" % (Time.curtimestr,
"data")) "data"))
Log("Touching mirror marker %s" % mirrorrp.path, 6) Log("Touching mirror marker %s" % mirrorrp.path, 6)
mirrorrp.touch() mirrorrp.touch()
RPath.copy_attribs(rpin, rpout) rpath.copy_attribs(rpin, rpout)
def restore(src_rp, dest_rp = None): def Restore(src_rp, dest_rp = None):
"""Main restoring function """Main restoring function
Here src_rp should be an increment file, and if dest_rp is Here src_rp should be an increment file, and if dest_rp is
...@@ -373,7 +370,7 @@ def restore_common(rpin, target, time): ...@@ -373,7 +370,7 @@ def restore_common(rpin, target, time):
inc_rpath = datadir.append_path('increments', index) inc_rpath = datadir.append_path('increments', index)
restore_init_select(mirror_root, target) restore_init_select(mirror_root, target)
restore_start_log(rpin, target, time) restore_start_log(rpin, target, time)
Restore.Restore(inc_rpath, mirror, target, time) restore.Restore(inc_rpath, mirror, target, time)
Log("Restore ended", 4) Log("Restore ended", 4)
def restore_start_log(rpin, target, time): def restore_start_log(rpin, target, time):
...@@ -398,8 +395,8 @@ def restore_check_paths(rpin, rpout, restoreasof = None): ...@@ -398,8 +395,8 @@ def restore_check_paths(rpin, rpout, restoreasof = None):
Try restoring from an increment file (the filenames look like Try restoring from an increment file (the filenames look like
"foobar.2001-09-01T04:49:04-07:00.diff").""" % rpin.path) "foobar.2001-09-01T04:49:04-07:00.diff").""" % rpin.path)
if not rpout: rpout = RPath(Globals.local_connection, if not rpout: rpout = rpath.RPath(Globals.local_connection,
rpin.getincbase_str()) rpin.getincbase_str())
if rpout.lstat(): if rpout.lstat():
Log.FatalError("Restore target %s already exists, " Log.FatalError("Restore target %s already exists, "
"and will not be overwritten." % rpout.path) "and will not be overwritten." % rpout.path)
...@@ -413,8 +410,9 @@ def restore_init_select(rpin, rpout): ...@@ -413,8 +410,9 @@ def restore_init_select(rpin, rpout):
the restore operation isn't. the restore operation isn't.
""" """
Globals.set_select(1, rpin, select_mirror_opts, None) Globals.set_select(1, selection.Select, rpin, select_mirror_opts, None)
Globals.set_select(0, rpout, select_opts, None, *select_files) Globals.set_select(0, selection.Select,
rpout, select_opts, None, *select_files)
def restore_get_root(rpin): def restore_get_root(rpin):
"""Return (mirror root, index) and set the data dir """Return (mirror root, index) and set the data dir
...@@ -438,7 +436,7 @@ def restore_get_root(rpin): ...@@ -438,7 +436,7 @@ def restore_get_root(rpin):
i = len(pathcomps) i = len(pathcomps)
while i >= 2: while i >= 2:
parent_dir = RPath(rpin.conn, "/".join(pathcomps[:i])) parent_dir = rpath.RPath(rpin.conn, "/".join(pathcomps[:i]))
if (parent_dir.isdir() and if (parent_dir.isdir() and
"rdiff-backup-data" in parent_dir.listdir()): break "rdiff-backup-data" in parent_dir.listdir()): break
i = i-1 i = i-1
...@@ -467,11 +465,11 @@ def ListIncrements(rp): ...@@ -467,11 +465,11 @@ def ListIncrements(rp):
mirror_root.append_path("rdiff-backup-data") mirror_root.append_path("rdiff-backup-data")
mirrorrp = mirror_root.new_index(index) mirrorrp = mirror_root.new_index(index)
inc_rpath = datadir.append_path('increments', index) inc_rpath = datadir.append_path('increments', index)
incs = Restore.get_inclist(inc_rpath) incs = restore.get_inclist(inc_rpath)
mirror_time = Restore.get_mirror_time() mirror_time = restore.get_mirror_time()
if Globals.parsable_output: if Globals.parsable_output:
print Manage.describe_incs_parsable(incs, mirror_time, mirrorrp) print manage.describe_incs_parsable(incs, mirror_time, mirrorrp)
else: print Manage.describe_incs_human(incs, mirror_time, mirrorrp) else: print manage.describe_incs_human(incs, mirror_time, mirrorrp)
def CalculateAverage(rps): def CalculateAverage(rps):
...@@ -495,7 +493,7 @@ def RemoveOlderThan(rootrp): ...@@ -495,7 +493,7 @@ def RemoveOlderThan(rootrp):
Log("Deleting increment(s) before %s" % timep, 4) Log("Deleting increment(s) before %s" % timep, 4)
times_in_secs = map(lambda inc: Time.stringtotime(inc.getinctime()), times_in_secs = map(lambda inc: Time.stringtotime(inc.getinctime()),
Restore.get_inclist(datadir.append("increments"))) restore.get_inclist(datadir.append("increments")))
times_in_secs = filter(lambda t: t < time, times_in_secs) times_in_secs = filter(lambda t: t < time, times_in_secs)
if not times_in_secs: if not times_in_secs:
Log.FatalError("No increments older than %s found" % timep) Log.FatalError("No increments older than %s found" % timep)
...@@ -510,7 +508,7 @@ def RemoveOlderThan(rootrp): ...@@ -510,7 +508,7 @@ def RemoveOlderThan(rootrp):
if len(times_in_secs) == 1: if len(times_in_secs) == 1:
Log("Deleting increment at time:\n" + inc_pretty_time, 3) Log("Deleting increment at time:\n" + inc_pretty_time, 3)
else: Log("Deleting increments at times:\n" + inc_pretty_time, 3) else: Log("Deleting increments at times:\n" + inc_pretty_time, 3)
Manage.delete_earlier_than(datadir, time) manage.delete_earlier_than(datadir, time)
def ListChangedSince(rp): def ListChangedSince(rp):
...@@ -519,12 +517,12 @@ def ListChangedSince(rp): ...@@ -519,12 +517,12 @@ def ListChangedSince(rp):
except Time.TimeException, exc: Log.FatalError(str(exc)) except Time.TimeException, exc: Log.FatalError(str(exc))
mirror_root, index = restore_get_root(rp) mirror_root, index = restore_get_root(rp)
Globals.rbdir = datadir = mirror_root.append_path("rdiff-backup-data") Globals.rbdir = datadir = mirror_root.append_path("rdiff-backup-data")
mirror_time = Restore.get_mirror_time() mirror_time = restore.get_mirror_time()
def get_rids_recursive(rid): def get_rids_recursive(rid):
"""Yield all the rids under rid that have inc newer than rest_time""" """Yield all the rids under rid that have inc newer than rest_time"""
yield rid yield rid
for sub_rid in Restore.yield_rids(rid, rest_time, mirror_time): for sub_rid in restore.yield_rids(rid, rest_time, mirror_time):
for sub_sub_rid in get_rids_recursive(sub_rid): yield sub_sub_rid for sub_sub_rid in get_rids_recursive(sub_rid): yield sub_sub_rid
def determineChangeType(incList): def determineChangeType(incList):
...@@ -538,8 +536,8 @@ def ListChangedSince(rp): ...@@ -538,8 +536,8 @@ def ListChangedSince(rp):
else: return "Unknown!" else: return "Unknown!"
inc_rpath = datadir.append_path('increments', index) inc_rpath = datadir.append_path('increments', index)
inc_list = Restore.get_inclist(inc_rpath) inc_list = restore.get_inclist(inc_rpath)
root_rid = RestoreIncrementData(index, inc_rpath, inc_list) root_rid = restore.RestoreIncrementData(index, inc_rpath, inc_list)
for rid in get_rids_recursive(root_rid): for rid in get_rids_recursive(root_rid):
if rid.inc_list: if rid.inc_list:
if not rid.index: path = "." if not rid.index: path = "."
......
...@@ -19,8 +19,8 @@ ...@@ -19,8 +19,8 @@
"""Misc statistics methods, pertaining to dir and session stat files""" """Misc statistics methods, pertaining to dir and session stat files"""
from statistics import * import time
import Globals, Hardlink, increment, log, statistics, Time
# This is the RPath of the directory statistics file, and the # This is the RPath of the directory statistics file, and the
# associated open file. It will hold a line of statistics for # associated open file. It will hold a line of statistics for
...@@ -34,7 +34,7 @@ _dir_stats_header = """# rdiff-backup directory statistics file ...@@ -34,7 +34,7 @@ _dir_stats_header = """# rdiff-backup directory statistics file
# #
# Each line is in the following format: # Each line is in the following format:
# RelativeDirName %s # RelativeDirName %s
""" % " ".join(StatsObj.stat_file_attrs) """ % " ".join(statistics.StatsObj.stat_file_attrs)
def open_dir_stats_file(): def open_dir_stats_file():
"""Open directory statistics file, write header""" """Open directory statistics file, write header"""
...@@ -43,12 +43,12 @@ def open_dir_stats_file(): ...@@ -43,12 +43,12 @@ def open_dir_stats_file():
if Globals.compression: suffix = "data.gz" if Globals.compression: suffix = "data.gz"
else: suffix = "data" else: suffix = "data"
_dir_stats_rp = Inc.get_inc(Globals.rbdir.append("directory_statistics"), _dir_stats_rp = increment.get_inc(
Time.curtime, suffix) Globals.rbdir.append("directory_statistics"), Time.curtime, suffix)
if _dir_stats_rp.lstat(): if _dir_stats_rp.lstat():
Log("Warning, statistics file %s already exists, appending" % log.Log("Warning, statistics file %s already exists, appending" %
_dir_stats_rp.path, 2) _dir_stats_rp.path, 2)
_dir_stats_fp = _dir_stats_rp.open("ab", Globals.compression) _dir_stats_fp = _dir_stats_rp.open("ab", Globals.compression)
else: _dir_stats_fp = _dir_stats_rp.open("wb", Globals.compression) else: _dir_stats_fp = _dir_stats_rp.open("wb", Globals.compression)
_dir_stats_fp.write(_dir_stats_header) _dir_stats_fp.write(_dir_stats_header)
...@@ -68,8 +68,8 @@ def close_dir_stats_file(): ...@@ -68,8 +68,8 @@ def close_dir_stats_file():
def write_session_statistics(statobj): def write_session_statistics(statobj):
"""Write session statistics into file, log""" """Write session statistics into file, log"""
stat_inc = Inc.get_inc(Globals.rbdir.append("session_statistics"), stat_inc = increment.get_inc(
Time.curtime, "data") Globals.rbdir.append("session_statistics"), Time.curtime, "data")
statobj.StartTime = Time.curtime statobj.StartTime = Time.curtime
statobj.EndTime = time.time() statobj.EndTime = time.time()
...@@ -85,9 +85,8 @@ def write_session_statistics(statobj): ...@@ -85,9 +85,8 @@ def write_session_statistics(statobj):
statobj.write_stats_to_rp(stat_inc) statobj.write_stats_to_rp(stat_inc)
if Globals.print_statistics: if Globals.print_statistics:
message = statobj.get_stats_logstring("Session statistics") message = statobj.get_stats_logstring("Session statistics")
Log.log_to_file(message) log.Log.log_to_file(message)
Globals.client_conn.sys.stdout.write(message) Globals.client_conn.sys.stdout.write(message)
from increment import *
import Hardlink
...@@ -25,10 +25,10 @@ RobustAction and the like. ...@@ -25,10 +25,10 @@ RobustAction and the like.
""" """
import os, librsync import os, librsync
from log import Log
import robust, TempFile, Globals
class RdiffException(Exception): pass
def get_signature(rp): def get_signature(rp):
"""Take signature of rpin file and return in file object""" """Take signature of rpin file and return in file object"""
Log("Getting signature of %s" % rp.path, 7) Log("Getting signature of %s" % rp.path, 7)
...@@ -52,9 +52,9 @@ def write_delta_action(basis, new, delta, compress = None): ...@@ -52,9 +52,9 @@ def write_delta_action(basis, new, delta, compress = None):
before written to delta. before written to delta.
""" """
delta_tf = TempFileManager.new(delta) delta_tf = TempFile.new(delta)
def init(): write_delta(basis, new, delta_tf, compress) def init(): write_delta(basis, new, delta_tf, compress)
return Robust.make_tf_robustaction(init, delta_tf, delta) return robust.make_tf_robustaction(init, delta_tf, delta)
def write_delta(basis, new, delta, compress = None): def write_delta(basis, new, delta, compress = None):
"""Write rdiff delta which brings basis to new""" """Write rdiff delta which brings basis to new"""
...@@ -74,12 +74,12 @@ def patch_action(rp_basis, rp_delta, rp_out = None, out_tf = None, ...@@ -74,12 +74,12 @@ def patch_action(rp_basis, rp_delta, rp_out = None, out_tf = None,
""" """
if not rp_out: rp_out = rp_basis if not rp_out: rp_out = rp_basis
if not out_tf: out_tf = TempFileManager.new(rp_out) if not out_tf: out_tf = TempFile.new(rp_out)
def init(): def init():
rp_basis.conn.Rdiff.patch_local(rp_basis, rp_delta, rp_basis.conn.Rdiff.patch_local(rp_basis, rp_delta,
out_tf, delta_compressed) out_tf, delta_compressed)
out_tf.setdata() out_tf.setdata()
return Robust.make_tf_robustaction(init, out_tf, rp_out) return robust.make_tf_robustaction(init, out_tf, rp_out)
def patch_local(rp_basis, rp_delta, outrp, delta_compressed = None): def patch_local(rp_basis, rp_delta, outrp, delta_compressed = None):
"""Patch routine that must be run on rp_basis.conn """Patch routine that must be run on rp_basis.conn
...@@ -99,20 +99,20 @@ def patch_local(rp_basis, rp_delta, outrp, delta_compressed = None): ...@@ -99,20 +99,20 @@ def patch_local(rp_basis, rp_delta, outrp, delta_compressed = None):
def patch_with_attribs_action(rp_basis, rp_delta, rp_out = None): def patch_with_attribs_action(rp_basis, rp_delta, rp_out = None):
"""Like patch_action, but also transfers attributs from rp_delta""" """Like patch_action, but also transfers attributs from rp_delta"""
if not rp_out: rp_out = rp_basis if not rp_out: rp_out = rp_basis
tf = TempFileManager.new(rp_out) tf = TempFile.new(rp_out)
return Robust.chain_nested(patch_action(rp_basis, rp_delta, rp_out, tf), return robust.chain_nested(patch_action(rp_basis, rp_delta, rp_out, tf),
Robust.copy_attribs_action(rp_delta, tf)) robust.copy_attribs_action(rp_delta, tf))
def copy_action(rpin, rpout): def copy_action(rpin, rpout):
"""Use rdiff to copy rpin to rpout, conserving bandwidth""" """Use rdiff to copy rpin to rpout, conserving bandwidth"""
if not rpin.isreg() or not rpout.isreg() or rpin.conn is rpout.conn: if not rpin.isreg() or not rpout.isreg() or rpin.conn is rpout.conn:
# rdiff not applicable, fallback to regular copying # rdiff not applicable, fallback to regular copying
return Robust.copy_action(rpin, rpout) return robust.copy_action(rpin, rpout)
Log("Rdiff copying %s to %s" % (rpin.path, rpout.path), 6) Log("Rdiff copying %s to %s" % (rpin.path, rpout.path), 6)
out_tf = TempFileManager.new(rpout) out_tf = TempFile.new(rpout)
def init(): rpout.conn.Rdiff.copy_local(rpin, rpout, out_tf) def init(): rpout.conn.Rdiff.copy_local(rpin, rpout, out_tf)
return Robust.make_tf_robustaction(init, out_tf, rpout) return robust.make_tf_robustaction(init, out_tf, rpout)
def copy_local(rpin, rpout, rpnew): def copy_local(rpin, rpout, rpnew):
"""Write rpnew == rpin using rpout as basis. rpout and rpnew local""" """Write rpnew == rpin using rpout as basis. rpout and rpnew local"""
...@@ -122,6 +122,4 @@ def copy_local(rpin, rpout, rpnew): ...@@ -122,6 +122,4 @@ def copy_local(rpin, rpout, rpnew):
rpnew.write_from_fileobj(librsync.PatchedFile(rpout.open("rb"), deltafile)) rpnew.write_from_fileobj(librsync.PatchedFile(rpout.open("rb"), deltafile))
from log import *
from robust import *
...@@ -20,8 +20,7 @@ ...@@ -20,8 +20,7 @@
"""Functions to make sure remote requests are kosher""" """Functions to make sure remote requests are kosher"""
import sys, tempfile import sys, tempfile
import Globals, Main import Globals, Main, rpath
from rpath import *
class Violation(Exception): class Violation(Exception):
"""Exception that indicates an improper request has been received""" """Exception that indicates an improper request has been received"""
...@@ -76,8 +75,8 @@ def set_security_level(action, cmdpairs): ...@@ -76,8 +75,8 @@ def set_security_level(action, cmdpairs):
rdir = tempfile.gettempdir() rdir = tempfile.gettempdir()
elif islocal(cp1): elif islocal(cp1):
sec_level = "read-only" sec_level = "read-only"
rdir = Main.restore_get_root(RPath(Globals.local_connection, rdir = Main.restore_get_root(rpath.RPath(Globals.local_connection,
getpath(cp1)))[0].path getpath(cp1)))[0].path
else: else:
assert islocal(cp2) assert islocal(cp2)
sec_level = "all" sec_level = "all"
...@@ -101,8 +100,8 @@ def set_security_level(action, cmdpairs): ...@@ -101,8 +100,8 @@ def set_security_level(action, cmdpairs):
else: assert 0, "Unknown action %s" % action else: assert 0, "Unknown action %s" % action
Globals.security_level = sec_level Globals.security_level = sec_level
Globals.restrict_path = RPath(Globals.local_connection, Globals.restrict_path = rpath.RPath(Globals.local_connection,
rdir).normalize().path rdir).normalize().path
def set_allowed_requests(sec_level): def set_allowed_requests(sec_level):
"""Set the allowed requests list using the security level""" """Set the allowed requests list using the security level"""
...@@ -111,44 +110,46 @@ def set_allowed_requests(sec_level): ...@@ -111,44 +110,46 @@ def set_allowed_requests(sec_level):
allowed_requests = ["VirtualFile.readfromid", "VirtualFile.closebyid", allowed_requests = ["VirtualFile.readfromid", "VirtualFile.closebyid",
"Globals.get", "Globals.is_not_None", "Globals.get", "Globals.is_not_None",
"Globals.get_dict_val", "Globals.get_dict_val",
"Log.open_logfile_allconn", "log.Log.open_logfile_allconn",
"Log.close_logfile_allconn", "log.Log.close_logfile_allconn",
"SetConnections.add_redirected_conn", "SetConnections.add_redirected_conn",
"RedirectedRun", "RedirectedRun",
"sys.stdout.write"] "sys.stdout.write"]
if sec_level == "minimal": pass if sec_level == "minimal": pass
elif sec_level == "read-only" or sec_level == "update-only": elif sec_level == "read-only" or sec_level == "update-only":
allowed_requests.extend(["C.make_file_dict", allowed_requests.extend(
"os.getuid", ["C.make_file_dict",
"os.listdir", "os.getuid",
"Time.setcurtime_local", "os.listdir",
"Resume.ResumeCheck", "Time.setcurtime_local",
"HLSourceStruct.split_initial_dsiter", "robust.Resume.ResumeCheck",
"HLSourceStruct.get_diffs_and_finalize", "highlevel.HLSourceStruct.split_initial_dsiter",
"RPathStatic.gzip_open_local_read", "highlevel.HLSourceStruct.get_diffs_and_finalize",
"RPathStatic.open_local_read"]) "rpath.gzip_open_local_read",
"rpath.open_local_read"])
if sec_level == "update-only": if sec_level == "update-only":
allowed_requests. \ allowed_requests.extend(
extend(["Log.open_logfile_local", "Log.close_logfile_local", ["Log.open_logfile_local", "Log.close_logfile_local",
"Log.close_logfile_allconn", "Log.log_to_file", "Log.close_logfile_allconn", "Log.log_to_file",
"SaveState.init_filenames", "robust.SaveState.init_filenames",
"SaveState.touch_last_file", "robust.SaveState.touch_last_file",
"HLDestinationStruct.get_sigs", "highlevel.HLDestinationStruct.get_sigs",
"HLDestinationStruct.patch_w_datadir_writes", "highlevel.HLDestinationStruct.patch_w_datadir_writes",
"HLDestinationStruct.patch_and_finalize", "highlevel.HLDestinationStruct.patch_and_finalize",
"HLDestinationStruct.patch_increment_and_finalize", "highlevel.HLDestinationStruct.patch_increment_and_finalize",
"Main.backup_touch_curmirror_local", "Main.backup_touch_curmirror_local",
"Globals.ITRB.increment_stat"]) "Globals.ITRB.increment_stat"])
if Globals.server: if Globals.server:
allowed_requests.extend(["SetConnections.init_connection_remote", allowed_requests.extend(
"Log.setverbosity", ["SetConnections.init_connection_remote",
"Log.setterm_verbosity", "Log.setverbosity",
"Time.setprevtime_local", "Log.setterm_verbosity",
"FilenameMapping.set_init_quote_vals_local", "Time.setprevtime_local",
"Globals.postset_regexp_local", "FilenameMapping.set_init_quote_vals_local",
"Globals.set_select", "Globals.postset_regexp_local",
"HLSourceStruct.set_session_info", "Globals.set_select",
"HLDestinationStruct.set_session_info"]) "highlevel.HLSourceStruct.set_session_info",
"highlevel.HLDestinationStruct.set_session_info"])
def vet_request(request, arglist): def vet_request(request, arglist):
"""Examine request for security violations""" """Examine request for security violations"""
...@@ -156,7 +157,7 @@ def vet_request(request, arglist): ...@@ -156,7 +157,7 @@ def vet_request(request, arglist):
security_level = Globals.security_level security_level = Globals.security_level
if Globals.restrict_path: if Globals.restrict_path:
for arg in arglist: for arg in arglist:
if isinstance(arg, RPath): vet_rpath(arg) if isinstance(arg, rpath.RPath): vet_rpath(arg)
if security_level == "all": return if security_level == "all": return
if request.function_string in allowed_requests: return if request.function_string in allowed_requests: return
if request.function_string == "Globals.set": if request.function_string == "Globals.set":
......
...@@ -25,6 +25,10 @@ the related connections. ...@@ -25,6 +25,10 @@ the related connections.
""" """
import os
from log import Log
import Globals, FilenameMapping, connection, rpath
# This is the schema that determines how rdiff-backup will open a # This is the schema that determines how rdiff-backup will open a
# pipe to the remote system. If the file is given as A::B, %s will # pipe to the remote system. If the file is given as A::B, %s will
# be substituted with A in the schema. # be substituted with A in the schema.
...@@ -68,7 +72,7 @@ def cmdpair2rp(cmd_pair): ...@@ -68,7 +72,7 @@ def cmdpair2rp(cmd_pair):
cmd, filename = cmd_pair cmd, filename = cmd_pair
if cmd: conn = init_connection(cmd) if cmd: conn = init_connection(cmd)
else: conn = Globals.local_connection else: conn = Globals.local_connection
return RPath(conn, filename).normalize() return rpath.RPath(conn, filename).normalize()
def desc2cmd_pairs(desc_pair): def desc2cmd_pairs(desc_pair):
"""Return pair (remote_cmd, filename) from desc_pair""" """Return pair (remote_cmd, filename) from desc_pair"""
...@@ -127,7 +131,7 @@ def init_connection(remote_cmd): ...@@ -127,7 +131,7 @@ def init_connection(remote_cmd):
Log("Executing " + remote_cmd, 4) Log("Executing " + remote_cmd, 4)
stdin, stdout = os.popen2(remote_cmd) stdin, stdout = os.popen2(remote_cmd)
conn_number = len(Globals.connections) conn_number = len(Globals.connections)
conn = PipeConnection(stdout, stdin, conn_number) conn = connection.PipeConnection(stdout, stdin, conn_number)
check_connection_version(conn, remote_cmd) check_connection_version(conn, remote_cmd)
Log("Registering connection %d" % conn_number, 7) Log("Registering connection %d" % conn_number, 7)
...@@ -138,7 +142,7 @@ def init_connection(remote_cmd): ...@@ -138,7 +142,7 @@ def init_connection(remote_cmd):
def check_connection_version(conn, remote_cmd): def check_connection_version(conn, remote_cmd):
"""Log warning if connection has different version""" """Log warning if connection has different version"""
try: remote_version = conn.Globals.get('version') try: remote_version = conn.Globals.get('version')
except ConnectionReadError, exception: except connection.ConnectionReadError, exception:
Log.FatalError("""%s Log.FatalError("""%s
Couldn't start up the remote connection by executing Couldn't start up the remote connection by executing
...@@ -184,7 +188,7 @@ def init_connection_remote(conn_number): ...@@ -184,7 +188,7 @@ def init_connection_remote(conn_number):
def add_redirected_conn(conn_number): def add_redirected_conn(conn_number):
"""Run on server side - tell about redirected connection""" """Run on server side - tell about redirected connection"""
Globals.connection_dict[conn_number] = \ Globals.connection_dict[conn_number] = \
RedirectedConnection(conn_number) connection.RedirectedConnection(conn_number)
def UpdateGlobal(setting_name, val): def UpdateGlobal(setting_name, val):
"""Update value of global variable across all connections""" """Update value of global variable across all connections"""
...@@ -230,9 +234,3 @@ Local version: %s ...@@ -230,9 +234,3 @@ Local version: %s
Remote version: %s""" % (Globals.version, version) Remote version: %s""" % (Globals.version, version)
else: print "Server OK" else: print "Server OK"
from log import *
from rpath import *
from connection import *
import Globals, FilenameMapping
...@@ -20,7 +20,7 @@ ...@@ -20,7 +20,7 @@
"""Support code for remote execution and data transfer""" """Support code for remote execution and data transfer"""
from __future__ import generators from __future__ import generators
import types, os, tempfile, cPickle, shutil, traceback, pickle, socket import types, os, tempfile, cPickle, shutil, traceback, pickle, socket, sys
class ConnectionError(Exception): pass class ConnectionError(Exception): pass
...@@ -121,11 +121,13 @@ class LowLevelPipeConnection(Connection): ...@@ -121,11 +121,13 @@ class LowLevelPipeConnection(Connection):
"""Put an object into the pipe (will send raw if string)""" """Put an object into the pipe (will send raw if string)"""
Log.conn("sending", obj, req_num) Log.conn("sending", obj, req_num)
if type(obj) is types.StringType: self._putbuf(obj, req_num) if type(obj) is types.StringType: self._putbuf(obj, req_num)
elif isinstance(obj, Connection): self._putconn(obj, req_num) elif isinstance(obj, connection.Connection):self._putconn(obj, req_num)
elif isinstance(obj, TempFile): self._puttempfile(obj, req_num) elif isinstance(obj, TempFile.TempFile):
elif isinstance(obj, DSRPath): self._putdsrpath(obj, req_num) self._puttempfile(obj, req_num)
elif isinstance(obj, RPath): self._putrpath(obj, req_num) elif isinstance(obj, destructive_stepping.DSRPath):
elif isinstance(obj, RORPath): self._putrorpath(obj, req_num) self._putdsrpath(obj, req_num)
elif isinstance(obj, rpath.RPath): self._putrpath(obj, req_num)
elif isinstance(obj, rpath.RORPath): self._putrorpath(obj, req_num)
elif ((hasattr(obj, "read") or hasattr(obj, "write")) elif ((hasattr(obj, "read") or hasattr(obj, "write"))
and hasattr(obj, "close")): self._putfile(obj, req_num) and hasattr(obj, "close")): self._putfile(obj, req_num)
elif hasattr(obj, "next"): self._putiter(obj, req_num) elif hasattr(obj, "next"): self._putiter(obj, req_num)
...@@ -146,7 +148,7 @@ class LowLevelPipeConnection(Connection): ...@@ -146,7 +148,7 @@ class LowLevelPipeConnection(Connection):
def _putiter(self, iterator, req_num): def _putiter(self, iterator, req_num):
"""Put an iterator through the pipe""" """Put an iterator through the pipe"""
self._write("i", str(VirtualFile.new(RORPIter.ToFile(iterator))), self._write("i", str(VirtualFile.new(rorpiter.ToFile(iterator))),
req_num) req_num)
def _puttempfile(self, tempfile, req_num): def _puttempfile(self, tempfile, req_num):
...@@ -239,8 +241,8 @@ class LowLevelPipeConnection(Connection): ...@@ -239,8 +241,8 @@ class LowLevelPipeConnection(Connection):
elif format_string == "b": result = data elif format_string == "b": result = data
elif format_string == "f": result = VirtualFile(self, int(data)) elif format_string == "f": result = VirtualFile(self, int(data))
elif format_string == "i": elif format_string == "i":
result = RORPIter.FromFile(BufferedRead(VirtualFile(self, result = rorpiter.FromFile(iterfile.BufferedRead(
int(data)))) VirtualFile(self, int(data))))
elif format_string == "t": result = self._gettempfile(data) elif format_string == "t": result = self._gettempfile(data)
elif format_string == "r": result = self._getrorpath(data) elif format_string == "r": result = self._getrorpath(data)
elif format_string == "R": result = self._getrpath(data) elif format_string == "R": result = self._getrpath(data)
...@@ -254,23 +256,25 @@ class LowLevelPipeConnection(Connection): ...@@ -254,23 +256,25 @@ class LowLevelPipeConnection(Connection):
def _getrorpath(self, raw_rorpath_buf): def _getrorpath(self, raw_rorpath_buf):
"""Reconstruct RORPath object from raw data""" """Reconstruct RORPath object from raw data"""
index, data = cPickle.loads(raw_rorpath_buf) index, data = cPickle.loads(raw_rorpath_buf)
return RORPath(index, data) return rpath.RORPath(index, data)
def _gettempfile(self, raw_tf_buf): def _gettempfile(self, raw_tf_buf):
"""Return TempFile object indicated by raw_tf_buf""" """Return TempFile object indicated by raw_tf_buf"""
conn_number, base, index, data = cPickle.loads(raw_tf_buf) conn_number, base, index, data = cPickle.loads(raw_tf_buf)
return TempFile(Globals.connection_dict[conn_number], return TempFile.TempFile(Globals.connection_dict[conn_number],
base, index, data) base, index, data)
def _getrpath(self, raw_rpath_buf): def _getrpath(self, raw_rpath_buf):
"""Return RPath object indicated by raw_rpath_buf""" """Return RPath object indicated by raw_rpath_buf"""
conn_number, base, index, data = cPickle.loads(raw_rpath_buf) conn_number, base, index, data = cPickle.loads(raw_rpath_buf)
return RPath(Globals.connection_dict[conn_number], base, index, data) return rpath.RPath(Globals.connection_dict[conn_number],
base, index, data)
def _getdsrpath(self, raw_dsrpath_buf): def _getdsrpath(self, raw_dsrpath_buf):
"""Return DSRPath object indicated by buf""" """Return DSRPath object indicated by buf"""
conn_number, state_dict = cPickle.loads(raw_dsrpath_buf) conn_number, state_dict = cPickle.loads(raw_dsrpath_buf)
empty_dsrp = DSRPath("bypass", Globals.local_connection, None) empty_dsrp = destructive_stepping.DSRPath("bypass",
Globals.local_connection, None)
empty_dsrp.__setstate__(state_dict) empty_dsrp.__setstate__(state_dict)
empty_dsrp.conn = Globals.connection_dict[conn_number] empty_dsrp.conn = Globals.connection_dict[conn_number]
empty_dsrp.file = None empty_dsrp.file = None
...@@ -538,22 +542,11 @@ class VirtualFile: ...@@ -538,22 +542,11 @@ class VirtualFile:
# everything has to be available here for remote connection's use, but # everything has to be available here for remote connection's use, but
# put at bottom to reduce circularities. # put at bottom to reduce circularities.
import Globals, Time, Rdiff, Hardlink, FilenameMapping, C, Security, Main import Globals, Time, Rdiff, Hardlink, FilenameMapping, C, Security, \
from static import * Main, rorpiter, selection, increment, statistics, manage, lazy, \
from lazy import * iterfile, rpath, robust, restore, manage, highlevel, connection, \
from log import * TempFile, destructive_stepping, SetConnections
from iterfile import * from log import Log
from connection import *
from rpath import *
from robust import *
from rorpiter import *
from selection import *
from statistics import *
from increment import *
from restore import *
from manage import *
from highlevel import *
Globals.local_connection = LocalConnection() Globals.local_connection = LocalConnection()
Globals.connections.append(Globals.local_connection) Globals.connections.append(Globals.local_connection)
......
# Copyright 2002 Ben Escoto # Copyright 2002 Ben Escoto
# #
# This file is part of rdiff-backup. # This file is part of rdiff-backup.
...@@ -21,14 +22,14 @@ ...@@ -21,14 +22,14 @@
from __future__ import generators from __future__ import generators
import types import types
from rpath import * import Globals, rpath, log
from lazy import *
class DSRPPermError(Exception): class DSRPPermError(Exception):
"""Exception used when a DSRPath can't get sufficient permissions""" """Exception used when a DSRPath can't get sufficient permissions"""
pass pass
class DSRPath(RPath): class DSRPath(rpath.RPath):
"""Destructive Stepping RPath """Destructive Stepping RPath
Sometimes when we traverse the directory tree, even when we just Sometimes when we traverse the directory tree, even when we just
...@@ -59,11 +60,11 @@ class DSRPath(RPath): ...@@ -59,11 +60,11 @@ class DSRPath(RPath):
""" """
if base == 0: if base == 0:
assert isinstance(conn_or_rp, RPath) assert isinstance(conn_or_rp, rpath.RPath)
RPath.__init__(self, conn_or_rp.conn, rpath.RPath.__init__(self, conn_or_rp.conn,
conn_or_rp.base, conn_or_rp.index) conn_or_rp.base, conn_or_rp.index)
self.path = conn_or_rp.path # conn_or_rp may be quoted self.path = conn_or_rp.path # conn_or_rp may be quoted
else: RPath.__init__(self, conn_or_rp, base, index) else: rpath.RPath.__init__(self, conn_or_rp, base, index)
if source != "bypass": if source != "bypass":
# "bypass" val is used when unpackaging over connection # "bypass" val is used when unpackaging over connection
...@@ -107,8 +108,8 @@ class DSRPath(RPath): ...@@ -107,8 +108,8 @@ class DSRPath(RPath):
if not self.hasfullperms(): self.chmod_bypass(0700) if not self.hasfullperms(): self.chmod_bypass(0700)
def warn(self, err): def warn(self, err):
Log("Received error '%s' when dealing with file %s, skipping..." log.Log("Received error '%s' when dealing with file %s, skipping..."
% (err, self.path), 1) % (err, self.path), 1)
raise DSRPPermError(self.path) raise DSRPPermError(self.path)
def __getstate__(self): def __getstate__(self):
...@@ -136,7 +137,7 @@ class DSRPath(RPath): ...@@ -136,7 +137,7 @@ class DSRPath(RPath):
def chmod(self, permissions): def chmod(self, permissions):
"""Change permissions, delaying if self.perms_delayed is set""" """Change permissions, delaying if self.perms_delayed is set"""
if self.delay_perms: self.newperms = self.data['perms'] = permissions if self.delay_perms: self.newperms = self.data['perms'] = permissions
else: RPath.chmod(self, permissions) else: rpath.RPath.chmod(self, permissions)
def getperms(self): def getperms(self):
"""Return dsrp's intended permissions""" """Return dsrp's intended permissions"""
...@@ -148,7 +149,7 @@ class DSRPath(RPath): ...@@ -148,7 +149,7 @@ class DSRPath(RPath):
"""Change permissions without updating the data dictionary""" """Change permissions without updating the data dictionary"""
self.delay_perms = 1 self.delay_perms = 1
if self.newperms is None: self.newperms = self.getperms() if self.newperms is None: self.newperms = self.getperms()
Log("DSRP: Perm bypass %s to %o" % (self.path, permissions), 8) log.Log("DSRP: Perm bypass %s to %o" % (self.path, permissions), 8)
self.conn.os.chmod(self.path, permissions) self.conn.os.chmod(self.path, permissions)
def settime(self, accesstime, modtime): def settime(self, accesstime, modtime):
...@@ -157,12 +158,12 @@ class DSRPath(RPath): ...@@ -157,12 +158,12 @@ class DSRPath(RPath):
if self.delay_mtime: self.newmtime = self.data['mtime'] = modtime if self.delay_mtime: self.newmtime = self.data['mtime'] = modtime
if not self.delay_atime or not self.delay_mtime: if not self.delay_atime or not self.delay_mtime:
RPath.settime(self, accesstime, modtime) rpath.RPath.settime(self, accesstime, modtime)
def setmtime(self, modtime): def setmtime(self, modtime):
"""Change mtime, delaying if self.times_delayed is set""" """Change mtime, delaying if self.times_delayed is set"""
if self.delay_mtime: self.newmtime = self.data['mtime'] = modtime if self.delay_mtime: self.newmtime = self.data['mtime'] = modtime
else: RPath.setmtime(self, modtime) else: rpath.RPath.setmtime(self, modtime)
def getmtime(self): def getmtime(self):
"""Return dsrp's intended modification time""" """Return dsrp's intended modification time"""
...@@ -181,18 +182,18 @@ class DSRPath(RPath): ...@@ -181,18 +182,18 @@ class DSRPath(RPath):
if not self.lstat(): return # File has been deleted in meantime if not self.lstat(): return # File has been deleted in meantime
if self.delay_perms and self.newperms is not None: if self.delay_perms and self.newperms is not None:
Log("Finalizing permissions of dsrp %s to %s" % log.Log("Finalizing permissions of dsrp %s to %s" %
(self.path, self.newperms), 8) (self.path, self.newperms), 8)
RPath.chmod(self, self.newperms) rpath.RPath.chmod(self, self.newperms)
do_atime = self.delay_atime and self.newatime is not None do_atime = self.delay_atime and self.newatime is not None
do_mtime = self.delay_mtime and self.newmtime is not None do_mtime = self.delay_mtime and self.newmtime is not None
if do_atime and do_mtime: if do_atime and do_mtime:
RPath.settime(self, self.newatime, self.newmtime) rpath.RPath.settime(self, self.newatime, self.newmtime)
elif do_atime and not do_mtime: elif do_atime and not do_mtime:
RPath.settime(self, self.newatime, self.getmtime()) rpath.RPath.settime(self, self.newatime, self.getmtime())
elif not do_atime and do_mtime: elif not do_atime and do_mtime:
RPath.setmtime(self, self.newmtime) rpath.RPath.setmtime(self, self.newmtime)
def newpath(self, newpath, index = ()): def newpath(self, newpath, index = ()):
"""Return similar DSRPath but with new path""" """Return similar DSRPath but with new path"""
...@@ -208,29 +209,4 @@ class DSRPath(RPath): ...@@ -208,29 +209,4 @@ class DSRPath(RPath):
return self.__class__(self.source, self.conn, self.base, index) return self.__class__(self.source, self.conn, self.base, index)
class DestructiveSteppingFinalizer(ITRBranch):
"""Finalizer that can work on an iterator of dsrpaths
The reason we have to use an IterTreeReducer is that some files
should be updated immediately, but for directories we sometimes
need to update all the files in the directory before finally
coming back to it.
"""
dsrpath = None
def start_process(self, index, dsrpath):
self.dsrpath = dsrpath
def end_process(self):
if self.dsrpath: self.dsrpath.write_changes()
def can_fast_process(self, index, dsrpath):
return not self.dsrpath.isdir()
def fast_process(self, index, dsrpath):
if self.dsrpath: self.dsrpath.write_changes()
from log import *
from robust import *
import Globals
from __future__ import generators from __future__ import generators
from manage import * import rpath, manage
from rpath import *
####################################################################### #######################################################################
# #
......
...@@ -20,17 +20,8 @@ ...@@ -20,17 +20,8 @@
"""High level functions for mirroring, mirror & inc, etc.""" """High level functions for mirroring, mirror & inc, etc."""
from __future__ import generators from __future__ import generators
from static import * import Globals, MiscStats, metadata, rorpiter, TempFile, \
Hardlink, robust, increment, rpath, lazy, static, log
class SkipFileException(Exception):
"""Signal that the current file should be skipped but then continue
This exception will often be raised when there is problem reading
an individual file, but it makes sense for the rest of the backup
to keep going.
"""
pass
class HighLevel: class HighLevel:
...@@ -48,8 +39,8 @@ class HighLevel: ...@@ -48,8 +39,8 @@ class HighLevel:
Otherwise only mirror and don't create any extra files. Otherwise only mirror and don't create any extra files.
""" """
SourceS = src_rpath.conn.HLSourceStruct SourceS = src_rpath.conn.highlevel.HLSourceStruct
DestS = dest_rpath.conn.HLDestinationStruct DestS = dest_rpath.conn.highlevel.HLDestinationStruct
src_init_dsiter = SourceS.split_initial_dsiter() src_init_dsiter = SourceS.split_initial_dsiter()
dest_sigiter = DestS.get_sigs(dest_rpath, src_init_dsiter) dest_sigiter = DestS.get_sigs(dest_rpath, src_init_dsiter)
...@@ -61,8 +52,8 @@ class HighLevel: ...@@ -61,8 +52,8 @@ class HighLevel:
def Mirror_and_increment(src_rpath, dest_rpath, inc_rpath, def Mirror_and_increment(src_rpath, dest_rpath, inc_rpath,
session_info = None): session_info = None):
"""Mirror + put increments in tree based at inc_rpath""" """Mirror + put increments in tree based at inc_rpath"""
SourceS = src_rpath.conn.HLSourceStruct SourceS = src_rpath.conn.highlevel.HLSourceStruct
DestS = dest_rpath.conn.HLDestinationStruct DestS = dest_rpath.conn.highlevel.HLDestinationStruct
src_init_dsiter = SourceS.split_initial_dsiter() src_init_dsiter = SourceS.split_initial_dsiter()
dest_sigiter = DestS.get_sigs(dest_rpath, src_init_dsiter) dest_sigiter = DestS.get_sigs(dest_rpath, src_init_dsiter)
...@@ -72,7 +63,7 @@ class HighLevel: ...@@ -72,7 +63,7 @@ class HighLevel:
dest_rpath.setdata() dest_rpath.setdata()
inc_rpath.setdata() inc_rpath.setdata()
MakeStatic(HighLevel) static.MakeStatic(HighLevel)
class HLSourceStruct: class HLSourceStruct:
...@@ -80,7 +71,7 @@ class HLSourceStruct: ...@@ -80,7 +71,7 @@ class HLSourceStruct:
def split_initial_dsiter(cls): def split_initial_dsiter(cls):
"""Set iterators of all dsrps from rpath, returning one""" """Set iterators of all dsrps from rpath, returning one"""
dsiter = Globals.select_source.set_iter() dsiter = Globals.select_source.set_iter()
initial_dsiter1, cls.initial_dsiter2 = Iter.multiplex(dsiter, 2) initial_dsiter1, cls.initial_dsiter2 = lazy.Iter.multiplex(dsiter, 2)
return initial_dsiter1 return initial_dsiter1
def get_diffs_and_finalize(cls, sigiter): def get_diffs_and_finalize(cls, sigiter):
...@@ -90,10 +81,10 @@ class HLSourceStruct: ...@@ -90,10 +81,10 @@ class HLSourceStruct:
dissimilar files. dissimilar files.
""" """
collated = RORPIter.CollateIterators(cls.initial_dsiter2, sigiter) collated = rorpiter.CollateIterators(cls.initial_dsiter2, sigiter)
def error_handler(exc, dest_sig, rp): def error_handler(exc, dest_sig, rp):
Log("Error %s producing a diff of %s" % log.Log("Error %s producing a diff of %s" %
(exc, rp and rp.path), 2) (exc, rp and rp.path), 2)
return None return None
def diffs(): def diffs():
...@@ -101,12 +92,12 @@ class HLSourceStruct: ...@@ -101,12 +92,12 @@ class HLSourceStruct:
if dest_sig: if dest_sig:
if dest_sig.isplaceholder(): yield dest_sig if dest_sig.isplaceholder(): yield dest_sig
else: else:
diff = Robust.check_common_error( diff = robust.check_common_error(
error_handler, RORPIter.diffonce, [dest_sig, rp]) error_handler, rorpiter.diffonce, [dest_sig, rp])
if diff: yield diff if diff: yield diff
return diffs() return diffs()
MakeClass(HLSourceStruct) static.MakeClass(HLSourceStruct)
class HLDestinationStruct: class HLDestinationStruct:
...@@ -115,7 +106,7 @@ class HLDestinationStruct: ...@@ -115,7 +106,7 @@ class HLDestinationStruct:
def split_initial_dsiter(cls): def split_initial_dsiter(cls):
"""Set initial_dsiters (iteration of all rps from rpath)""" """Set initial_dsiters (iteration of all rps from rpath)"""
result, cls.initial_dsiter2 = \ result, cls.initial_dsiter2 = \
Iter.multiplex(Globals.select_mirror.set_iter(), 2) lazy.Iter.multiplex(Globals.select_mirror.set_iter(), 2)
return result return result
def get_dissimilar(cls, baserp, src_init_iter, dest_init_iter): def get_dissimilar(cls, baserp, src_init_iter, dest_init_iter):
...@@ -134,14 +125,14 @@ class HLDestinationStruct: ...@@ -134,14 +125,14 @@ class HLDestinationStruct:
will depend on the Globals.conn_bufsize value. will depend on the Globals.conn_bufsize value.
""" """
collated = RORPIter.CollateIterators(src_init_iter, dest_init_iter) collated = rorpiter.CollateIterators(src_init_iter, dest_init_iter)
def compare(src_rorp, dest_dsrp): def compare(src_rorp, dest_dsrp):
"""Return dest_dsrp if they are different, None if the same""" """Return dest_dsrp if they are different, None if the same"""
if not dest_dsrp: if not dest_dsrp:
dest_dsrp = cls.get_dsrp(baserp, src_rorp.index) dest_dsrp = cls.get_dsrp(baserp, src_rorp.index)
if dest_dsrp.lstat(): if dest_dsrp.lstat():
Log("Warning: Found unexpected destination file %s, " log.Log("Warning: Found unexpected destination file %s, "
"not processing it." % dest_dsrp.path, 2) "not processing it." % dest_dsrp.path, 2)
return None return None
elif (src_rorp and src_rorp == dest_dsrp and elif (src_rorp and src_rorp == dest_dsrp and
(not Globals.preserve_hardlinks or (not Globals.preserve_hardlinks or
...@@ -162,7 +153,7 @@ class HLDestinationStruct: ...@@ -162,7 +153,7 @@ class HLDestinationStruct:
counter = 0 counter = 0
yield dsrp yield dsrp
elif counter == 20: elif counter == 20:
placeholder = RORPath(src_rorp.index) placeholder = rpath.RORPath(src_rorp.index)
placeholder.make_placeholder() placeholder.make_placeholder()
counter = 0 counter = 0
yield placeholder yield placeholder
...@@ -185,11 +176,11 @@ class HLDestinationStruct: ...@@ -185,11 +176,11 @@ class HLDestinationStruct:
metadata.CloseMetadata() metadata.CloseMetadata()
dup = duplicate_with_write(src_init_iter) dup = duplicate_with_write(src_init_iter)
dissimilars = cls.get_dissimilar(baserp, dup, dest_iters1) dissimilars = cls.get_dissimilar(baserp, dup, dest_iters1)
return RORPIter.Signatures(dissimilars) return rorpiter.Signatures(dissimilars)
def get_dsrp(cls, dest_rpath, index): def get_dsrp(cls, dest_rpath, index):
"""Return initialized rpath based on dest_rpath with given index""" """Return initialized rpath based on dest_rpath with given index"""
rp = RPath(dest_rpath.conn, dest_rpath.base, index) rp = rpath.RPath(dest_rpath.conn, dest_rpath.base, index)
if Globals.quoting_enabled: rp.quote_path() if Globals.quoting_enabled: rp.quote_path()
return rp return rp
...@@ -197,14 +188,16 @@ class HLDestinationStruct: ...@@ -197,14 +188,16 @@ class HLDestinationStruct:
"""Return finalizer, starting from session info if necessary""" """Return finalizer, starting from session info if necessary"""
old_finalizer = cls._session_info and cls._session_info.finalizer old_finalizer = cls._session_info and cls._session_info.finalizer
if old_finalizer: return old_finalizer if old_finalizer: return old_finalizer
else: return IterTreeReducer(DestructiveSteppingFinalizer, []) else: return rorpiter.IterTreeReducer(
rorpiter.DestructiveSteppingFinalizer, [])
def get_ITR(cls, inc_rpath): def get_ITR(cls, inc_rpath):
"""Return ITR, starting from state if necessary""" """Return ITR, starting from state if necessary"""
if cls._session_info and cls._session_info.ITR: if cls._session_info and cls._session_info.ITR:
return cls._session_info.ITR return cls._session_info.ITR
else: else:
iitr = IterTreeReducer(IncrementITRB, [inc_rpath]) iitr = rorpiter.IterTreeReducer(increment.IncrementITRB,
[inc_rpath])
iitr.root_branch.override_changed() iitr.root_branch.override_changed()
Globals.ITRB = iitr.root_branch Globals.ITRB = iitr.root_branch
iitr.root_branch.Errors = 0 iitr.root_branch.Errors = 0
...@@ -214,38 +207,38 @@ class HLDestinationStruct: ...@@ -214,38 +207,38 @@ class HLDestinationStruct:
"""Return MirrorITR, starting from state if available""" """Return MirrorITR, starting from state if available"""
if cls._session_info and cls._session_info.ITR: if cls._session_info and cls._session_info.ITR:
return cls._session_info.ITR return cls._session_info.ITR
ITR = IterTreeReducer(MirrorITRB, [inc_rpath]) ITR = rorpiter.IterTreeReducer(increment.MirrorITRB, [inc_rpath])
Globals.ITRB = ITR.root_branch Globals.ITRB = ITR.root_branch
ITR.root_branch.Errors = 0 ITR.root_branch.Errors = 0
return ITR return ITR
def patch_and_finalize(cls, dest_rpath, diffs): def patch_and_finalize(cls, dest_rpath, diffs):
"""Apply diffs and finalize""" """Apply diffs and finalize"""
collated = RORPIter.CollateIterators(diffs, cls.initial_dsiter2) collated = rorpiter.CollateIterators(diffs, cls.initial_dsiter2)
#finalizer = cls.get_finalizer() #finalizer = cls.get_finalizer()
diff_rorp, rp = None, None diff_rorp, rp = None, None
def patch(diff_rorp, dsrp): def patch(diff_rorp, dsrp):
if not dsrp: dsrp = cls.get_dsrp(dest_rpath, diff_rorp.index) if not dsrp: dsrp = cls.get_dsrp(dest_rpath, diff_rorp.index)
if diff_rorp and not diff_rorp.isplaceholder(): if diff_rorp and not diff_rorp.isplaceholder():
RORPIter.patchonce_action(None, dsrp, diff_rorp).execute() rorpiter.patchonce_action(None, dsrp, diff_rorp).execute()
return dsrp return dsrp
def error_handler(exc, diff_rorp, dsrp): def error_handler(exc, diff_rorp, dsrp):
filename = dsrp and dsrp.path or os.path.join(*diff_rorp.index) filename = dsrp and dsrp.path or os.path.join(*diff_rorp.index)
Log("Error: %s processing file %s" % (exc, filename), 2) log.Log("Error: %s processing file %s" % (exc, filename), 2)
for indexed_tuple in collated: for indexed_tuple in collated:
Log(lambda: "Processing %s" % str(indexed_tuple), 7) log.Log(lambda: "Processing %s" % str(indexed_tuple), 7)
diff_rorp, dsrp = indexed_tuple diff_rorp, dsrp = indexed_tuple
dsrp = Robust.check_common_error(error_handler, patch, dsrp = robust.check_common_error(error_handler, patch,
[diff_rorp, dsrp]) [diff_rorp, dsrp])
#finalizer(dsrp.index, dsrp) #finalizer(dsrp.index, dsrp)
#finalizer.Finish() #finalizer.Finish()
def patch_w_datadir_writes(cls, dest_rpath, diffs, inc_rpath): def patch_w_datadir_writes(cls, dest_rpath, diffs, inc_rpath):
"""Apply diffs and finalize, with checkpointing and statistics""" """Apply diffs and finalize, with checkpointing and statistics"""
collated = RORPIter.CollateIterators(diffs, cls.initial_dsiter2) collated = rorpiter.CollateIterators(diffs, cls.initial_dsiter2)
#finalizer, ITR = cls.get_finalizer(), cls.get_MirrorITR(inc_rpath) #finalizer, ITR = cls.get_finalizer(), cls.get_MirrorITR(inc_rpath)
finalizer, ITR = None, cls.get_MirrorITR(inc_rpath) finalizer, ITR = None, cls.get_MirrorITR(inc_rpath)
MiscStats.open_dir_stats_file() MiscStats.open_dir_stats_file()
...@@ -253,7 +246,7 @@ class HLDestinationStruct: ...@@ -253,7 +246,7 @@ class HLDestinationStruct:
try: try:
for indexed_tuple in collated: for indexed_tuple in collated:
Log(lambda: "Processing %s" % str(indexed_tuple), 7) log.Log(lambda: "Processing %s" % str(indexed_tuple), 7)
diff_rorp, dsrp = indexed_tuple diff_rorp, dsrp = indexed_tuple
if not dsrp: dsrp = cls.get_dsrp(dest_rpath, diff_rorp.index) if not dsrp: dsrp = cls.get_dsrp(dest_rpath, diff_rorp.index)
if diff_rorp and diff_rorp.isplaceholder(): diff_rorp = None if diff_rorp and diff_rorp.isplaceholder(): diff_rorp = None
...@@ -270,7 +263,7 @@ class HLDestinationStruct: ...@@ -270,7 +263,7 @@ class HLDestinationStruct:
def patch_increment_and_finalize(cls, dest_rpath, diffs, inc_rpath): def patch_increment_and_finalize(cls, dest_rpath, diffs, inc_rpath):
"""Apply diffs, write increment if necessary, and finalize""" """Apply diffs, write increment if necessary, and finalize"""
collated = RORPIter.CollateIterators(diffs, cls.initial_dsiter2) collated = rorpiter.CollateIterators(diffs, cls.initial_dsiter2)
#finalizer, ITR = cls.get_finalizer(), cls.get_ITR(inc_rpath) #finalizer, ITR = cls.get_finalizer(), cls.get_ITR(inc_rpath)
finalizer, ITR = None, cls.get_ITR(inc_rpath) finalizer, ITR = None, cls.get_ITR(inc_rpath)
MiscStats.open_dir_stats_file() MiscStats.open_dir_stats_file()
...@@ -278,7 +271,7 @@ class HLDestinationStruct: ...@@ -278,7 +271,7 @@ class HLDestinationStruct:
try: try:
for indexed_tuple in collated: for indexed_tuple in collated:
Log(lambda: "Processing %s" % str(indexed_tuple), 7) log.Log(lambda: "Processing %s" % str(indexed_tuple), 7)
diff_rorp, dsrp = indexed_tuple diff_rorp, dsrp = indexed_tuple
index = indexed_tuple.index index = indexed_tuple.index
if not dsrp: dsrp = cls.get_dsrp(dest_rpath, index) if not dsrp: dsrp = cls.get_dsrp(dest_rpath, index)
...@@ -296,18 +289,12 @@ class HLDestinationStruct: ...@@ -296,18 +289,12 @@ class HLDestinationStruct:
def handle_last_error(cls, dsrp, finalizer, ITR): def handle_last_error(cls, dsrp, finalizer, ITR):
"""If catch fatal error, try to checkpoint before exiting""" """If catch fatal error, try to checkpoint before exiting"""
Log.exception(1, 2) log.Log.exception(1, 2)
TracebackArchive.log() robust.TracebackArchive.log()
#SaveState.checkpoint(ITR, finalizer, dsrp, 1) #SaveState.checkpoint(ITR, finalizer, dsrp, 1)
#if Globals.preserve_hardlinks: Hardlink.final_checkpoint(Globals.rbdir) #if Globals.preserve_hardlinks: Hardlink.final_checkpoint(Globals.rbdir)
#SaveState.touch_last_file_definitive() #SaveState.touch_last_file_definitive()
raise raise
MakeClass(HLDestinationStruct) static.MakeClass(HLDestinationStruct)
from log import *
from rpath import *
from robust import *
from increment import *
from rorpiter import *
import Globals, Hardlink, MiscStats, metadata
...@@ -20,7 +20,7 @@ ...@@ -20,7 +20,7 @@
"""Convert an iterator to a file object and vice-versa""" """Convert an iterator to a file object and vice-versa"""
import cPickle, array import cPickle, array
import Globals, C import Globals, C, robust, log
class IterFileException(Exception): pass class IterFileException(Exception): pass
...@@ -200,7 +200,7 @@ class FileWrappingIter: ...@@ -200,7 +200,7 @@ class FileWrappingIter:
def addfromfile(self): def addfromfile(self):
"""Read a chunk from the current file and return it""" """Read a chunk from the current file and return it"""
# Check file read for errors, buf = "" if find one # Check file read for errors, buf = "" if find one
buf = Robust.check_common_error(self.read_error_handler, buf = robust.check_common_error(self.read_error_handler,
self.currently_in_file.read, self.currently_in_file.read,
[Globals.blocksize]) [Globals.blocksize])
if not buf: if not buf:
...@@ -210,7 +210,7 @@ class FileWrappingIter: ...@@ -210,7 +210,7 @@ class FileWrappingIter:
def read_error_handler(self, exc, blocksize): def read_error_handler(self, exc, blocksize):
"""Log error when reading from file""" """Log error when reading from file"""
Log("Error '%s' reading from fileobj, truncating" % (str(exc),), 2) log.Log("Error '%s' reading from fileobj, truncating" % (str(exc),), 2)
return "" return ""
def _l2s_old(self, l): def _l2s_old(self, l):
...@@ -253,5 +253,4 @@ class BufferedRead: ...@@ -253,5 +253,4 @@ class BufferedRead:
def close(self): return self.file.close() def close(self): return self.file.close()
from log import *
from robust import *
...@@ -21,7 +21,8 @@ ...@@ -21,7 +21,8 @@
from __future__ import generators from __future__ import generators
import os, stat, types import os, stat, types
from static import * import static
class Iter: class Iter:
"""Hold static methods for the manipulation of lazy iterators""" """Hold static methods for the manipulation of lazy iterators"""
...@@ -163,7 +164,7 @@ class Iter: ...@@ -163,7 +164,7 @@ class Iter:
return tuple(map(make_iterator, range(num_of_forks))) return tuple(map(make_iterator, range(num_of_forks)))
MakeStatic(Iter) static.MakeStatic(Iter)
class IterMultiplex2: class IterMultiplex2:
...@@ -200,166 +201,3 @@ class IterMultiplex2: ...@@ -200,166 +201,3 @@ class IterMultiplex2:
else: elem = buf.pop(0) # a is in front, subtract an element else: elem = buf.pop(0) # a is in front, subtract an element
self.a_leading_by -= 1 self.a_leading_by -= 1
yield elem yield elem
class IterTreeReducer:
    """Tree style reducer object for iterator

    The indicies of a RORPIter form a tree type structure.  This class
    can be used on each element of an iter in sequence and the result
    will be as if the corresponding tree was reduced.  This tries to
    bridge the gap between the tree nature of directories, and the
    iterator nature of the connection between hosts and the temporal
    order in which the files are processed.

    NOTE(review): relies on Robust and Log being injected by the
    wildcard imports at the bottom of this module ("from robust
    import *", "from log import *") to break a circular dependency.
    """
    def __init__(self, branch_class, branch_args):
        """ITR initializer

        branch_class -- ITRBranch subclass instantiated per subtree
        branch_args -- positional argument tuple passed to branch_class
        """
        self.branch_class = branch_class
        self.branch_args = branch_args
        # self.index is the last index processed; None until first call
        self.index = None
        self.root_branch = branch_class(*branch_args)
        # Stack of open branches; innermost (deepest) branch is last
        self.branches = [self.root_branch]

    def finish_branches(self, index):
        """Run Finish() on all branches index has passed

        When we pass out of a branch, delete it and process it with
        the parent.  The innermost branches will be the last in the
        list.  Return None if we are out of the entire tree, and 1
        otherwise.
        """
        branches = self.branches
        while 1:
            to_be_finished = branches[-1]
            base_index = to_be_finished.base_index
            # index lies inside a branch iff the branch's base_index
            # is a prefix of it
            if base_index != index[:len(base_index)]:
                # out of the tree, finish with to_be_finished
                to_be_finished.call_end_proc()
                del branches[-1]
                if not branches: return None
                # Let the parent branch aggregate the finished child
                branches[-1].branch_process(to_be_finished)
            else: return 1

    def add_branch(self, index):
        """Return branch of type self.branch_class, add to branch list"""
        branch = self.branch_class(*self.branch_args)
        branch.base_index = index
        self.branches.append(branch)
        return branch

    def process_w_branch(self, branch, args):
        """Run start_process on latest branch

        Errors are routed to branch.on_error; start_successful is only
        set if no exception was caught.
        """
        Robust.check_common_error(branch.on_error,
                                  branch.start_process, args)
        if not branch.caught_exception: branch.start_successful = 1

    def Finish(self):
        """Call at end of sequence to tie everything up"""
        while 1:
            to_be_finished = self.branches.pop()
            to_be_finished.call_end_proc()
            if not self.branches: break
            self.branches[-1].branch_process(to_be_finished)

    def __call__(self, *args):
        """Process args, where args[0] is current position in iterator

        Returns true if args successfully processed, false if index is
        not in the current tree and thus the final result is
        available.

        Also note below we set self.index after doing the necessary
        start processing, in case there is a crash in the middle.
        """
        index = args[0]
        if self.index is None:
            # First element: anchor the root branch at this index
            self.root_branch.base_index = index
            self.process_w_branch(self.root_branch, args)
            self.index = index
            return 1

        if index <= self.index:
            # Out-of-order input: warn and skip rather than corrupt state
            Log("Warning: oldindex %s >= newindex %s" % (self.index, index), 2)
            return 1

        if self.finish_branches(index) is None:
            return None # We are no longer in the main tree
        last_branch = self.branches[-1]
        if last_branch.start_successful:
            if last_branch.can_fast_process(*args):
                last_branch.fast_process(*args)
            else:
                branch = self.add_branch(index)
                self.process_w_branch(branch, args)
        else: last_branch.log_prev_error(index)

        self.index = index
        return 1
class ITRBranch:
    """Helper class for IterTreeReducer below

    There are five stub functions below: start_process, end_process,
    branch_process, can_fast_process, and fast_process.  A class that
    subclasses this one will probably fill in these functions to do
    more.

    It is important that this class be pickable, so keep that in mind
    when subclassing (this is used to resume failed sessions).
    """
    # State shared by IterTreeReducer: base_index anchors the branch in
    # the index tree; the remaining flags track processing progress.
    base_index = index = None
    finished = None
    caught_exception = start_successful = None

    def call_end_proc(self):
        """Runs the end_process on self, checking for errors"""
        if self.finished or not self.start_successful:
            self.caught_exception = 1
        if not self.caught_exception:
            # Robust comes from the trailing "from robust import *"
            Robust.check_common_error(self.on_error, self.end_process)
        else:
            self.log_prev_error(self.base_index)
        self.finished = 1

    def start_process(self, *args):
        """Do some initial processing (stub)"""
        pass

    def end_process(self):
        """Do any final processing before leaving branch (stub)"""
        pass

    def branch_process(self, branch):
        """Process a branch right after it is finished (stub)"""
        assert branch.finished

    def can_fast_process(self, *args):
        """True if object can be processed without new branch (stub)"""
        return None

    def fast_process(self, *args):
        """Process args without new child branch (stub)"""
        pass

    def on_error(self, exc, *args):
        """This is run on any exception in start/end-process"""
        self.caught_exception = 1
        # Derive a printable filename: prefer the explicit index in
        # args, fall back to self.index, then to the current directory.
        filename = "."
        if args and args[0] and isinstance(args[0], tuple):
            filename = os.path.join(*args[0])
        elif self.index:
            filename = os.path.join(*self.index)
        Log("Error '%s' processing %s" % (exc, filename), 2)

    def log_prev_error(self, index):
        """Call function if no pending exception"""
        Log("Skipping %s because of previous error" %
            (os.path.join(*index),), 2)
# Put at bottom to prevent (viciously) circular module dependencies
from robust import *
from log import *
...@@ -20,6 +20,7 @@ ...@@ -20,6 +20,7 @@
"""Manage logging, displaying and recording messages with required verbosity""" """Manage logging, displaying and recording messages with required verbosity"""
import time, sys, traceback, types import time, sys, traceback, types
import Globals
class LoggerError(Exception): pass class LoggerError(Exception): pass
...@@ -151,6 +152,7 @@ class Logger: ...@@ -151,6 +152,7 @@ class Logger:
def FatalError(self, message): def FatalError(self, message):
self("Fatal Error: " + message, 1) self("Fatal Error: " + message, 1)
import Main
Main.cleanup() Main.cleanup()
sys.exit(1) sys.exit(1)
...@@ -180,4 +182,4 @@ class Logger: ...@@ -180,4 +182,4 @@ class Logger:
logging_func(self.exception_to_string(), verbosity) logging_func(self.exception_to_string(), verbosity)
Log = Logger() Log = Logger()
import Globals, Main
...@@ -20,91 +20,86 @@ ...@@ -20,91 +20,86 @@
"""list, delete, and otherwise manage increments""" """list, delete, and otherwise manage increments"""
from __future__ import generators from __future__ import generators
from static import * from log import Log
from log import * import Globals, Time, static, manage
import Globals, Time
class ManageException(Exception): pass class ManageException(Exception): pass
class Manage: def get_file_type(rp):
def get_file_type(rp): """Returns one of "regular", "directory", "missing", or "special"."""
"""Returns one of "regular", "directory", "missing", or "special".""" if not rp.lstat(): return "missing"
if not rp.lstat(): return "missing" elif rp.isdir(): return "directory"
elif rp.isdir(): return "directory" elif rp.isreg(): return "regular"
elif rp.isreg(): return "regular" else: return "special"
else: return "special"
def get_inc_type(inc):
def get_inc_type(inc): """Return file type increment represents"""
"""Return file type increment represents""" assert inc.isincfile()
assert inc.isincfile() type = inc.getinctype()
type = inc.getinctype() if type == "dir": return "directory"
if type == "dir": return "directory" elif type == "diff": return "regular"
elif type == "diff": return "regular" elif type == "missing": return "missing"
elif type == "missing": return "missing" elif type == "snapshot": return get_file_type(inc)
elif type == "snapshot": return Manage.get_file_type(inc) else: assert None, "Unknown type %s" % (type,)
else: assert None, "Unknown type %s" % (type,)
def describe_incs_parsable(incs, mirror_time, mirrorrp):
def describe_incs_parsable(incs, mirror_time, mirrorrp): """Return a string parsable by computer describing the increments
"""Return a string parsable by computer describing the increments
Each line is a time in seconds of the increment, and then the
Each line is a time in seconds of the increment, and then the type of the file. It will be sorted oldest to newest. For example:
type of the file. It will be sorted oldest to newest. For example:
10000 regular
10000 regular 20000 directory
20000 directory 30000 special
30000 special 40000 missing
40000 missing 50000 regular <- last will be the current mirror
50000 regular <- last will be the current mirror
"""
""" incpairs = [(Time.stringtotime(inc.getinctime()), inc) for inc in incs]
incpairs = [(Time.stringtotime(inc.getinctime()), inc) for inc in incs] incpairs.sort()
incpairs.sort() result = ["%s %s" % (time, get_inc_type(inc)) for time, inc in incpairs]
result = ["%s %s" % (time, Manage.get_inc_type(inc)) result.append("%s %s" % (mirror_time, get_file_type(mirrorrp)))
for time, inc in incpairs] return "\n".join(result)
result.append("%s %s" % (mirror_time, Manage.get_file_type(mirrorrp)))
return "\n".join(result) def describe_incs_human(incs, mirror_time, mirrorrp):
"""Return a string describing all the the root increments"""
def describe_incs_human(incs, mirror_time, mirrorrp): incpairs = [(Time.stringtotime(inc.getinctime()), inc) for inc in incs]
"""Return a string describing all the the root increments""" incpairs.sort()
incpairs = [(Time.stringtotime(inc.getinctime()), inc) for inc in incs]
incpairs.sort() result = ["Found %d increments:" % len(incpairs)]
for time, inc in incpairs:
result = ["Found %d increments:" % len(incpairs)] result.append(" %s %s" %
for time, inc in incpairs: (inc.dirsplit()[1], Time.timetopretty(time)))
result.append(" %s %s" % result.append("Current mirror: %s" % Time.timetopretty(mirror_time))
(inc.dirsplit()[1], Time.timetopretty(time))) return "\n".join(result)
result.append("Current mirror: %s" % Time.timetopretty(mirror_time))
return "\n".join(result) def delete_earlier_than(baserp, time):
"""Deleting increments older than time in directory baserp
def delete_earlier_than(baserp, time):
"""Deleting increments older than time in directory baserp time is in seconds. It will then delete any empty directories
in the tree. To process the entire backup area, the
time is in seconds. It will then delete any empty directories rdiff-backup-data directory should be the root of the tree.
in the tree. To process the entire backup area, the
rdiff-backup-data directory should be the root of the tree. """
baserp.conn.manage.delete_earlier_than_local(baserp, time)
"""
baserp.conn.Manage.delete_earlier_than_local(baserp, time) def delete_earlier_than_local(baserp, time):
"""Like delete_earlier_than, but run on local connection for speed"""
def delete_earlier_than_local(baserp, time): assert baserp.conn is Globals.local_connection
"""Like delete_earlier_than, but run on local connection for speed""" def yield_files(rp):
assert baserp.conn is Globals.local_connection yield rp
def yield_files(rp): if rp.isdir():
yield rp for filename in rp.listdir():
if rp.isdir(): for sub_rp in yield_files(rp.append(filename)):
for filename in rp.listdir(): yield sub_rp
for sub_rp in yield_files(rp.append(filename)):
yield sub_rp for rp in yield_files(baserp):
if ((rp.isincfile() and
for rp in yield_files(baserp): Time.stringtotime(rp.getinctime()) < time) or
if ((rp.isincfile() and (rp.isdir() and not rp.listdir())):
Time.stringtotime(rp.getinctime()) < time) or Log("Deleting increment file %s" % rp.path, 5)
(rp.isdir() and not rp.listdir())): rp.delete()
Log("Deleting increment file %s" % rp.path, 5)
rp.delete()
MakeStatic(Manage)
class IncObj: class IncObj:
......
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
...@@ -26,9 +26,8 @@ documentation on what this code does can be found on the man page. ...@@ -26,9 +26,8 @@ documentation on what this code does can be found on the man page.
from __future__ import generators from __future__ import generators
import re import re
from log import * from log import Log
from robust import * import FilenameMapping, robust, rpath, Globals
import FilenameMapping
class SelectError(Exception): class SelectError(Exception):
...@@ -81,7 +80,7 @@ class Select: ...@@ -81,7 +80,7 @@ class Select:
# This re should not match normal filenames, but usually just globs # This re should not match normal filenames, but usually just globs
glob_re = re.compile("(.*[*?[]|ignorecase\\:)", re.I | re.S) glob_re = re.compile("(.*[*?[]|ignorecase\\:)", re.I | re.S)
def __init__(self, rpath, quoted_filenames = None): def __init__(self, rootrp, quoted_filenames = None):
"""Select initializer. rpath is the root directory """Select initializer. rpath is the root directory
When files have quoted characters in them, quoted_filenames When files have quoted characters in them, quoted_filenames
...@@ -89,9 +88,9 @@ class Select: ...@@ -89,9 +88,9 @@ class Select:
version. version.
""" """
assert isinstance(rpath, RPath) assert isinstance(rootrp, rpath.RPath)
self.selection_functions = [] self.selection_functions = []
self.rpath = rpath self.rpath = rootrp
self.prefix = self.rpath.path self.prefix = self.rpath.path
self.quoting_on = Globals.quoting_enabled and quoted_filenames self.quoting_on = Globals.quoting_enabled and quoted_filenames
...@@ -141,8 +140,8 @@ class Select: ...@@ -141,8 +140,8 @@ class Select:
and should be included iff something inside is included. and should be included iff something inside is included.
""" """
for filename in Robust.listrp(rpath): for filename in robust.listrp(rpath):
new_rpath = Robust.check_common_error(error_handler, new_rpath = robust.check_common_error(error_handler,
rpath.append, (filename,)) rpath.append, (filename,))
if new_rpath: if new_rpath:
s = sel_func(new_rpath) s = sel_func(new_rpath)
...@@ -204,12 +203,12 @@ class Select: ...@@ -204,12 +203,12 @@ class Select:
return None return None
if self.quoting_on: if self.quoting_on:
for subdir in FilenameMapping.get_quoted_dir_children(rpath): for subdir in get_quoted_dir_children(rpath):
for rp in rec_func(subdir, rec_func, sel_func): for rp in rec_func(subdir, rec_func, sel_func):
yield rp yield rp
else: else:
for filename in Robust.listrp(rpath): for filename in robust.listrp(rpath):
new_rp = Robust.check_common_error( new_rp = robust.check_common_error(
error_handler, rpath.append, [filename]) error_handler, rpath.append, [filename])
if new_rp: if new_rp:
for rp in rec_func(new_rp, rec_func, sel_func): for rp in rec_func(new_rp, rec_func, sel_func):
...@@ -646,3 +645,22 @@ probably isn't what you meant.""" % ...@@ -646,3 +645,22 @@ probably isn't what you meant.""" %
return res return res
def get_quoted_dir_children(rpath):
    """For rpath directory, return list of quoted children in dir

    This used to be in FilenameMapping, but was moved because it
    depends on the robust.listrp routine.
    """
    if not rpath.isdir(): return []
    # Pair each child filename with its unquoted form so the sort is by
    # the real (unquoted) index rather than the quoted representation.
    pairs = [(FilenameMapping.unquote(name), name)
             for name in robust.listrp(rpath)]
    pairs.sort()
    children = []
    for unquoted, name in pairs:
        child = rpath.append(unquoted)
        child.quote_path()
        children.append(child)
    return children
...@@ -19,9 +19,8 @@ ...@@ -19,9 +19,8 @@
"""Generate and process aggregated backup information""" """Generate and process aggregated backup information"""
from lazy import * import re, os
import re import Globals, TempFile, robust, Time, rorpiter
class StatsException(Exception): pass class StatsException(Exception): pass
...@@ -216,12 +215,12 @@ class StatsObj: ...@@ -216,12 +215,12 @@ class StatsObj:
def write_stats_to_rp(self, rp): def write_stats_to_rp(self, rp):
"""Write statistics string to given rpath""" """Write statistics string to given rpath"""
tf = TempFileManager.new(rp) tf = TempFile.new(rp)
def init_thunk(): def init_thunk():
fp = tf.open("w") fp = tf.open("w")
fp.write(self.get_stats_string()) fp.write(self.get_stats_string())
fp.close() fp.close()
Robust.make_tf_robustaction(init_thunk, (tf,), (rp,)).execute() robust.make_tf_robustaction(init_thunk, (tf,), (rp,)).execute()
def read_stats_from_rp(self, rp): def read_stats_from_rp(self, rp):
"""Set statistics from rpath, return self for convenience""" """Set statistics from rpath, return self for convenience"""
...@@ -264,7 +263,7 @@ class StatsObj: ...@@ -264,7 +263,7 @@ class StatsObj:
return s return s
class StatsITRB(ITRBranch, StatsObj): class ITRB(rorpiter.ITRBranch, StatsObj):
"""Keep track of per directory statistics """Keep track of per directory statistics
This is subclassed by the mirroring and incrementing ITRs. This is subclassed by the mirroring and incrementing ITRs.
...@@ -339,7 +338,6 @@ class StatsITRB(ITRBranch, StatsObj): ...@@ -339,7 +338,6 @@ class StatsITRB(ITRBranch, StatsObj):
self.__dict__[attr] += branch.__dict__[attr] self.__dict__[attr] += branch.__dict__[attr]
from log import *
from increment import *
from robust import *
import Globals
"""commontest - Some functions and constants common to several test cases""" """commontest - Some functions and constants common to several test cases"""
import os, sys import os, sys
from rdiff_backup.rpath import * from rdiff_backup.log import Log
from rdiff_backup.destructive_stepping import * from rdiff_backup.rpath import RPath
from rdiff_backup.highlevel import * from rdiff_backup import Globals, Hardlink, SetConnections, Main, \
from rdiff_backup import Globals, Hardlink, SetConnections, Main selection, highlevel, lazy, Time, rpath
SourceDir = "../src" SourceDir = "../src"
AbsCurdir = os.getcwd() # Absolute path name of current directory AbsCurdir = os.getcwd() # Absolute path name of current directory
...@@ -13,7 +13,7 @@ __no_execute__ = 1 # Keeps the actual rdiff-backup program from running ...@@ -13,7 +13,7 @@ __no_execute__ = 1 # Keeps the actual rdiff-backup program from running
def Myrm(dirstring): def Myrm(dirstring):
"""Run myrm on given directory string""" """Run myrm on given directory string"""
assert not os.system("%s/myrm %s" % (MiscDir, dirstring)) assert not os.system("rm -rf %s" % (dirstring,))
def Make(): def Make():
"""Make sure the rdiff-backup script in the source dir is up-to-date""" """Make sure the rdiff-backup script in the source dir is up-to-date"""
...@@ -96,8 +96,8 @@ def InternalMirror(source_local, dest_local, src_dir, dest_dir): ...@@ -96,8 +96,8 @@ def InternalMirror(source_local, dest_local, src_dir, dest_dir):
""" """
# Save attributes of root to restore later # Save attributes of root to restore later
src_root = RPath(Globals.local_connection, src_dir) src_root = rpath.RPath(Globals.local_connection, src_dir)
dest_root = RPath(Globals.local_connection, dest_dir) dest_root = rpath.RPath(Globals.local_connection, dest_dir)
dest_rbdir = dest_root.append("rdiff-backup-data") dest_rbdir = dest_root.append("rdiff-backup-data")
dest_incdir = dest_rbdir.append("increments") dest_incdir = dest_rbdir.append("increments")
...@@ -109,9 +109,9 @@ def InternalMirror(source_local, dest_local, src_dir, dest_dir): ...@@ -109,9 +109,9 @@ def InternalMirror(source_local, dest_local, src_dir, dest_dir):
InternalBackup(source_local, dest_local, src_dir, dest_dir) InternalBackup(source_local, dest_local, src_dir, dest_dir)
dest_root.setdata() dest_root.setdata()
dest_rbdir.delete() Myrm(dest_rbdir.path)
# Restore old attributes # Restore old attributes
RPathStatic.copy_attribs(src_root, dest_root) rpath.copy_attribs(src_root, dest_root)
def InternalRestore(mirror_local, dest_local, mirror_dir, dest_dir, time): def InternalRestore(mirror_local, dest_local, mirror_dir, dest_dir, time):
"""Restore mirror_dir to dest_dir at given time """Restore mirror_dir to dest_dir at given time
...@@ -133,7 +133,7 @@ def InternalRestore(mirror_local, dest_local, mirror_dir, dest_dir, time): ...@@ -133,7 +133,7 @@ def InternalRestore(mirror_local, dest_local, mirror_dir, dest_dir, time):
mirror_rp, dest_rp = cmd_schemas2rps([mirror_dir, dest_dir], remote_schema) mirror_rp, dest_rp = cmd_schemas2rps([mirror_dir, dest_dir], remote_schema)
Time.setcurtime() Time.setcurtime()
inc = get_increment_rp(mirror_rp, time) inc = get_increment_rp(mirror_rp, time)
if inc: Main.restore(get_increment_rp(mirror_rp, time), dest_rp) if inc: Main.Restore(get_increment_rp(mirror_rp, time), dest_rp)
else: # use alternate syntax else: # use alternate syntax
Main.restore_timestr = str(time) Main.restore_timestr = str(time)
Main.RestoreAsOf(mirror_rp, dest_rp) Main.RestoreAsOf(mirror_rp, dest_rp)
...@@ -173,7 +173,8 @@ def CompareRecursive(src_rp, dest_rp, compare_hardlinks = 1, ...@@ -173,7 +173,8 @@ def CompareRecursive(src_rp, dest_rp, compare_hardlinks = 1,
Log("Comparing %s and %s, hardlinks %s" % (src_rp.path, dest_rp.path, Log("Comparing %s and %s, hardlinks %s" % (src_rp.path, dest_rp.path,
compare_hardlinks), 3) compare_hardlinks), 3)
src_select, dest_select = Select(src_rp), Select(dest_rp) src_select = selection.Select(src_rp)
dest_select = selection.Select(dest_rp)
if ignore_tmp_files: if ignore_tmp_files:
# Ignoring temp files can be useful when we want to check the # Ignoring temp files can be useful when we want to check the
...@@ -231,16 +232,17 @@ def CompareRecursive(src_rp, dest_rp, compare_hardlinks = 1, ...@@ -231,16 +232,17 @@ def CompareRecursive(src_rp, dest_rp, compare_hardlinks = 1,
Hardlink.get_indicies(dest_rorp, None)), 3) Hardlink.get_indicies(dest_rorp, None)), 3)
return None return None
if equality_func: result = Iter.equal(dsiter1, dsiter2, 1, equality_func) if equality_func: result = lazy.Iter.equal(dsiter1, dsiter2,
1, equality_func)
elif compare_hardlinks: elif compare_hardlinks:
dsiter1 = Hardlink.add_rorp_iter(dsiter1, 1) dsiter1 = Hardlink.add_rorp_iter(dsiter1, 1)
dsiter2 = Hardlink.add_rorp_iter(dsiter2, None) dsiter2 = Hardlink.add_rorp_iter(dsiter2, None)
if exclude_rbdir: if exclude_rbdir:
result = Iter.equal(dsiter1, dsiter2, 1, hardlink_equal) result = lazy.Iter.equal(dsiter1, dsiter2, 1, hardlink_equal)
else: result = Iter.equal(dsiter1, dsiter2, 1, rbdir_equal) else: result = lazy.Iter.equal(dsiter1, dsiter2, 1, rbdir_equal)
elif not exclude_rbdir: elif not exclude_rbdir:
result = Iter.equal(dsiter1, dsiter2, 1, rbdir_equal) result = lazy.Iter.equal(dsiter1, dsiter2, 1, rbdir_equal)
else: result = Iter.equal(dsiter1, dsiter2, 1) else: result = lazy.Iter.equal(dsiter1, dsiter2, 1)
for i in dsiter1: pass # make sure all files processed anyway for i in dsiter1: pass # make sure all files processed anyway
for i in dsiter2: pass for i in dsiter2: pass
...@@ -269,12 +271,12 @@ def BackupRestoreSeries(source_local, dest_local, list_of_dirnames, ...@@ -269,12 +271,12 @@ def BackupRestoreSeries(source_local, dest_local, list_of_dirnames,
""" """
Globals.set('preserve_hardlinks', compare_hardlinks) Globals.set('preserve_hardlinks', compare_hardlinks)
time = 10000 time = 10000
dest_rp = RPath(Globals.local_connection, dest_dirname) dest_rp = rpath.RPath(Globals.local_connection, dest_dirname)
restore_rp = RPath(Globals.local_connection, restore_dirname) restore_rp = rpath.RPath(Globals.local_connection, restore_dirname)
os.system(MiscDir + "/myrm " + dest_dirname) Myrm(dest_dirname)
for dirname in list_of_dirnames: for dirname in list_of_dirnames:
src_rp = RPath(Globals.local_connection, dirname) src_rp = rpath.RPath(Globals.local_connection, dirname)
reset_hardlink_dicts() reset_hardlink_dicts()
_reset_connections(src_rp, dest_rp) _reset_connections(src_rp, dest_rp)
...@@ -287,10 +289,10 @@ def BackupRestoreSeries(source_local, dest_local, list_of_dirnames, ...@@ -287,10 +289,10 @@ def BackupRestoreSeries(source_local, dest_local, list_of_dirnames,
time = 10000 time = 10000
for dirname in list_of_dirnames[:-1]: for dirname in list_of_dirnames[:-1]:
reset_hardlink_dicts() reset_hardlink_dicts()
os.system(MiscDir + "/myrm " + restore_dirname) Myrm(restore_dirname)
InternalRestore(dest_local, source_local, dest_dirname, InternalRestore(dest_local, source_local, dest_dirname,
restore_dirname, time) restore_dirname, time)
src_rp = RPath(Globals.local_connection, dirname) src_rp = rpath.RPath(Globals.local_connection, dirname)
assert CompareRecursive(src_rp, restore_rp) assert CompareRecursive(src_rp, restore_rp)
# Restore should default back to newest time older than it # Restore should default back to newest time older than it
...@@ -304,11 +306,11 @@ def MirrorTest(source_local, dest_local, list_of_dirnames, ...@@ -304,11 +306,11 @@ def MirrorTest(source_local, dest_local, list_of_dirnames,
dest_dirname = "testfiles/output"): dest_dirname = "testfiles/output"):
"""Mirror each of list_of_dirnames, and compare after each""" """Mirror each of list_of_dirnames, and compare after each"""
Globals.set('preserve_hardlinks', compare_hardlinks) Globals.set('preserve_hardlinks', compare_hardlinks)
dest_rp = RPath(Globals.local_connection, dest_dirname) dest_rp = rpath.RPath(Globals.local_connection, dest_dirname)
os.system(MiscDir + "/myrm " + dest_dirname) Myrm(dest_dirname)
for dirname in list_of_dirnames: for dirname in list_of_dirnames:
src_rp = RPath(Globals.local_connection, dirname) src_rp = rpath.RPath(Globals.local_connection, dirname)
reset_hardlink_dicts() reset_hardlink_dicts()
_reset_connections(src_rp, dest_rp) _reset_connections(src_rp, dest_rp)
......
import unittest, types, tempfile, os, sys import unittest, types, tempfile, os, sys
from commontest import * from commontest import *
from rdiff_backup.connection import * from rdiff_backup.connection import *
from rdiff_backup import Globals from rdiff_backup import Globals, rpath
class LocalConnectionTest(unittest.TestCase): class LocalConnectionTest(unittest.TestCase):
"""Test the dummy connection""" """Test the dummy connection"""
...@@ -104,7 +104,7 @@ class PipeConnectionTest(unittest.TestCase): ...@@ -104,7 +104,7 @@ class PipeConnectionTest(unittest.TestCase):
"""Test module emulation""" """Test module emulation"""
assert type(self.conn.tempfile.mktemp()) is types.StringType assert type(self.conn.tempfile.mktemp()) is types.StringType
assert self.conn.os.path.join("a", "b") == "a/b" assert self.conn.os.path.join("a", "b") == "a/b"
rp1 = RPath(self.conn, self.regfilename) rp1 = rpath.RPath(self.conn, self.regfilename)
assert rp1.isreg() assert rp1.isreg()
def testVirtualFiles(self): def testVirtualFiles(self):
...@@ -112,17 +112,17 @@ class PipeConnectionTest(unittest.TestCase): ...@@ -112,17 +112,17 @@ class PipeConnectionTest(unittest.TestCase):
tempout = self.conn.open("testfiles/tempout", "w") tempout = self.conn.open("testfiles/tempout", "w")
assert isinstance(tempout, VirtualFile) assert isinstance(tempout, VirtualFile)
regfilefp = open(self.regfilename, "r") regfilefp = open(self.regfilename, "r")
RPath.copyfileobj(regfilefp, tempout) rpath.copyfileobj(regfilefp, tempout)
tempout.close() tempout.close()
regfilefp.close() regfilefp.close()
tempoutlocal = open("testfiles/tempout", "r") tempoutlocal = open("testfiles/tempout", "r")
regfilefp = open(self.regfilename, "r") regfilefp = open(self.regfilename, "r")
assert RPath.cmpfileobj(regfilefp, tempoutlocal) assert rpath.cmpfileobj(regfilefp, tempoutlocal)
tempoutlocal.close() tempoutlocal.close()
regfilefp.close() regfilefp.close()
os.unlink("testfiles/tempout") os.unlink("testfiles/tempout")
assert RPath.cmpfileobj(self.conn.open(self.regfilename, "r"), assert rpath.cmpfileobj(self.conn.open(self.regfilename, "r"),
open(self.regfilename, "r")) open(self.regfilename, "r"))
def testString(self): def testString(self):
...@@ -139,7 +139,8 @@ class PipeConnectionTest(unittest.TestCase): ...@@ -139,7 +139,8 @@ class PipeConnectionTest(unittest.TestCase):
def testRPaths(self): def testRPaths(self):
"""Test transmission of rpaths""" """Test transmission of rpaths"""
rp = RPath(self.conn, "testfiles/various_file_types/regular_file") rp = rpath.RPath(self.conn,
"testfiles/various_file_types/regular_file")
assert self.conn.reval("lambda rp: rp.data", rp) == rp.data assert self.conn.reval("lambda rp: rp.data", rp) == rp.data
assert self.conn.reval("lambda rp: rp.conn is Globals.local_connection", rp) assert self.conn.reval("lambda rp: rp.conn is Globals.local_connection", rp)
...@@ -192,7 +193,7 @@ class RedirectedConnectionTest(unittest.TestCase): ...@@ -192,7 +193,7 @@ class RedirectedConnectionTest(unittest.TestCase):
def testRpaths(self): def testRpaths(self):
"""Test moving rpaths back and forth across connections""" """Test moving rpaths back and forth across connections"""
rp = RPath(self.conna, "foo") rp = rpath.RPath(self.conna, "foo")
self.connb.Globals.set("tmp_rpath", rp) self.connb.Globals.set("tmp_rpath", rp)
rp_returned = self.connb.Globals.get("tmp_rpath") rp_returned = self.connb.Globals.get("tmp_rpath")
assert rp_returned.conn is rp.conn assert rp_returned.conn is rp.conn
......
import unittest import unittest
from commontest import * from commontest import *
from rdiff_backup.C import * from rdiff_backup import C
from rdiff_backup.rpath import * from rdiff_backup.rpath import *
class CTest(unittest.TestCase): class CTest(unittest.TestCase):
......
from __future__ import generators from __future__ import generators
import unittest import unittest
from commontest import * from commontest import *
from rdiff_backup.rpath import * from rdiff_backup import rpath, selection, Globals, destructive_stepping
from rdiff_backup.selection import *
from rdiff_backup import Globals
Log.setverbosity(4) Log.setverbosity(4)
class DSTest(unittest.TestCase): class DSTest(unittest.TestCase):
def setUp(self): def setUp(self):
self.lc = Globals.local_connection self.lc = Globals.local_connection
self.noperms = RPath(self.lc, "testfiles/noperms") self.noperms = rpath.RPath(self.lc, "testfiles/noperms")
Globals.change_source_perms = 1 Globals.change_source_perms = 1
self.iteration_dir = RPath(self.lc, "testfiles/iteration-test") self.iteration_dir = rpath.RPath(self.lc, "testfiles/iteration-test")
def testDSIter(self): def testDSIter(self):
"""Testing destructive stepping iterator from baserp""" """Testing destructive stepping iterator from baserp"""
for i in range(2): for i in range(2):
sel = Select(DSRPath(1, self.noperms)).set_iter() sel = selection.Select(destructive_stepping.
DSRPath(1, self.noperms)).set_iter()
ds_iter = sel.iterate_with_finalizer() ds_iter = sel.iterate_with_finalizer()
noperms = ds_iter.next() noperms = ds_iter.next()
assert noperms.isdir() and noperms.getperms() == 0, \ assert noperms.isdir() and noperms.getperms() == 0, \
......
import unittest, os, re, sys import unittest, os, re, sys, time
from commontest import * from commontest import *
from rdiff_backup.log import * from rdiff_backup import Globals, log, rpath
from rdiff_backup.rpath import *
from rdiff_backup import Globals
"""Regression tests""" """Regression tests"""
Globals.exclude_mirror_regexps = [re.compile(".*/rdiff-backup-data")] Globals.exclude_mirror_regexps = [re.compile(".*/rdiff-backup-data")]
Log.setverbosity(7) log.Log.setverbosity(7)
lc = Globals.local_connection lc = Globals.local_connection
...@@ -15,7 +13,7 @@ class Local: ...@@ -15,7 +13,7 @@ class Local:
"""This is just a place to put increments relative to the local """This is just a place to put increments relative to the local
connection""" connection"""
def get_local_rp(extension): def get_local_rp(extension):
return RPath(Globals.local_connection, "testfiles/" + extension) return rpath.RPath(Globals.local_connection, "testfiles/" + extension)
vftrp = get_local_rp('various_file_types') vftrp = get_local_rp('various_file_types')
inc1rp = get_local_rp('increment1') inc1rp = get_local_rp('increment1')
...@@ -154,7 +152,7 @@ class PathSetter(unittest.TestCase): ...@@ -154,7 +152,7 @@ class PathSetter(unittest.TestCase):
"testfiles/output/rdiff-backup-data/increments") "testfiles/output/rdiff-backup-data/increments")
self.exec_rb(None, timbar_paths[0]) self.exec_rb(None, timbar_paths[0])
self.refresh(Local.timbar_in, Local.timbar_out) self.refresh(Local.timbar_in, Local.timbar_out)
assert RPath.cmp_with_attribs(Local.timbar_in, Local.timbar_out) assert rpath.cmp_with_attribs(Local.timbar_in, Local.timbar_out)
self.exec_rb_restore(25000, 'testfiles/output/various_file_types', self.exec_rb_restore(25000, 'testfiles/output/various_file_types',
'testfiles/vft2_out') 'testfiles/vft2_out')
...@@ -173,8 +171,8 @@ class PathSetter(unittest.TestCase): ...@@ -173,8 +171,8 @@ class PathSetter(unittest.TestCase):
incfiles = filter(lambda s: s.startswith(basename), incfiles = filter(lambda s: s.startswith(basename),
os.listdir(directory)) os.listdir(directory))
incfiles.sort() incfiles.sort()
incrps = map(lambda f: RPath(lc, directory+"/"+f), incfiles) incrps = map(lambda f: rpath.RPath(lc, directory+"/"+f), incfiles)
return map(lambda x: x.path, filter(RPath.isincfile, incrps)) return map(lambda x: x.path, filter(rpath.RPath.isincfile, incrps))
class Final(PathSetter): class Final(PathSetter):
...@@ -287,7 +285,7 @@ testfiles/increment2/changed_dir""") ...@@ -287,7 +285,7 @@ testfiles/increment2/changed_dir""")
"testfiles/output/changed_dir/foo") "testfiles/output/changed_dir/foo")
# Test selective restoring # Test selective restoring
mirror_rp = RPath(Globals.local_connection, "testfiles/output") mirror_rp = rpath.RPath(Globals.local_connection, "testfiles/output")
restore_filename = get_increment_rp(mirror_rp, 10000).path restore_filename = get_increment_rp(mirror_rp, 10000).path
assert not os.system(self.rb_schema + assert not os.system(self.rb_schema +
"--include testfiles/restoretarget1/various_file_types/" "--include testfiles/restoretarget1/various_file_types/"
...@@ -321,8 +319,8 @@ testfiles/increment2/changed_dir""") ...@@ -321,8 +319,8 @@ testfiles/increment2/changed_dir""")
# Make an exclude list # Make an exclude list
os.mkdir("testfiles/vft_out") os.mkdir("testfiles/vft_out")
excluderp = RPath(Globals.local_connection, excluderp = rpath.RPath(Globals.local_connection,
"testfiles/vft_out/exclude") "testfiles/vft_out/exclude")
fp = excluderp.open("w") fp = excluderp.open("w")
fp.write(""" fp.write("""
../testfiles/various_file_types/regular_file ../testfiles/various_file_types/regular_file
...@@ -331,8 +329,8 @@ testfiles/increment2/changed_dir""") ...@@ -331,8 +329,8 @@ testfiles/increment2/changed_dir""")
assert not fp.close() assert not fp.close()
# Make an include list # Make an include list
includerp = RPath(Globals.local_connection, includerp = rpath.RPath(Globals.local_connection,
"testfiles/vft_out/include") "testfiles/vft_out/include")
fp = includerp.open("w") fp = includerp.open("w")
fp.write(""" fp.write("""
../testfiles/various_file_types/executable ../testfiles/various_file_types/executable
......
...@@ -6,7 +6,7 @@ class RemoteMirrorTest(unittest.TestCase): ...@@ -6,7 +6,7 @@ class RemoteMirrorTest(unittest.TestCase):
"""Test mirroring""" """Test mirroring"""
def setUp(self): def setUp(self):
"""Start server""" """Start server"""
Log.setverbosity(7) Log.setverbosity(3)
Globals.change_source_perms = 1 Globals.change_source_perms = 1
SetConnections.UpdateGlobal('checkpoint_interval', 3) SetConnections.UpdateGlobal('checkpoint_interval', 3)
......
import unittest, os import unittest, os, re, time
from commontest import * from commontest import *
from rdiff_backup.log import * from rdiff_backup import log, rpath, restore, increment, Time, \
from rdiff_backup.rpath import * Rdiff, statistics
from rdiff_backup.restore import *
lc = Globals.local_connection lc = Globals.local_connection
Globals.change_source_perms = 1 Globals.change_source_perms = 1
Log.setverbosity(3) Log.setverbosity(3)
def getrp(ending): def getrp(ending):
return RPath(lc, "testfiles/various_file_types/" + ending) return rpath.RPath(lc, "testfiles/various_file_types/" + ending)
rf = getrp("regular_file") rf = getrp("regular_file")
rf2 = getrp("two_hardlinked_files1") rf2 = getrp("two_hardlinked_files1")
...@@ -22,11 +21,11 @@ dir = getrp(".") ...@@ -22,11 +21,11 @@ dir = getrp(".")
sym = getrp("symbolic_link") sym = getrp("symbolic_link")
nothing = getrp("nothing") nothing = getrp("nothing")
target = RPath(lc, "testfiles/out") target = rpath.RPath(lc, "testfiles/out")
out2 = RPath(lc, "testfiles/out2") out2 = rpath.RPath(lc, "testfiles/out2")
out_gz = RPath(lc, "testfiles/out.gz") out_gz = rpath.RPath(lc, "testfiles/out.gz")
Time.setprevtime(999424113.24931) Time.setprevtime(999424113)
prevtimestr = "2001-09-02T02:48:33-07:00" prevtimestr = "2001-09-02T02:48:33-07:00"
t_pref = "testfiles/out.2001-09-02T02:48:33-07:00" t_pref = "testfiles/out.2001-09-02T02:48:33-07:00"
t_diff = "testfiles/out.2001-09-02T02:48:33-07:00.diff" t_diff = "testfiles/out.2001-09-02T02:48:33-07:00.diff"
...@@ -39,78 +38,72 @@ class inctest(unittest.TestCase): ...@@ -39,78 +38,72 @@ class inctest(unittest.TestCase):
def setUp(self): def setUp(self):
Globals.set('isbackup_writer',1) Globals.set('isbackup_writer',1)
def check_time(self, rp):
"""Make sure that rp is an inc file, and time is Time.prevtime"""
assert rp.isincfile(), rp
t = Time.stringtotime(rp.getinctime())
assert t == Time.prevtime, (t, Time.prevtime)
def testreg(self): def testreg(self):
"""Test increment of regular files""" """Test increment of regular files"""
Globals.compression = None Globals.compression = None
target.setdata() target.setdata()
if target.lstat(): target.delete() if target.lstat(): target.delete()
rpd = RPath(lc, t_diff) rpd = rpath.RPath(lc, t_diff)
if rpd.lstat(): rpd.delete() if rpd.lstat(): rpd.delete()
Inc.Increment(rf, exec1, target) diffrp = increment.Increment(rf, exec1, target)
rpd.setdata() assert diffrp.isreg(), diffrp
assert rpd.isreg(), rpd assert rpath.cmp_attribs(diffrp, exec1)
assert RPath.cmp_attribs(rpd, exec1) self.check_time(diffrp)
rpd.delete() assert diffrp.getinctype() == 'diff', diffrp.getinctype()
diffrp.delete()
def testmissing(self): def testmissing(self):
"""Test creation of missing files""" """Test creation of missing files"""
Inc.Increment(rf, nothing, target) missing_rp = increment.Increment(rf, nothing, target)
rp = RPath(lc, t_pref + ".missing") self.check_time(missing_rp)
assert rp.lstat() assert missing_rp.getinctype() == 'missing'
rp.delete() missing_rp.delete()
def testsnapshot(self): def testsnapshot(self):
"""Test making of a snapshot""" """Test making of a snapshot"""
Globals.compression = None Globals.compression = None
rp = RPath(lc, t_pref + ".snapshot") snap_rp = increment.Increment(rf, sym, target)
if rp.lstat(): rp.delete() self.check_time(snap_rp)
Inc.Increment(rf, sym, target) assert rpath.cmp_attribs(snap_rp, sym)
rp.setdata() assert rpath.cmp(snap_rp, sym)
assert rp.lstat() snap_rp.delete()
assert RPath.cmp_attribs(rp, sym)
assert RPath.cmp(rp, sym) snap_rp2 = increment.Increment(sym, rf, target)
rp.delete() self.check_time(snap_rp2)
assert rpath.cmp_attribs(snap_rp2, rf)
rp = RPath(lc, t_pref + ".snapshot") assert rpath.cmp(snap_rp2, rf)
if rp.lstat(): rp.delete() snap_rp2.delete()
Inc.Increment(sym, rf, target)
rp.setdata()
assert rp.lstat()
assert RPath.cmp_attribs(rp, rf)
assert RPath.cmp(rp, rf)
rp.delete()
def testGzipsnapshot(self): def testGzipsnapshot(self):
"""Test making a compressed snapshot""" """Test making a compressed snapshot"""
Globals.compression = 1 Globals.compression = 1
rp = RPath(lc, t_pref + ".snapshot") rp = increment.Increment(rf, sym, target)
if rp.lstat(): rp.delete() self.check_time(rp)
Inc.Increment(rf, sym, target) assert rpath.cmp_attribs(rp, sym)
rp.setdata() assert rpath.cmp(rp, sym)
assert rp.lstat()
assert RPath.cmp_attribs(rp, sym)
assert RPath.cmp(rp, sym)
rp.delete() rp.delete()
rp = RPath(lc, t_pref + ".snapshot.gz") rp = increment.Increment(sym, rf, target)
if rp.lstat(): rp.delete() self.check_time(rp)
Inc.Increment(sym, rf, target) assert rpath.cmp_attribs(rp, rf)
rp.setdata() assert rpath.cmpfileobj(rp.open("rb", 1), rf.open("rb"))
assert rp.isinccompressed()
assert rp.lstat()
assert RPath.cmp_attribs(rp, rf)
assert RPath.cmpfileobj(rp.open("rb", 1), rf.open("rb"))
rp.delete() rp.delete()
def testdir(self): def testdir(self):
"""Test increment on dir""" """Test increment on dir"""
Inc.Increment(sym, dir, target) rp = increment.Increment(sym, dir, target)
rp = RPath(lc, t_pref + ".dir") self.check_time(rp)
rp2 = RPath(lc, t_pref)
assert rp.lstat() assert rp.lstat()
assert target.isdir() assert target.isdir()
assert RPath.cmp_attribs(dir, rp) assert rpath.cmp_attribs(dir, rp)
assert rp.isreg() assert rp.isreg()
rp.delete() rp.delete()
target.delete() target.delete()
...@@ -118,46 +111,36 @@ class inctest(unittest.TestCase): ...@@ -118,46 +111,36 @@ class inctest(unittest.TestCase):
def testDiff(self): def testDiff(self):
"""Test making diffs""" """Test making diffs"""
Globals.compression = None Globals.compression = None
rp = RPath(lc, t_pref + '.diff') rp = increment.Increment(rf, rf2, target)
if rp.lstat(): rp.delete() self.check_time(rp)
Inc.Increment(rf, rf2, target) assert rpath.cmp_attribs(rp, rf2)
rp.setdata()
assert rp.lstat()
assert RPath.cmp_attribs(rp, rf2)
Rdiff.patch_action(rf, rp, out2).execute() Rdiff.patch_action(rf, rp, out2).execute()
assert RPath.cmp(rf2, out2) assert rpath.cmp(rf2, out2)
rp.delete() rp.delete()
out2.delete() out2.delete()
def testGzipDiff(self): def testGzipDiff(self):
"""Test making gzipped diffs""" """Test making gzipped diffs"""
Globals.compression = 1 Globals.compression = 1
rp = RPath(lc, t_pref + '.diff.gz') rp = increment.Increment(rf, rf2, target)
if rp.lstat(): rp.delete() self.check_time(rp)
Inc.Increment(rf, rf2, target) assert rpath.cmp_attribs(rp, rf2)
rp.setdata()
assert rp.lstat()
assert RPath.cmp_attribs(rp, rf2)
Rdiff.patch_action(rf, rp, out2, delta_compressed = 1).execute() Rdiff.patch_action(rf, rp, out2, delta_compressed = 1).execute()
assert RPath.cmp(rf2, out2) assert rpath.cmp(rf2, out2)
rp.delete() rp.delete()
out2.delete() out2.delete()
def testGzipRegexp(self): def testGzipRegexp(self):
"""Here a .gz file shouldn't be compressed""" """Here a .gz file shouldn't be compressed"""
Globals.compression = 1 Globals.compression = 1
RPath.copy(rf, out_gz) rpath.copy(rf, out_gz)
assert out_gz.lstat() assert out_gz.lstat()
rp = RPath(lc, t_pref + '.diff') rp = increment.Increment(rf, out_gz, target)
if rp.lstat(): rp.delete() self.check_time(rp)
assert rpath.cmp_attribs(rp, out_gz)
Inc.Increment(rf, out_gz, target)
rp.setdata()
assert rp.lstat()
assert RPath.cmp_attribs(rp, out_gz)
Rdiff.patch_action(rf, rp, out2).execute() Rdiff.patch_action(rf, rp, out2).execute()
assert RPath.cmp(out_gz, out2) assert rpath.cmp(out_gz, out2)
rp.delete() rp.delete()
out2.delete() out2.delete()
out_gz.delete() out_gz.delete()
...@@ -194,8 +177,8 @@ class inctest2(unittest.TestCase): ...@@ -194,8 +177,8 @@ class inctest2(unittest.TestCase):
InternalBackup(1, 1, "testfiles/stattest2", "testfiles/output", InternalBackup(1, 1, "testfiles/stattest2", "testfiles/output",
time.time()+1) time.time()+1)
rbdir = RPath(Globals.local_connection, rbdir = rpath.RPath(Globals.local_connection,
"testfiles/output/rdiff-backup-data") "testfiles/output/rdiff-backup-data")
#incs = Restore.get_inclist(rbdir.append("subdir"). #incs = Restore.get_inclist(rbdir.append("subdir").
# append("directory_statistics")) # append("directory_statistics"))
...@@ -217,14 +200,14 @@ class inctest2(unittest.TestCase): ...@@ -217,14 +200,14 @@ class inctest2(unittest.TestCase):
#assert 400000 < subdir_stats.ChangedMirrorSize < 420000 #assert 400000 < subdir_stats.ChangedMirrorSize < 420000
#assert 10 < subdir_stats.IncrementFileSize < 20000 #assert 10 < subdir_stats.IncrementFileSize < 20000
incs = Restore.get_inclist(rbdir.append("session_statistics")) incs = restore.get_inclist(rbdir.append("session_statistics"))
assert len(incs) == 2 assert len(incs) == 2
s2 = StatsObj().read_stats_from_rp(incs[0]) s2 = statistics.StatsObj().read_stats_from_rp(incs[0])
assert s2.SourceFiles == 7 assert s2.SourceFiles == 7
assert 700000 < s2.SourceFileSize < 750000 assert 700000 < s2.SourceFileSize < 750000
self.stats_check_initial(s2) self.stats_check_initial(s2)
root_stats = StatsObj().read_stats_from_rp(incs[1]) root_stats = statistics.StatsObj().read_stats_from_rp(incs[1])
assert root_stats.SourceFiles == 7, root_stats.SourceFiles assert root_stats.SourceFiles == 7, root_stats.SourceFiles
assert 550000 < root_stats.SourceFileSize < 570000 assert 550000 < root_stats.SourceFileSize < 570000
assert root_stats.MirrorFiles == 7 assert root_stats.MirrorFiles == 7
......
import unittest, StringIO import unittest, StringIO
from commontest import * from commontest import *
from rdiff_backup.iterfile import * from rdiff_backup.iterfile import *
from rdiff_backup import lazy
class testIterFile(unittest.TestCase): class testIterFile(unittest.TestCase):
...@@ -11,8 +12,8 @@ class testIterFile(unittest.TestCase): ...@@ -11,8 +12,8 @@ class testIterFile(unittest.TestCase):
def testConversion(self): def testConversion(self):
"""Test iter to file conversion""" """Test iter to file conversion"""
for itm in [self.iter1maker, self.iter2maker]: for itm in [self.iter1maker, self.iter2maker]:
assert Iter.equal(itm(), assert lazy.Iter.equal(itm(),
IterWrappingFile(FileWrappingIter(itm()))) IterWrappingFile(FileWrappingIter(itm())))
class testBufferedRead(unittest.TestCase): class testBufferedRead(unittest.TestCase):
def testBuffering(self): def testBuffering(self):
......
This diff is collapsed.
import unittest, os, cStringIO, time import unittest, os, cStringIO, time
from rdiff_backup.metadata import * from rdiff_backup.metadata import *
from rdiff_backup import rpath, Globals, selection, destructive_stepping from rdiff_backup import rpath, connection, Globals, selection, \
destructive_stepping
tempdir = rpath.RPath(Globals.local_connection, "testfiles/output") tempdir = rpath.RPath(Globals.local_connection, "testfiles/output")
...@@ -61,9 +62,8 @@ class MetadataTest(unittest.TestCase): ...@@ -61,9 +62,8 @@ class MetadataTest(unittest.TestCase):
if temprp.lstat(): return temprp if temprp.lstat(): return temprp
self.make_temp() self.make_temp()
root = rpath.RPath(Globals.local_connection, "testfiles/bigdir") rootrp = rpath.RPath(Globals.local_connection, "testfiles/bigdir")
dsrp_root = destructive_stepping.DSRPath(1, root) rpath_iter = selection.Select(rootrp).set_iter()
rpath_iter = selection.Select(dsrp_root).set_iter()
start_time = time.time() start_time = time.time()
OpenMetadata(temprp) OpenMetadata(temprp)
......
This diff is collapsed.
import unittest import unittest
from commontest import * from commontest import *
from rdiff_backup.log import * from rdiff_backup import log, restore, Globals, rpath
from rdiff_backup.restore import *
from rdiff_backup import Globals
Log.setverbosity(3) Log.setverbosity(3)
...@@ -23,26 +21,26 @@ class RestoreTest(unittest.TestCase): ...@@ -23,26 +21,26 @@ class RestoreTest(unittest.TestCase):
dirlist = os.listdir(self.prefix) dirlist = os.listdir(self.prefix)
dirlist.sort() dirlist.sort()
baselist = filter(lambda f: f.startswith(basename), dirlist) baselist = filter(lambda f: f.startswith(basename), dirlist)
rps = map(lambda f: RPath(lc, self.prefix+f), baselist) rps = map(lambda f: rpath.RPath(lc, self.prefix+f), baselist)
incs = filter(lambda rp: rp.isincfile(), rps) incs = filter(lambda rp: rp.isincfile(), rps)
tuples = map(lambda rp: (rp, RPath(lc, "%s.%s" % tuples = map(lambda rp: (rp, rpath.RPath(lc, "%s.%s" %
(rp.getincbase().path, (rp.getincbase().path,
rp.getinctime()))), rp.getinctime()))),
incs) incs)
return tuples, incs return tuples, incs
def restoreonefiletest(self, basename): def restoreonefiletest(self, basename):
tuples, incs = self.maketesttuples(basename) tuples, incs = self.maketesttuples(basename)
rpbase = RPath(lc, self.prefix + basename) rpbase = rpath.RPath(lc, self.prefix + basename)
rptarget = RPath(lc, "testfiles/outfile") rptarget = rpath.RPath(lc, "testfiles/outfile")
for pair in tuples: for pair in tuples:
print "Processing file " + pair[0].path print "Processing file " + pair[0].path
if rptarget.lstat(): rptarget.delete() if rptarget.lstat(): rptarget.delete()
rest_time = Time.stringtotime(pair[0].getinctime()) rest_time = Time.stringtotime(pair[0].getinctime())
rid = RestoreIncrementData((), rpbase, incs) rid = restore.RestoreIncrementData((), rpbase, incs)
rid.sortincseq(rest_time, 10000000000) # pick some really late time rid.sortincseq(rest_time, 10000000000) # pick some really late time
rcd = RestoreCombinedData(rid, rpbase, rptarget) rcd = restore.RestoreCombinedData(rid, rpbase, rptarget)
rcd.RestoreFile() rcd.RestoreFile()
#sorted_incs = Restore.sortincseq(rest_time, incs) #sorted_incs = Restore.sortincseq(rest_time, incs)
#Restore.RestoreFile(rest_time, rpbase, (), sorted_incs, rptarget) #Restore.RestoreFile(rest_time, rpbase, (), sorted_incs, rptarget)
...@@ -50,9 +48,9 @@ class RestoreTest(unittest.TestCase): ...@@ -50,9 +48,9 @@ class RestoreTest(unittest.TestCase):
if not rptarget.lstat(): assert not pair[1].lstat() if not rptarget.lstat(): assert not pair[1].lstat()
elif not pair[1].lstat(): assert not rptarget.lstat() elif not pair[1].lstat(): assert not rptarget.lstat()
else: else:
assert RPath.cmp(rptarget, pair[1]), \ assert rpath.cmp(rptarget, pair[1]), \
"%s %s" % (rptarget.path, pair[1].path) "%s %s" % (rptarget.path, pair[1].path)
assert RPath.cmp_attribs(rptarget, pair[1]), \ assert rpath.cmp_attribs(rptarget, pair[1]), \
"%s %s" % (rptarget.path, pair[1].path) "%s %s" % (rptarget.path, pair[1].path)
rptarget.delete() rptarget.delete()
...@@ -75,7 +73,7 @@ class RestoreTest(unittest.TestCase): ...@@ -75,7 +73,7 @@ class RestoreTest(unittest.TestCase):
for inc, incbase in tuples: for inc, incbase in tuples:
assert inc.isincfile() assert inc.isincfile()
inctime = Time.stringtotime(inc.getinctime()) inctime = Time.stringtotime(inc.getinctime())
rid1 = RestoreIncrementData(basename, incbase, incs) rid1 = restore.RestoreIncrementData(basename, incbase, incs)
rid1.sortincseq(inctime, mirror_time) rid1.sortincseq(inctime, mirror_time)
assert rid1.inc_list, rid1.inc_list assert rid1.inc_list, rid1.inc_list
# oldest increment should be exactly inctime # oldest increment should be exactly inctime
...@@ -97,8 +95,8 @@ class RestoreTest(unittest.TestCase): ...@@ -97,8 +95,8 @@ class RestoreTest(unittest.TestCase):
InternalRestore(1, 1, "testfiles/restoretest3", InternalRestore(1, 1, "testfiles/restoretest3",
"testfiles/output", 20000) "testfiles/output", 20000)
src_rp = RPath(Globals.local_connection, "testfiles/increment2") src_rp = rpath.RPath(Globals.local_connection, "testfiles/increment2")
restore_rp = RPath(Globals.local_connection, "testfiles/output") restore_rp = rpath.RPath(Globals.local_connection, "testfiles/output")
assert CompareRecursive(src_rp, restore_rp) assert CompareRecursive(src_rp, restore_rp)
def testRestoreCorrupt(self): def testRestoreCorrupt(self):
......
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment