Commit e95885f3 authored by bescoto's avatar bescoto

Major refactoring - avoid use of 'from XX import *' in favor of the

more conventional 'import XXX' syntax.  The previous style was an
artifact of earlier versions, where the whole program fit in one file.


git-svn-id: http://svn.savannah.nongnu.org/svn/rdiff-backup@252 2b77aa54-bcbc-44c9-a7ec-4f6cf2b41109
parent 7cfed788
......@@ -27,9 +27,7 @@ them over the usual 255 character limit.
"""
import re
from log import *
from robust import *
import Globals
import Globals, log
max_filename_length = 255
......@@ -55,7 +53,7 @@ def set_init_quote_vals_local():
global chars_to_quote, quoting_char
chars_to_quote = Globals.chars_to_quote
if len(Globals.quoting_char) != 1:
Log.FatalError("Expected single character for quoting char,"
log.Log.FatalError("Expected single character for quoting char,"
"got '%s' instead" % (Globals.quoting_char,))
quoting_char = Globals.quoting_char
init_quoting_regexps()
......@@ -68,7 +66,7 @@ def init_quoting_regexps():
re.compile("[%s%s]" % (chars_to_quote, quoting_char), re.S)
unquoting_regexp = re.compile("%s[0-9]{3}" % quoting_char, re.S)
except re.error:
Log.FatalError("Error '%s' when processing char quote list %s" %
log.Log.FatalError("Error '%s' when processing char quote list %s" %
(re.error, chars_to_quote))
def quote(path):
......@@ -95,18 +93,4 @@ def unquote_single(match):
assert len(match.group()) == 4
return chr(int(match.group()[1:]))
def get_quoted_dir_children(rpath):
"""For rpath directory, return list of quoted children in dir"""
if not rpath.isdir(): return []
dir_pairs = [(unquote(filename), filename)
for filename in Robust.listrp(rpath)]
dir_pairs.sort() # sort by real index, not quoted part
child_list = []
for unquoted, filename in dir_pairs:
childrp = rpath.append(unquoted)
childrp.quote_path()
child_list.append(childrp)
return child_list
......@@ -246,7 +246,7 @@ def postset_regexp_local(name, re_string, flags):
if flags: globals()[name] = re.compile(re_string, flags)
else: globals()[name] = re.compile(re_string)
def set_select(source, rpath, tuplelist, quote_mode, *filelists):
def set_select(source, Sel_Obj, rpath, tuplelist, quote_mode, *filelists):
"""Initialize select object using tuplelist
Note that each list in filelists must each be passed as
......@@ -256,12 +256,8 @@ def set_select(source, rpath, tuplelist, quote_mode, *filelists):
"""
global select_source, select_mirror
sel = Select(rpath, quote_mode)
sel = Sel_Obj(rpath, quote_mode)
sel.ParseArgs(tuplelist, filelists)
if source: select_source = sel
else: select_mirror = sel
from rpath import * # kludge to avoid circularity - not needed in this module
from log import * # another kludge
from selection import *
......@@ -32,7 +32,7 @@ side. The source side should only transmit inode information.
from __future__ import generators
import cPickle
import Globals, Time, TempFile, rpath, log, robust
# In all of these dictionaries, lists of indices are the values.  The
# keys in the _inode_ ones are (inode, devloc) pairs.
......@@ -138,7 +138,7 @@ def restore_link(index, rpath):
for linked_index in _src_index_indicies[index]:
if linked_index in _restore_index_path:
srcpath = _restore_index_path[linked_index]
Log("Restoring %s by hard linking to %s" %
log.Log("Restoring %s by hard linking to %s" %
(rpath.path, srcpath), 6)
rpath.hardlink(srcpath)
return 1
......@@ -148,7 +148,7 @@ def restore_link(index, rpath):
def link_rp(src_rorp, dest_rpath, dest_root = None):
"""Make dest_rpath into a link analogous to that of src_rorp"""
if not dest_root: dest_root = dest_rpath # use base of dest_rpath
dest_link_rpath = RPath(dest_root.conn, dest_root.base,
dest_link_rpath = rpath.RPath(dest_root.conn, dest_root.base,
get_indicies(src_rorp, 1)[0])
dest_rpath.hardlink(dest_link_rpath.path)
......@@ -161,13 +161,13 @@ def write_linkdict(rpath, dict, compress = None):
"""
assert (Globals.isbackup_writer and
rpath.conn is Globals.local_connection)
tf = TempFileManager.new(rpath)
tf = TempFile.new(rpath)
def init():
fp = tf.open("wb", compress)
cPickle.dump(dict, fp)
assert not fp.close()
tf.setdata()
Robust.make_tf_robustaction(init, (tf,), (rpath,)).execute()
robust.make_tf_robustaction(init, (tf,), (rpath,)).execute()
def get_linkrp(data_rpath, time, prefix):
"""Return RPath of linkdata, or None if cannot find"""
......@@ -191,7 +191,7 @@ def final_writedata():
"""Write final checkpoint data to rbdir after successful backup"""
global final_inc
if _src_index_indicies:
Log("Writing hard link data", 6)
log.Log("Writing hard link data", 6)
if Globals.compression:
final_inc = Globals.rbdir.append("hardlink_data.%s.data.gz" %
Time.curtimestr)
......@@ -218,7 +218,7 @@ def final_checkpoint(data_rpath):
after every 20 seconds or whatever, but just at the end.
"""
Log("Writing intermediate hard link data to disk", 2)
log.Log("Writing intermediate hard link data to disk", 2)
src_inode_rp = data_rpath.append("hardlink_source_inode_checkpoint."
"%s.data" % Time.curtimestr)
src_index_rp = data_rpath.append("hardlink_source_index_checkpoint."
......@@ -251,7 +251,7 @@ def retrieve_checkpoint(data_rpath, time):
dest_index = get_linkdata(data_rpath, time,
"hardlink_dest_index_checkpoint")
except cPickle.UnpicklingError:
Log("Unpickling Error", 2)
log.Log("Unpickling Error", 2)
return None
if (src_inode is None or src_index is None or
dest_inode is None or dest_index is None): return None
......@@ -271,7 +271,3 @@ def remove_all_checkpoints():
rp.delete()
from log import *
from robust import *
from rpath import *
import Globals, Time
......@@ -20,16 +20,10 @@
"""Start (and end) here - read arguments, set global settings, etc."""
from __future__ import generators
import getopt, sys, re
from log import *
from lazy import *
from connection import *
from rpath import *
from robust import *
from restore import *
from highlevel import *
from manage import *
import Globals, Time, SetConnections
import getopt, sys, re, os
from log import Log
import Globals, Time, SetConnections, selection, robust, rpath, \
manage, highlevel, connection, restore, FilenameMapping, Security
action = None
......@@ -164,7 +158,7 @@ def set_action():
if l == 0: commandline_error("No arguments given")
elif l == 1: action = "restore"
elif l == 2:
if RPath(Globals.local_connection, args[0]).isincfile():
if rpath.RPath(Globals.local_connection, args[0]).isincfile():
action = "restore"
else: action = "backup"
else: commandline_error("Too many arguments given")
......@@ -207,13 +201,14 @@ def misc_setup(rps):
Globals.postset_regexp('no_compression_regexp',
Globals.no_compression_regexp_string)
for conn in Globals.connections: Robust.install_signal_handlers()
for conn in Globals.connections: robust.install_signal_handlers()
def take_action(rps):
"""Do whatever action says"""
if action == "server": PipeConnection(sys.stdin, sys.stdout).Server()
if action == "server":
connection.PipeConnection(sys.stdin, sys.stdout).Server()
elif action == "backup": Backup(rps[0], rps[1])
elif action == "restore": restore(*rps)
elif action == "restore": Restore(*rps)
elif action == "restore-as-of": RestoreAsOf(rps[0], rps[1])
elif action == "test-server": SetConnections.TestConnections()
elif action == "list-changed-since": ListChangedSince(rps[0])
......@@ -247,14 +242,16 @@ def Backup(rpin, rpout):
backup_init_dirs(rpin, rpout)
if prevtime:
Time.setprevtime(prevtime)
HighLevel.Mirror_and_increment(rpin, rpout, incdir)
else: HighLevel.Mirror(rpin, rpout, incdir)
highlevel.HighLevel.Mirror_and_increment(rpin, rpout, incdir)
else: highlevel.HighLevel.Mirror(rpin, rpout, incdir)
rpout.conn.Main.backup_touch_curmirror_local(rpin, rpout)
def backup_init_select(rpin, rpout):
"""Create Select objects on source and dest connections"""
rpin.conn.Globals.set_select(1, rpin, select_opts, None, *select_files)
rpout.conn.Globals.set_select(0, rpout, select_mirror_opts, 1)
rpin.conn.Globals.set_select(1, selection.Select,
rpin, select_opts, None, *select_files)
rpout.conn.Globals.set_select(0, selection.Select,
rpout, select_mirror_opts, 1)
def backup_init_dirs(rpin, rpout):
"""Make sure rpin and rpout are valid, init data dir and logging"""
......@@ -273,7 +270,7 @@ def backup_init_dirs(rpin, rpout):
datadir = rpout.append("rdiff-backup-data")
SetConnections.UpdateGlobal('rbdir', datadir)
incdir = RPath(rpout.conn, os.path.join(datadir.path, "increments"))
incdir = rpath.RPath(rpout.conn, os.path.join(datadir.path, "increments"))
prevtime = backup_get_mirrortime()
if rpout.lstat():
......@@ -336,14 +333,14 @@ def backup_touch_curmirror_local(rpin, rpout):
"""
datadir = Globals.rbdir
map(RPath.delete, backup_get_mirrorrps())
map(rpath.RPath.delete, backup_get_mirrorrps())
mirrorrp = datadir.append("current_mirror.%s.%s" % (Time.curtimestr,
"data"))
Log("Touching mirror marker %s" % mirrorrp.path, 6)
mirrorrp.touch()
RPath.copy_attribs(rpin, rpout)
rpath.copy_attribs(rpin, rpout)
def restore(src_rp, dest_rp = None):
def Restore(src_rp, dest_rp = None):
"""Main restoring function
Here src_rp should be an increment file, and if dest_rp is
......@@ -373,7 +370,7 @@ def restore_common(rpin, target, time):
inc_rpath = datadir.append_path('increments', index)
restore_init_select(mirror_root, target)
restore_start_log(rpin, target, time)
Restore.Restore(inc_rpath, mirror, target, time)
restore.Restore(inc_rpath, mirror, target, time)
Log("Restore ended", 4)
def restore_start_log(rpin, target, time):
......@@ -398,7 +395,7 @@ def restore_check_paths(rpin, rpout, restoreasof = None):
Try restoring from an increment file (the filenames look like
"foobar.2001-09-01T04:49:04-07:00.diff").""" % rpin.path)
if not rpout: rpout = RPath(Globals.local_connection,
if not rpout: rpout = rpath.RPath(Globals.local_connection,
rpin.getincbase_str())
if rpout.lstat():
Log.FatalError("Restore target %s already exists, "
......@@ -413,8 +410,9 @@ def restore_init_select(rpin, rpout):
the restore operation isn't.
"""
Globals.set_select(1, rpin, select_mirror_opts, None)
Globals.set_select(0, rpout, select_opts, None, *select_files)
Globals.set_select(1, selection.Select, rpin, select_mirror_opts, None)
Globals.set_select(0, selection.Select,
rpout, select_opts, None, *select_files)
def restore_get_root(rpin):
"""Return (mirror root, index) and set the data dir
......@@ -438,7 +436,7 @@ def restore_get_root(rpin):
i = len(pathcomps)
while i >= 2:
parent_dir = RPath(rpin.conn, "/".join(pathcomps[:i]))
parent_dir = rpath.RPath(rpin.conn, "/".join(pathcomps[:i]))
if (parent_dir.isdir() and
"rdiff-backup-data" in parent_dir.listdir()): break
i = i-1
......@@ -467,11 +465,11 @@ def ListIncrements(rp):
mirror_root.append_path("rdiff-backup-data")
mirrorrp = mirror_root.new_index(index)
inc_rpath = datadir.append_path('increments', index)
incs = Restore.get_inclist(inc_rpath)
mirror_time = Restore.get_mirror_time()
incs = restore.get_inclist(inc_rpath)
mirror_time = restore.get_mirror_time()
if Globals.parsable_output:
print Manage.describe_incs_parsable(incs, mirror_time, mirrorrp)
else: print Manage.describe_incs_human(incs, mirror_time, mirrorrp)
print manage.describe_incs_parsable(incs, mirror_time, mirrorrp)
else: print manage.describe_incs_human(incs, mirror_time, mirrorrp)
def CalculateAverage(rps):
......@@ -495,7 +493,7 @@ def RemoveOlderThan(rootrp):
Log("Deleting increment(s) before %s" % timep, 4)
times_in_secs = map(lambda inc: Time.stringtotime(inc.getinctime()),
Restore.get_inclist(datadir.append("increments")))
restore.get_inclist(datadir.append("increments")))
times_in_secs = filter(lambda t: t < time, times_in_secs)
if not times_in_secs:
Log.FatalError("No increments older than %s found" % timep)
......@@ -510,7 +508,7 @@ def RemoveOlderThan(rootrp):
if len(times_in_secs) == 1:
Log("Deleting increment at time:\n" + inc_pretty_time, 3)
else: Log("Deleting increments at times:\n" + inc_pretty_time, 3)
Manage.delete_earlier_than(datadir, time)
manage.delete_earlier_than(datadir, time)
def ListChangedSince(rp):
......@@ -519,12 +517,12 @@ def ListChangedSince(rp):
except Time.TimeException, exc: Log.FatalError(str(exc))
mirror_root, index = restore_get_root(rp)
Globals.rbdir = datadir = mirror_root.append_path("rdiff-backup-data")
mirror_time = Restore.get_mirror_time()
mirror_time = restore.get_mirror_time()
def get_rids_recursive(rid):
"""Yield all the rids under rid that have inc newer than rest_time"""
yield rid
for sub_rid in Restore.yield_rids(rid, rest_time, mirror_time):
for sub_rid in restore.yield_rids(rid, rest_time, mirror_time):
for sub_sub_rid in get_rids_recursive(sub_rid): yield sub_sub_rid
def determineChangeType(incList):
......@@ -538,8 +536,8 @@ def ListChangedSince(rp):
else: return "Unknown!"
inc_rpath = datadir.append_path('increments', index)
inc_list = Restore.get_inclist(inc_rpath)
root_rid = RestoreIncrementData(index, inc_rpath, inc_list)
inc_list = restore.get_inclist(inc_rpath)
root_rid = restore.RestoreIncrementData(index, inc_rpath, inc_list)
for rid in get_rids_recursive(root_rid):
if rid.inc_list:
if not rid.index: path = "."
......
......@@ -19,8 +19,8 @@
"""Misc statistics methods, pertaining to dir and session stat files"""
from statistics import *
import time
import Globals, Hardlink, increment, log, statistics, Time
# This is the RPath of the directory statistics file, and the
# associated open file. It will hold a line of statistics for
......@@ -34,7 +34,7 @@ _dir_stats_header = """# rdiff-backup directory statistics file
#
# Each line is in the following format:
# RelativeDirName %s
""" % " ".join(StatsObj.stat_file_attrs)
""" % " ".join(statistics.StatsObj.stat_file_attrs)
def open_dir_stats_file():
"""Open directory statistics file, write header"""
......@@ -43,11 +43,11 @@ def open_dir_stats_file():
if Globals.compression: suffix = "data.gz"
else: suffix = "data"
_dir_stats_rp = Inc.get_inc(Globals.rbdir.append("directory_statistics"),
Time.curtime, suffix)
_dir_stats_rp = increment.get_inc(
Globals.rbdir.append("directory_statistics"), Time.curtime, suffix)
if _dir_stats_rp.lstat():
Log("Warning, statistics file %s already exists, appending" %
log.Log("Warning, statistics file %s already exists, appending" %
_dir_stats_rp.path, 2)
_dir_stats_fp = _dir_stats_rp.open("ab", Globals.compression)
else: _dir_stats_fp = _dir_stats_rp.open("wb", Globals.compression)
......@@ -68,8 +68,8 @@ def close_dir_stats_file():
def write_session_statistics(statobj):
"""Write session statistics into file, log"""
stat_inc = Inc.get_inc(Globals.rbdir.append("session_statistics"),
Time.curtime, "data")
stat_inc = increment.get_inc(
Globals.rbdir.append("session_statistics"), Time.curtime, "data")
statobj.StartTime = Time.curtime
statobj.EndTime = time.time()
......@@ -85,9 +85,8 @@ def write_session_statistics(statobj):
statobj.write_stats_to_rp(stat_inc)
if Globals.print_statistics:
message = statobj.get_stats_logstring("Session statistics")
Log.log_to_file(message)
log.Log.log_to_file(message)
Globals.client_conn.sys.stdout.write(message)
from increment import *
import Hardlink
......@@ -25,10 +25,10 @@ RobustAction and the like.
"""
import os, librsync
from log import Log
import robust, TempFile, Globals
class RdiffException(Exception): pass
def get_signature(rp):
"""Take signature of rpin file and return in file object"""
Log("Getting signature of %s" % rp.path, 7)
......@@ -52,9 +52,9 @@ def write_delta_action(basis, new, delta, compress = None):
before written to delta.
"""
delta_tf = TempFileManager.new(delta)
delta_tf = TempFile.new(delta)
def init(): write_delta(basis, new, delta_tf, compress)
return Robust.make_tf_robustaction(init, delta_tf, delta)
return robust.make_tf_robustaction(init, delta_tf, delta)
def write_delta(basis, new, delta, compress = None):
"""Write rdiff delta which brings basis to new"""
......@@ -74,12 +74,12 @@ def patch_action(rp_basis, rp_delta, rp_out = None, out_tf = None,
"""
if not rp_out: rp_out = rp_basis
if not out_tf: out_tf = TempFileManager.new(rp_out)
if not out_tf: out_tf = TempFile.new(rp_out)
def init():
rp_basis.conn.Rdiff.patch_local(rp_basis, rp_delta,
out_tf, delta_compressed)
out_tf.setdata()
return Robust.make_tf_robustaction(init, out_tf, rp_out)
return robust.make_tf_robustaction(init, out_tf, rp_out)
def patch_local(rp_basis, rp_delta, outrp, delta_compressed = None):
"""Patch routine that must be run on rp_basis.conn
......@@ -99,20 +99,20 @@ def patch_local(rp_basis, rp_delta, outrp, delta_compressed = None):
def patch_with_attribs_action(rp_basis, rp_delta, rp_out = None):
"""Like patch_action, but also transfers attributs from rp_delta"""
if not rp_out: rp_out = rp_basis
tf = TempFileManager.new(rp_out)
return Robust.chain_nested(patch_action(rp_basis, rp_delta, rp_out, tf),
Robust.copy_attribs_action(rp_delta, tf))
tf = TempFile.new(rp_out)
return robust.chain_nested(patch_action(rp_basis, rp_delta, rp_out, tf),
robust.copy_attribs_action(rp_delta, tf))
def copy_action(rpin, rpout):
"""Use rdiff to copy rpin to rpout, conserving bandwidth"""
if not rpin.isreg() or not rpout.isreg() or rpin.conn is rpout.conn:
# rdiff not applicable, fallback to regular copying
return Robust.copy_action(rpin, rpout)
return robust.copy_action(rpin, rpout)
Log("Rdiff copying %s to %s" % (rpin.path, rpout.path), 6)
out_tf = TempFileManager.new(rpout)
out_tf = TempFile.new(rpout)
def init(): rpout.conn.Rdiff.copy_local(rpin, rpout, out_tf)
return Robust.make_tf_robustaction(init, out_tf, rpout)
return robust.make_tf_robustaction(init, out_tf, rpout)
def copy_local(rpin, rpout, rpnew):
"""Write rpnew == rpin using rpout as basis. rpout and rpnew local"""
......@@ -122,6 +122,4 @@ def copy_local(rpin, rpout, rpnew):
rpnew.write_from_fileobj(librsync.PatchedFile(rpout.open("rb"), deltafile))
from log import *
from robust import *
......@@ -20,8 +20,7 @@
"""Functions to make sure remote requests are kosher"""
import sys, tempfile
import Globals, Main
from rpath import *
import Globals, Main, rpath
class Violation(Exception):
"""Exception that indicates an improper request has been received"""
......@@ -76,7 +75,7 @@ def set_security_level(action, cmdpairs):
rdir = tempfile.gettempdir()
elif islocal(cp1):
sec_level = "read-only"
rdir = Main.restore_get_root(RPath(Globals.local_connection,
rdir = Main.restore_get_root(rpath.RPath(Globals.local_connection,
getpath(cp1)))[0].path
else:
assert islocal(cp2)
......@@ -101,7 +100,7 @@ def set_security_level(action, cmdpairs):
else: assert 0, "Unknown action %s" % action
Globals.security_level = sec_level
Globals.restrict_path = RPath(Globals.local_connection,
Globals.restrict_path = rpath.RPath(Globals.local_connection,
rdir).normalize().path
def set_allowed_requests(sec_level):
......@@ -111,44 +110,46 @@ def set_allowed_requests(sec_level):
allowed_requests = ["VirtualFile.readfromid", "VirtualFile.closebyid",
"Globals.get", "Globals.is_not_None",
"Globals.get_dict_val",
"Log.open_logfile_allconn",
"Log.close_logfile_allconn",
"log.Log.open_logfile_allconn",
"log.Log.close_logfile_allconn",
"SetConnections.add_redirected_conn",
"RedirectedRun",
"sys.stdout.write"]
if sec_level == "minimal": pass
elif sec_level == "read-only" or sec_level == "update-only":
allowed_requests.extend(["C.make_file_dict",
allowed_requests.extend(
["C.make_file_dict",
"os.getuid",
"os.listdir",
"Time.setcurtime_local",
"Resume.ResumeCheck",
"HLSourceStruct.split_initial_dsiter",
"HLSourceStruct.get_diffs_and_finalize",
"RPathStatic.gzip_open_local_read",
"RPathStatic.open_local_read"])
"robust.Resume.ResumeCheck",
"highlevel.HLSourceStruct.split_initial_dsiter",
"highlevel.HLSourceStruct.get_diffs_and_finalize",
"rpath.gzip_open_local_read",
"rpath.open_local_read"])
if sec_level == "update-only":
allowed_requests. \
extend(["Log.open_logfile_local", "Log.close_logfile_local",
allowed_requests.extend(
["Log.open_logfile_local", "Log.close_logfile_local",
"Log.close_logfile_allconn", "Log.log_to_file",
"SaveState.init_filenames",
"SaveState.touch_last_file",
"HLDestinationStruct.get_sigs",
"HLDestinationStruct.patch_w_datadir_writes",
"HLDestinationStruct.patch_and_finalize",
"HLDestinationStruct.patch_increment_and_finalize",
"robust.SaveState.init_filenames",
"robust.SaveState.touch_last_file",
"highlevel.HLDestinationStruct.get_sigs",
"highlevel.HLDestinationStruct.patch_w_datadir_writes",
"highlevel.HLDestinationStruct.patch_and_finalize",
"highlevel.HLDestinationStruct.patch_increment_and_finalize",
"Main.backup_touch_curmirror_local",
"Globals.ITRB.increment_stat"])
if Globals.server:
allowed_requests.extend(["SetConnections.init_connection_remote",
allowed_requests.extend(
["SetConnections.init_connection_remote",
"Log.setverbosity",
"Log.setterm_verbosity",
"Time.setprevtime_local",
"FilenameMapping.set_init_quote_vals_local",
"Globals.postset_regexp_local",
"Globals.set_select",
"HLSourceStruct.set_session_info",
"HLDestinationStruct.set_session_info"])
"highlevel.HLSourceStruct.set_session_info",
"highlevel.HLDestinationStruct.set_session_info"])
def vet_request(request, arglist):
"""Examine request for security violations"""
......@@ -156,7 +157,7 @@ def vet_request(request, arglist):
security_level = Globals.security_level
if Globals.restrict_path:
for arg in arglist:
if isinstance(arg, RPath): vet_rpath(arg)
if isinstance(arg, rpath.RPath): vet_rpath(arg)
if security_level == "all": return
if request.function_string in allowed_requests: return
if request.function_string == "Globals.set":
......
......@@ -25,6 +25,10 @@ the related connections.
"""
import os
from log import Log
import Globals, FilenameMapping, connection, rpath
# This is the schema that determines how rdiff-backup will open a
# pipe to the remote system. If the file is given as A::B, %s will
# be substituted with A in the schema.
......@@ -68,7 +72,7 @@ def cmdpair2rp(cmd_pair):
cmd, filename = cmd_pair
if cmd: conn = init_connection(cmd)
else: conn = Globals.local_connection
return RPath(conn, filename).normalize()
return rpath.RPath(conn, filename).normalize()
def desc2cmd_pairs(desc_pair):
"""Return pair (remote_cmd, filename) from desc_pair"""
......@@ -127,7 +131,7 @@ def init_connection(remote_cmd):
Log("Executing " + remote_cmd, 4)
stdin, stdout = os.popen2(remote_cmd)
conn_number = len(Globals.connections)
conn = PipeConnection(stdout, stdin, conn_number)
conn = connection.PipeConnection(stdout, stdin, conn_number)
check_connection_version(conn, remote_cmd)
Log("Registering connection %d" % conn_number, 7)
......@@ -138,7 +142,7 @@ def init_connection(remote_cmd):
def check_connection_version(conn, remote_cmd):
"""Log warning if connection has different version"""
try: remote_version = conn.Globals.get('version')
except ConnectionReadError, exception:
except connection.ConnectionReadError, exception:
Log.FatalError("""%s
Couldn't start up the remote connection by executing
......@@ -184,7 +188,7 @@ def init_connection_remote(conn_number):
def add_redirected_conn(conn_number):
"""Run on server side - tell about redirected connection"""
Globals.connection_dict[conn_number] = \
RedirectedConnection(conn_number)
connection.RedirectedConnection(conn_number)
def UpdateGlobal(setting_name, val):
"""Update value of global variable across all connections"""
......@@ -230,9 +234,3 @@ Local version: %s
Remote version: %s""" % (Globals.version, version)
else: print "Server OK"
from log import *
from rpath import *
from connection import *
import Globals, FilenameMapping
......@@ -20,7 +20,7 @@
"""Support code for remote execution and data transfer"""
from __future__ import generators
import types, os, tempfile, cPickle, shutil, traceback, pickle, socket
import types, os, tempfile, cPickle, shutil, traceback, pickle, socket, sys
class ConnectionError(Exception): pass
......@@ -121,11 +121,13 @@ class LowLevelPipeConnection(Connection):
"""Put an object into the pipe (will send raw if string)"""
Log.conn("sending", obj, req_num)
if type(obj) is types.StringType: self._putbuf(obj, req_num)
elif isinstance(obj, Connection): self._putconn(obj, req_num)
elif isinstance(obj, TempFile): self._puttempfile(obj, req_num)
elif isinstance(obj, DSRPath): self._putdsrpath(obj, req_num)
elif isinstance(obj, RPath): self._putrpath(obj, req_num)
elif isinstance(obj, RORPath): self._putrorpath(obj, req_num)
elif isinstance(obj, connection.Connection):self._putconn(obj, req_num)
elif isinstance(obj, TempFile.TempFile):
self._puttempfile(obj, req_num)
elif isinstance(obj, destructive_stepping.DSRPath):
self._putdsrpath(obj, req_num)
elif isinstance(obj, rpath.RPath): self._putrpath(obj, req_num)
elif isinstance(obj, rpath.RORPath): self._putrorpath(obj, req_num)
elif ((hasattr(obj, "read") or hasattr(obj, "write"))
and hasattr(obj, "close")): self._putfile(obj, req_num)
elif hasattr(obj, "next"): self._putiter(obj, req_num)
......@@ -146,7 +148,7 @@ class LowLevelPipeConnection(Connection):
def _putiter(self, iterator, req_num):
"""Put an iterator through the pipe"""
self._write("i", str(VirtualFile.new(RORPIter.ToFile(iterator))),
self._write("i", str(VirtualFile.new(rorpiter.ToFile(iterator))),
req_num)
def _puttempfile(self, tempfile, req_num):
......@@ -239,8 +241,8 @@ class LowLevelPipeConnection(Connection):
elif format_string == "b": result = data
elif format_string == "f": result = VirtualFile(self, int(data))
elif format_string == "i":
result = RORPIter.FromFile(BufferedRead(VirtualFile(self,
int(data))))
result = rorpiter.FromFile(iterfile.BufferedRead(
VirtualFile(self, int(data))))
elif format_string == "t": result = self._gettempfile(data)
elif format_string == "r": result = self._getrorpath(data)
elif format_string == "R": result = self._getrpath(data)
......@@ -254,23 +256,25 @@ class LowLevelPipeConnection(Connection):
def _getrorpath(self, raw_rorpath_buf):
"""Reconstruct RORPath object from raw data"""
index, data = cPickle.loads(raw_rorpath_buf)
return RORPath(index, data)
return rpath.RORPath(index, data)
def _gettempfile(self, raw_tf_buf):
"""Return TempFile object indicated by raw_tf_buf"""
conn_number, base, index, data = cPickle.loads(raw_tf_buf)
return TempFile(Globals.connection_dict[conn_number],
return TempFile.TempFile(Globals.connection_dict[conn_number],
base, index, data)
def _getrpath(self, raw_rpath_buf):
"""Return RPath object indicated by raw_rpath_buf"""
conn_number, base, index, data = cPickle.loads(raw_rpath_buf)
return RPath(Globals.connection_dict[conn_number], base, index, data)
return rpath.RPath(Globals.connection_dict[conn_number],
base, index, data)
def _getdsrpath(self, raw_dsrpath_buf):
"""Return DSRPath object indicated by buf"""
conn_number, state_dict = cPickle.loads(raw_dsrpath_buf)
empty_dsrp = DSRPath("bypass", Globals.local_connection, None)
empty_dsrp = destructive_stepping.DSRPath("bypass",
Globals.local_connection, None)
empty_dsrp.__setstate__(state_dict)
empty_dsrp.conn = Globals.connection_dict[conn_number]
empty_dsrp.file = None
......@@ -538,22 +542,11 @@ class VirtualFile:
# everything has to be available here for remote connection's use, but
# put at bottom to reduce circularities.
import Globals, Time, Rdiff, Hardlink, FilenameMapping, C, Security, Main
from static import *
from lazy import *
from log import *
from iterfile import *
from connection import *
from rpath import *
from robust import *
from rorpiter import *
from selection import *
from statistics import *
from increment import *
from restore import *
from manage import *
from highlevel import *
import Globals, Time, Rdiff, Hardlink, FilenameMapping, C, Security, \
Main, rorpiter, selection, increment, statistics, manage, lazy, \
iterfile, rpath, robust, restore, manage, highlevel, connection, \
TempFile, destructive_stepping, SetConnections
from log import Log
Globals.local_connection = LocalConnection()
Globals.connections.append(Globals.local_connection)
......
# Copyright 2002 Ben Escoto
#
# This file is part of rdiff-backup.
......@@ -21,14 +22,14 @@
from __future__ import generators
import types
from rpath import *
from lazy import *
import Globals, rpath, log
class DSRPPermError(Exception):
"""Exception used when a DSRPath can't get sufficient permissions"""
pass
class DSRPath(RPath):
class DSRPath(rpath.RPath):
"""Destructive Stepping RPath
Sometimes when we traverse the directory tree, even when we just
......@@ -59,11 +60,11 @@ class DSRPath(RPath):
"""
if base == 0:
assert isinstance(conn_or_rp, RPath)
RPath.__init__(self, conn_or_rp.conn,
assert isinstance(conn_or_rp, rpath.RPath)
rpath.RPath.__init__(self, conn_or_rp.conn,
conn_or_rp.base, conn_or_rp.index)
self.path = conn_or_rp.path # conn_or_rp may be quoted
else: RPath.__init__(self, conn_or_rp, base, index)
else: rpath.RPath.__init__(self, conn_or_rp, base, index)
if source != "bypass":
# "bypass" val is used when unpackaging over connection
......@@ -107,7 +108,7 @@ class DSRPath(RPath):
if not self.hasfullperms(): self.chmod_bypass(0700)
def warn(self, err):
Log("Received error '%s' when dealing with file %s, skipping..."
log.Log("Received error '%s' when dealing with file %s, skipping..."
% (err, self.path), 1)
raise DSRPPermError(self.path)
......@@ -136,7 +137,7 @@ class DSRPath(RPath):
def chmod(self, permissions):
"""Change permissions, delaying if self.perms_delayed is set"""
if self.delay_perms: self.newperms = self.data['perms'] = permissions
else: RPath.chmod(self, permissions)
else: rpath.RPath.chmod(self, permissions)
def getperms(self):
"""Return dsrp's intended permissions"""
......@@ -148,7 +149,7 @@ class DSRPath(RPath):
"""Change permissions without updating the data dictionary"""
self.delay_perms = 1
if self.newperms is None: self.newperms = self.getperms()
Log("DSRP: Perm bypass %s to %o" % (self.path, permissions), 8)
log.Log("DSRP: Perm bypass %s to %o" % (self.path, permissions), 8)
self.conn.os.chmod(self.path, permissions)
def settime(self, accesstime, modtime):
......@@ -157,12 +158,12 @@ class DSRPath(RPath):
if self.delay_mtime: self.newmtime = self.data['mtime'] = modtime
if not self.delay_atime or not self.delay_mtime:
RPath.settime(self, accesstime, modtime)
rpath.RPath.settime(self, accesstime, modtime)
def setmtime(self, modtime):
"""Change mtime, delaying if self.times_delayed is set"""
if self.delay_mtime: self.newmtime = self.data['mtime'] = modtime
else: RPath.setmtime(self, modtime)
else: rpath.RPath.setmtime(self, modtime)
def getmtime(self):
"""Return dsrp's intended modification time"""
......@@ -181,18 +182,18 @@ class DSRPath(RPath):
if not self.lstat(): return # File has been deleted in meantime
if self.delay_perms and self.newperms is not None:
Log("Finalizing permissions of dsrp %s to %s" %
log.Log("Finalizing permissions of dsrp %s to %s" %
(self.path, self.newperms), 8)
RPath.chmod(self, self.newperms)
rpath.RPath.chmod(self, self.newperms)
do_atime = self.delay_atime and self.newatime is not None
do_mtime = self.delay_mtime and self.newmtime is not None
if do_atime and do_mtime:
RPath.settime(self, self.newatime, self.newmtime)
rpath.RPath.settime(self, self.newatime, self.newmtime)
elif do_atime and not do_mtime:
RPath.settime(self, self.newatime, self.getmtime())
rpath.RPath.settime(self, self.newatime, self.getmtime())
elif not do_atime and do_mtime:
RPath.setmtime(self, self.newmtime)
rpath.RPath.setmtime(self, self.newmtime)
def newpath(self, newpath, index = ()):
"""Return similar DSRPath but with new path"""
......@@ -208,29 +209,4 @@ class DSRPath(RPath):
return self.__class__(self.source, self.conn, self.base, index)
class DestructiveSteppingFinalizer(ITRBranch):
"""Finalizer that can work on an iterator of dsrpaths
The reason we have to use an IterTreeReducer is that some files
should be updated immediately, but for directories we sometimes
need to update all the files in the directory before finally
coming back to it.
"""
dsrpath = None
def start_process(self, index, dsrpath):
self.dsrpath = dsrpath
def end_process(self):
if self.dsrpath: self.dsrpath.write_changes()
def can_fast_process(self, index, dsrpath):
return not self.dsrpath.isdir()
def fast_process(self, index, dsrpath):
if self.dsrpath: self.dsrpath.write_changes()
from log import *
from robust import *
import Globals
from __future__ import generators
from manage import *
from rpath import *
import rpath, manage
#######################################################################
#
......
......@@ -20,17 +20,8 @@
"""High level functions for mirroring, mirror & inc, etc."""
from __future__ import generators
from static import *
class SkipFileException(Exception):
    """Signal that the current file should be skipped, then continue.

    Typically raised when there is a problem reading an individual
    file; the rest of the backup should nevertheless keep going.
    """
import Globals, MiscStats, metadata, rorpiter, TempFile, \
Hardlink, robust, increment, rpath, lazy, static, log
class HighLevel:
......@@ -48,8 +39,8 @@ class HighLevel:
Otherwise only mirror and don't create any extra files.
"""
SourceS = src_rpath.conn.HLSourceStruct
DestS = dest_rpath.conn.HLDestinationStruct
SourceS = src_rpath.conn.highlevel.HLSourceStruct
DestS = dest_rpath.conn.highlevel.HLDestinationStruct
src_init_dsiter = SourceS.split_initial_dsiter()
dest_sigiter = DestS.get_sigs(dest_rpath, src_init_dsiter)
......@@ -61,8 +52,8 @@ class HighLevel:
def Mirror_and_increment(src_rpath, dest_rpath, inc_rpath,
session_info = None):
"""Mirror + put increments in tree based at inc_rpath"""
SourceS = src_rpath.conn.HLSourceStruct
DestS = dest_rpath.conn.HLDestinationStruct
SourceS = src_rpath.conn.highlevel.HLSourceStruct
DestS = dest_rpath.conn.highlevel.HLDestinationStruct
src_init_dsiter = SourceS.split_initial_dsiter()
dest_sigiter = DestS.get_sigs(dest_rpath, src_init_dsiter)
......@@ -72,7 +63,7 @@ class HighLevel:
dest_rpath.setdata()
inc_rpath.setdata()
MakeStatic(HighLevel)
static.MakeStatic(HighLevel)
class HLSourceStruct:
......@@ -80,7 +71,7 @@ class HLSourceStruct:
def split_initial_dsiter(cls):
"""Set iterators of all dsrps from rpath, returning one"""
dsiter = Globals.select_source.set_iter()
initial_dsiter1, cls.initial_dsiter2 = Iter.multiplex(dsiter, 2)
initial_dsiter1, cls.initial_dsiter2 = lazy.Iter.multiplex(dsiter, 2)
return initial_dsiter1
def get_diffs_and_finalize(cls, sigiter):
......@@ -90,9 +81,9 @@ class HLSourceStruct:
dissimilar files.
"""
collated = RORPIter.CollateIterators(cls.initial_dsiter2, sigiter)
collated = rorpiter.CollateIterators(cls.initial_dsiter2, sigiter)
def error_handler(exc, dest_sig, rp):
Log("Error %s producing a diff of %s" %
log.Log("Error %s producing a diff of %s" %
(exc, rp and rp.path), 2)
return None
......@@ -101,12 +92,12 @@ class HLSourceStruct:
if dest_sig:
if dest_sig.isplaceholder(): yield dest_sig
else:
diff = Robust.check_common_error(
error_handler, RORPIter.diffonce, [dest_sig, rp])
diff = robust.check_common_error(
error_handler, rorpiter.diffonce, [dest_sig, rp])
if diff: yield diff
return diffs()
MakeClass(HLSourceStruct)
static.MakeClass(HLSourceStruct)
class HLDestinationStruct:
......@@ -115,7 +106,7 @@ class HLDestinationStruct:
def split_initial_dsiter(cls):
"""Set initial_dsiters (iteration of all rps from rpath)"""
result, cls.initial_dsiter2 = \
Iter.multiplex(Globals.select_mirror.set_iter(), 2)
lazy.Iter.multiplex(Globals.select_mirror.set_iter(), 2)
return result
def get_dissimilar(cls, baserp, src_init_iter, dest_init_iter):
......@@ -134,13 +125,13 @@ class HLDestinationStruct:
will depend on the Globals.conn_bufsize value.
"""
collated = RORPIter.CollateIterators(src_init_iter, dest_init_iter)
collated = rorpiter.CollateIterators(src_init_iter, dest_init_iter)
def compare(src_rorp, dest_dsrp):
"""Return dest_dsrp if they are different, None if the same"""
if not dest_dsrp:
dest_dsrp = cls.get_dsrp(baserp, src_rorp.index)
if dest_dsrp.lstat():
Log("Warning: Found unexpected destination file %s, "
log.Log("Warning: Found unexpected destination file %s, "
"not processing it." % dest_dsrp.path, 2)
return None
elif (src_rorp and src_rorp == dest_dsrp and
......@@ -162,7 +153,7 @@ class HLDestinationStruct:
counter = 0
yield dsrp
elif counter == 20:
placeholder = RORPath(src_rorp.index)
placeholder = rpath.RORPath(src_rorp.index)
placeholder.make_placeholder()
counter = 0
yield placeholder
......@@ -185,11 +176,11 @@ class HLDestinationStruct:
metadata.CloseMetadata()
dup = duplicate_with_write(src_init_iter)
dissimilars = cls.get_dissimilar(baserp, dup, dest_iters1)
return RORPIter.Signatures(dissimilars)
return rorpiter.Signatures(dissimilars)
def get_dsrp(cls, dest_rpath, index):
"""Return initialized rpath based on dest_rpath with given index"""
rp = RPath(dest_rpath.conn, dest_rpath.base, index)
rp = rpath.RPath(dest_rpath.conn, dest_rpath.base, index)
if Globals.quoting_enabled: rp.quote_path()
return rp
......@@ -197,14 +188,16 @@ class HLDestinationStruct:
"""Return finalizer, starting from session info if necessary"""
old_finalizer = cls._session_info and cls._session_info.finalizer
if old_finalizer: return old_finalizer
else: return IterTreeReducer(DestructiveSteppingFinalizer, [])
else: return rorpiter.IterTreeReducer(
rorpiter.DestructiveSteppingFinalizer, [])
def get_ITR(cls, inc_rpath):
"""Return ITR, starting from state if necessary"""
if cls._session_info and cls._session_info.ITR:
return cls._session_info.ITR
else:
iitr = IterTreeReducer(IncrementITRB, [inc_rpath])
iitr = rorpiter.IterTreeReducer(increment.IncrementITRB,
[inc_rpath])
iitr.root_branch.override_changed()
Globals.ITRB = iitr.root_branch
iitr.root_branch.Errors = 0
......@@ -214,38 +207,38 @@ class HLDestinationStruct:
"""Return MirrorITR, starting from state if available"""
if cls._session_info and cls._session_info.ITR:
return cls._session_info.ITR
ITR = IterTreeReducer(MirrorITRB, [inc_rpath])
ITR = rorpiter.IterTreeReducer(increment.MirrorITRB, [inc_rpath])
Globals.ITRB = ITR.root_branch
ITR.root_branch.Errors = 0
return ITR
def patch_and_finalize(cls, dest_rpath, diffs):
"""Apply diffs and finalize"""
collated = RORPIter.CollateIterators(diffs, cls.initial_dsiter2)
collated = rorpiter.CollateIterators(diffs, cls.initial_dsiter2)
#finalizer = cls.get_finalizer()
diff_rorp, rp = None, None
def patch(diff_rorp, dsrp):
if not dsrp: dsrp = cls.get_dsrp(dest_rpath, diff_rorp.index)
if diff_rorp and not diff_rorp.isplaceholder():
RORPIter.patchonce_action(None, dsrp, diff_rorp).execute()
rorpiter.patchonce_action(None, dsrp, diff_rorp).execute()
return dsrp
def error_handler(exc, diff_rorp, dsrp):
filename = dsrp and dsrp.path or os.path.join(*diff_rorp.index)
Log("Error: %s processing file %s" % (exc, filename), 2)
log.Log("Error: %s processing file %s" % (exc, filename), 2)
for indexed_tuple in collated:
Log(lambda: "Processing %s" % str(indexed_tuple), 7)
log.Log(lambda: "Processing %s" % str(indexed_tuple), 7)
diff_rorp, dsrp = indexed_tuple
dsrp = Robust.check_common_error(error_handler, patch,
dsrp = robust.check_common_error(error_handler, patch,
[diff_rorp, dsrp])
#finalizer(dsrp.index, dsrp)
#finalizer.Finish()
def patch_w_datadir_writes(cls, dest_rpath, diffs, inc_rpath):
"""Apply diffs and finalize, with checkpointing and statistics"""
collated = RORPIter.CollateIterators(diffs, cls.initial_dsiter2)
collated = rorpiter.CollateIterators(diffs, cls.initial_dsiter2)
#finalizer, ITR = cls.get_finalizer(), cls.get_MirrorITR(inc_rpath)
finalizer, ITR = None, cls.get_MirrorITR(inc_rpath)
MiscStats.open_dir_stats_file()
......@@ -253,7 +246,7 @@ class HLDestinationStruct:
try:
for indexed_tuple in collated:
Log(lambda: "Processing %s" % str(indexed_tuple), 7)
log.Log(lambda: "Processing %s" % str(indexed_tuple), 7)
diff_rorp, dsrp = indexed_tuple
if not dsrp: dsrp = cls.get_dsrp(dest_rpath, diff_rorp.index)
if diff_rorp and diff_rorp.isplaceholder(): diff_rorp = None
......@@ -270,7 +263,7 @@ class HLDestinationStruct:
def patch_increment_and_finalize(cls, dest_rpath, diffs, inc_rpath):
"""Apply diffs, write increment if necessary, and finalize"""
collated = RORPIter.CollateIterators(diffs, cls.initial_dsiter2)
collated = rorpiter.CollateIterators(diffs, cls.initial_dsiter2)
#finalizer, ITR = cls.get_finalizer(), cls.get_ITR(inc_rpath)
finalizer, ITR = None, cls.get_ITR(inc_rpath)
MiscStats.open_dir_stats_file()
......@@ -278,7 +271,7 @@ class HLDestinationStruct:
try:
for indexed_tuple in collated:
Log(lambda: "Processing %s" % str(indexed_tuple), 7)
log.Log(lambda: "Processing %s" % str(indexed_tuple), 7)
diff_rorp, dsrp = indexed_tuple
index = indexed_tuple.index
if not dsrp: dsrp = cls.get_dsrp(dest_rpath, index)
......@@ -296,18 +289,12 @@ class HLDestinationStruct:
def handle_last_error(cls, dsrp, finalizer, ITR):
"""If catch fatal error, try to checkpoint before exiting"""
Log.exception(1, 2)
TracebackArchive.log()
log.Log.exception(1, 2)
robust.TracebackArchive.log()
#SaveState.checkpoint(ITR, finalizer, dsrp, 1)
#if Globals.preserve_hardlinks: Hardlink.final_checkpoint(Globals.rbdir)
#SaveState.touch_last_file_definitive()
raise
MakeClass(HLDestinationStruct)
static.MakeClass(HLDestinationStruct)
from log import *
from rpath import *
from robust import *
from increment import *
from rorpiter import *
import Globals, Hardlink, MiscStats, metadata
......@@ -17,17 +17,16 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
"""Provides Inc and *ITR classes, which relate to writing increment files"""
"""Provides functions and *ITR classes, for writing increment files"""
import traceback
from static import *
from statistics import *
from lazy import *
from log import Log
import Globals, Time, MiscStats, rorpiter, TempFile, robust, \
statistics, rpath, static, lazy, Rdiff, Hardlink
class Inc:
"""Class containing increment functions"""
def Increment_action(new, mirror, incpref):
"""Main file incrementing function, returns RobustAction
def Increment_action(new, mirror, incpref):
"""Main file incrementing function, returns robust.Action
new is the file on the active partition,
mirror is the mirrored file from the last backup,
......@@ -36,72 +35,72 @@ class Inc:
This function basically moves the information about the mirror
file to incpref.
The returned RobustAction when executed should return the name
The returned robust.Action when executed should return the name
of the incfile, or None if none was created.
"""
if not (new and new.lstat() or mirror.lstat()):
return Robust.null_action # Files deleted in meantime, do nothing
return robust.null_action # Files deleted in meantime, do nothing
Log("Incrementing mirror file " + mirror.path, 5)
if ((new and new.isdir()) or mirror.isdir()) and not incpref.isdir():
incpref.mkdir()
if not mirror.lstat(): return Inc.makemissing_action(incpref)
elif mirror.isdir(): return Inc.makedir_action(mirror, incpref)
if not mirror.lstat(): return makemissing_action(incpref)
elif mirror.isdir(): return makedir_action(mirror, incpref)
elif new.isreg() and mirror.isreg():
return Inc.makediff_action(new, mirror, incpref)
else: return Inc.makesnapshot_action(mirror, incpref)
return makediff_action(new, mirror, incpref)
else: return makesnapshot_action(mirror, incpref)
def Increment(new, mirror, incpref):
return Inc.Increment_action(new, mirror, incpref).execute()
def Increment(new, mirror, incpref):
return Increment_action(new, mirror, incpref).execute()
def makemissing_action(incpref):
def makemissing_action(incpref):
"""Signify that mirror file was missing"""
def final(init_val):
incrp = Inc.get_inc_ext(incpref, "missing")
incrp = get_inc_ext(incpref, "missing")
incrp.touch()
return incrp
return RobustAction(None, final, None)
return robust.Action(None, final, None)
def makesnapshot_action(mirror, incpref):
def makesnapshot_action(mirror, incpref):
"""Copy mirror to incfile, since new is quite different"""
if (mirror.isreg() and Globals.compression and
not Globals.no_compression_regexp.match(mirror.path)):
snapshotrp = Inc.get_inc_ext(incpref, "snapshot.gz")
return Robust.copy_with_attribs_action(mirror, snapshotrp, 1)
snapshotrp = get_inc_ext(incpref, "snapshot.gz")
return robust.copy_with_attribs_action(mirror, snapshotrp, 1)
else:
snapshotrp = Inc.get_inc_ext(incpref, "snapshot")
return Robust.copy_with_attribs_action(mirror, snapshotrp, None)
snapshotrp = get_inc_ext(incpref, "snapshot")
return robust.copy_with_attribs_action(mirror, snapshotrp, None)
def makediff_action(new, mirror, incpref):
def makediff_action(new, mirror, incpref):
"""Make incfile which is a diff new -> mirror"""
if (Globals.compression and
not Globals.no_compression_regexp.match(mirror.path)):
diff = Inc.get_inc_ext(incpref, "diff.gz")
diff = get_inc_ext(incpref, "diff.gz")
compress = 1
else:
diff = Inc.get_inc_ext(incpref, "diff")
diff = get_inc_ext(incpref, "diff")
compress = None
diff_tf = TempFileManager.new(diff)
diff_tf = TempFile.new(diff)
def init():
Rdiff.write_delta(new, mirror, diff_tf, compress)
RPath.copy_attribs(mirror, diff_tf)
rpath.copy_attribs(mirror, diff_tf)
return diff
return Robust.make_tf_robustaction(init, diff_tf, diff)
return robust.make_tf_robustaction(init, diff_tf, diff)
def makedir_action(mirrordir, incpref):
def makedir_action(mirrordir, incpref):
"""Make file indicating directory mirrordir has changed"""
dirsign = Inc.get_inc_ext(incpref, "dir")
tf = TempFileManager.new(dirsign)
dirsign = get_inc_ext(incpref, "dir")
tf = TempFile.new(dirsign)
def init():
tf.touch()
RPath.copy_attribs(mirrordir, tf)
rpath.copy_attribs(mirrordir, tf)
return dirsign
return Robust.make_tf_robustaction(init, tf, dirsign)
return robust.make_tf_robustaction(init, tf, dirsign)
def get_inc(rp, time, typestr):
def get_inc(rp, time, typestr):
"""Return increment like rp but with time and typestr suffixes"""
addtostr = lambda s: "%s.%s.%s" % (s, Time.timetostring(time), typestr)
if rp.index:
......@@ -111,7 +110,7 @@ class Inc:
if Globals.quoting_enabled: incrp.quote_path()
return incrp
def get_inc_ext(rp, typestr):
def get_inc_ext(rp, typestr):
"""Return increment with specified type and correct time
If the file exists, then probably a previous backup has been
......@@ -121,15 +120,16 @@ class Inc:
"""
inctime = 0
while 1:
inctime = Resume.FindTime(rp.index, inctime)
incrp = Inc.get_inc(rp, inctime, typestr)
#inctime = robust.Resume.FindTime(rp.index, inctime)
inctime = Time.prevtime
incrp = get_inc(rp, inctime, typestr)
if not incrp.lstat(): break
else:
assert 0, "Inc file already present"
return incrp
MakeStatic(Inc)
class IncrementITRB(StatsITRB):
class IncrementITRB(statistics.ITRB):
"""Patch and increment mirror directory
This has to be an ITR because directories that have files in them
......@@ -159,7 +159,7 @@ class IncrementITRB(StatsITRB):
def __init__(self, inc_rpath):
"""Set inc_rpath, an rpath of the base of the tree"""
self.inc_rpath = inc_rpath
StatsITRB.__init__(self)
statistics.ITRB.__init__(self)
def start_process(self, index, diff_rorp, dsrp):
"""Initial processing of file
......@@ -209,12 +209,12 @@ class IncrementITRB(StatsITRB):
"""
if not (incpref.lstat() and incpref.isdir()): incpref.mkdir()
if diff_rorp and diff_rorp.isreg() and diff_rorp.file:
tf = TempFileManager.new(dsrp)
tf = TempFile.new(dsrp)
def init():
RPathStatic.copy_with_attribs(diff_rorp, tf)
rpath.copy_with_attribs(diff_rorp, tf)
tf.set_attached_filetype(diff_rorp.get_attached_filetype())
def error(exc, ran_init, init_val): tf.delete()
RobustAction(init, None, error).execute()
robust.Action(init, None, error).execute()
self.directory_replacement = tf
def init_non_dir(self, dsrp, diff_rorp, incpref):
......@@ -223,16 +223,16 @@ class IncrementITRB(StatsITRB):
if diff_rorp.isreg() and (dsrp.isreg() or diff_rorp.isflaglinked()):
# Write updated mirror to temp file so we can compute
# reverse diff locally
mirror_tf = TempFileManager.new(dsrp)
old_dsrp_tf = TempFileManager.new(dsrp)
mirror_tf = TempFile.new(dsrp)
old_dsrp_tf = TempFile.new(dsrp)
def init_thunk():
if diff_rorp.isflaglinked():
Hardlink.link_rp(diff_rorp, mirror_tf, dsrp)
else: Rdiff.patch_with_attribs_action(dsrp, diff_rorp,
mirror_tf).execute()
self.incrp = Inc.Increment_action(mirror_tf, dsrp,
self.incrp = Increment_action(mirror_tf, dsrp,
incpref).execute()
if dsrp.lstat(): RPathStatic.rename(dsrp, old_dsrp_tf)
if dsrp.lstat(): rpath.rename(dsrp, old_dsrp_tf)
mirror_tf.rename(dsrp)
def final(init_val): old_dsrp_tf.delete()
......@@ -243,10 +243,10 @@ class IncrementITRB(StatsITRB):
if self.incrp: self.incrp.delete()
mirror_tf.delete()
RobustAction(init_thunk, final, error).execute()
else: self.incrp = Robust.chain(
Inc.Increment_action(diff_rorp, dsrp, incpref),
RORPIter.patchonce_action(None, dsrp, diff_rorp)).execute()[0]
robust.Action(init_thunk, final, error).execute()
else: self.incrp = robust.chain(
Increment_action(diff_rorp, dsrp, incpref),
rorpiter.patchonce_action(None, dsrp, diff_rorp)).execute()[0]
self.changed = 1
......@@ -257,14 +257,14 @@ class IncrementITRB(StatsITRB):
or self.directory_replacement):
if self.directory_replacement:
tf = self.directory_replacement
self.incrp = Robust.chain(
Inc.Increment_action(tf, dsrp, incpref),
RORPIter.patchonce_action(None, dsrp, tf)).execute()[0]
self.incrp = robust.chain(
Increment_action(tf, dsrp, incpref),
rorpiter.patchonce_action(None, dsrp, tf)).execute()[0]
tf.delete()
else:
self.incrp = Inc.Increment(diff_rorp, dsrp, incpref)
self.incrp = Increment(diff_rorp, dsrp, incpref)
if diff_rorp:
RORPIter.patchonce_action(None, dsrp, diff_rorp).execute()
rorpiter.patchonce_action(None, dsrp, diff_rorp).execute()
self.end_stats(diff_rorp, dsrp, self.incrp)
if self.mirror_isdirectory or dsrp.isdir():
......@@ -276,7 +276,7 @@ class IncrementITRB(StatsITRB):
def fast_process(self, index, diff_rorp, dsrp):
"""Just update statistics"""
StatsITRB.fast_process(self, dsrp)
statistics.ITRB.fast_process(self, dsrp)
def branch_process(self, branch):
"""Update statistics, and the has_changed flag if change in branch"""
......@@ -285,14 +285,14 @@ class IncrementITRB(StatsITRB):
self.add_file_stats(branch)
class MirrorITRB(StatsITRB):
class MirrorITRB(statistics.ITRB):
"""Like IncrementITR, but only patch mirror directory, don't increment"""
# This is always None since no increments will be created
incrp = None
def __init__(self, inc_rpath):
"""Set inc_rpath, an rpath of the base of the inc tree"""
self.inc_rpath = inc_rpath
StatsITRB.__init__(self)
statistics.ITRB.__init__(self)
def start_process(self, index, diff_rorp, mirror_dsrp):
"""Initialize statistics and do actual writing to mirror"""
......@@ -305,7 +305,7 @@ class MirrorITRB(StatsITRB):
mirror_dsrp.delete()
mirror_dsrp.mkdir()
elif diff_rorp and not diff_rorp.isplaceholder():
RORPIter.patchonce_action(None, mirror_dsrp, diff_rorp).execute()
rorpiter.patchonce_action(None, mirror_dsrp, diff_rorp).execute()
self.incpref = self.inc_rpath.new_index(index)
self.diff_rorp, self.mirror_dsrp = diff_rorp, mirror_dsrp
......@@ -314,7 +314,7 @@ class MirrorITRB(StatsITRB):
"""Update statistics when leaving"""
self.end_stats(self.diff_rorp, self.mirror_dsrp)
if self.mirror_dsrp.isdir():
RPathStatic.copy_attribs(self.diff_rorp, self.mirror_dsrp)
rpath.copy_attribs(self.diff_rorp, self.mirror_dsrp)
MiscStats.write_dir_stats_line(self, self.mirror_dsrp.index)
def can_fast_process(self, index, diff_rorp, mirror_dsrp):
......@@ -323,7 +323,7 @@ class MirrorITRB(StatsITRB):
def fast_process(self, index, diff_rorp, mirror_dsrp):
"""Just update statistics"""
StatsITRB.fast_process(self, mirror_dsrp)
statistics.ITRB.fast_process(self, mirror_dsrp)
def branch_process(self, branch):
"""Update statistics with subdirectory results"""
......@@ -331,9 +331,4 @@ class MirrorITRB(StatsITRB):
self.add_file_stats(branch)
from log import *
from rpath import *
from robust import *
from rorpiter import *
import Globals, Time, MiscStats
......@@ -20,7 +20,7 @@
"""Convert an iterator to a file object and vice-versa"""
import cPickle, array
import Globals, C
import Globals, C, robust, log
class IterFileException(Exception):
    """Raised on errors while converting between iterators and file objects."""
......@@ -200,7 +200,7 @@ class FileWrappingIter:
def addfromfile(self):
"""Read a chunk from the current file and return it"""
# Check file read for errors, buf = "" if find one
buf = Robust.check_common_error(self.read_error_handler,
buf = robust.check_common_error(self.read_error_handler,
self.currently_in_file.read,
[Globals.blocksize])
if not buf:
......@@ -210,7 +210,7 @@ class FileWrappingIter:
def read_error_handler(self, exc, blocksize):
"""Log error when reading from file"""
Log("Error '%s' reading from fileobj, truncating" % (str(exc),), 2)
log.Log("Error '%s' reading from fileobj, truncating" % (str(exc),), 2)
return ""
def _l2s_old(self, l):
......@@ -253,5 +253,4 @@ class BufferedRead:
def close(self): return self.file.close()
from log import *
from robust import *
......@@ -21,7 +21,8 @@
from __future__ import generators
import os, stat, types
from static import *
import static
class Iter:
"""Hold static methods for the manipulation of lazy iterators"""
......@@ -163,7 +164,7 @@ class Iter:
return tuple(map(make_iterator, range(num_of_forks)))
MakeStatic(Iter)
static.MakeStatic(Iter)
class IterMultiplex2:
......@@ -200,166 +201,3 @@ class IterMultiplex2:
else: elem = buf.pop(0) # a is in front, subtract an element
self.a_leading_by -= 1
yield elem
class IterTreeReducer:
    """Tree style reducer object for iterator

    The indicies of a RORPIter form a tree type structure.  This class
    can be used on each element of an iter in sequence and the result
    will be as if the corresponding tree was reduced.  This tries to
    bridge the gap between the tree nature of directories, and the
    iterator nature of the connection between hosts and the temporal
    order in which the files are processed.
    """
    def __init__(self, branch_class, branch_args):
        """ITR initializer

        branch_class -- ITRBranch subclass, instantiated once per subtree
        branch_args -- positional arguments for each branch_class call
        """
        self.branch_class = branch_class
        self.branch_args = branch_args
        # Index of the last element processed; None until the first call
        self.index = None
        self.root_branch = branch_class(*branch_args)
        # Stack of active branches; innermost (deepest) branch is last
        self.branches = [self.root_branch]

    def finish_branches(self, index):
        """Run Finish() on all branches index has passed

        When we pass out of a branch, delete it and process it with
        the parent.  The innermost branches will be the last in the
        list.  Return None if we are out of the entire tree, and 1
        otherwise.
        """
        branches = self.branches
        while 1:
            to_be_finished = branches[-1]
            base_index = to_be_finished.base_index
            if base_index != index[:len(base_index)]:
                # out of the tree, finish with to_be_finished
                to_be_finished.call_end_proc()
                del branches[-1]
                if not branches: return None
                branches[-1].branch_process(to_be_finished)
            else: return 1

    def add_branch(self, index):
        """Return branch of type self.branch_class, add to branch list"""
        branch = self.branch_class(*self.branch_args)
        branch.base_index = index
        self.branches.append(branch)
        return branch

    def process_w_branch(self, branch, args):
        """Run start_process on latest branch"""
        # Robust comes from this module's wildcard imports; errors in
        # start_process are routed to the branch's own on_error handler
        Robust.check_common_error(branch.on_error,
                                  branch.start_process, args)
        if not branch.caught_exception: branch.start_successful = 1

    def Finish(self):
        """Call at end of sequence to tie everything up"""
        # Pop and finish every remaining branch, folding each into its parent
        while 1:
            to_be_finished = self.branches.pop()
            to_be_finished.call_end_proc()
            if not self.branches: break
            self.branches[-1].branch_process(to_be_finished)

    def __call__(self, *args):
        """Process args, where args[0] is current position in iterator

        Returns true if args successfully processed, false if index is
        not in the current tree and thus the final result is
        available.

        Also note below we set self.index after doing the necessary
        start processing, in case there is a crash in the middle.
        """
        index = args[0]
        if self.index is None:
            # First element of the iteration: the root branch covers it
            self.root_branch.base_index = index
            self.process_w_branch(self.root_branch, args)
            self.index = index
            return 1

        if index <= self.index:
            # Out-of-order input; warn and skip rather than corrupt the tree
            Log("Warning: oldindex %s >= newindex %s" % (self.index, index), 2)
            return 1

        if self.finish_branches(index) is None:
            return None # We are no longer in the main tree
        last_branch = self.branches[-1]
        if last_branch.start_successful:
            if last_branch.can_fast_process(*args):
                # Leaf-like element: handle in place, no new branch
                last_branch.fast_process(*args)
            else:
                branch = self.add_branch(index)
                self.process_w_branch(branch, args)
        else: last_branch.log_prev_error(index)

        self.index = index
        return 1
class ITRBranch:
    """Helper class for IterTreeReducer below

    There are five stub functions below: start_process, end_process,
    branch_process, can_fast_process, and fast_process.  A class that
    subclasses this one will probably fill in these functions to do
    more.

    It is important that this class be pickable, so keep that in mind
    when subclassing (this is used to resume failed sessions).
    """
    # base_index: index of this branch's subtree root; index: current position
    base_index = index = None
    # finished: set once call_end_proc has run (guards double-finishing)
    finished = None
    # caught_exception / start_successful: error-state flags set during processing
    caught_exception = start_successful = None

    def call_end_proc(self):
        """Runs the end_process on self, checking for errors"""
        # If start never succeeded (or we already finished), treat as errored
        if self.finished or not self.start_successful:
            self.caught_exception = 1
        if self.caught_exception: self.log_prev_error(self.base_index)
        else: Robust.check_common_error(self.on_error, self.end_process)
        self.finished = 1

    def start_process(self, *args):
        """Do some initial processing (stub)"""
        pass

    def end_process(self):
        """Do any final processing before leaving branch (stub)"""
        pass

    def branch_process(self, branch):
        """Process a branch right after it is finished (stub)"""
        assert branch.finished
        pass

    def can_fast_process(self, *args):
        """True if object can be processed without new branch (stub)"""
        return None

    def fast_process(self, *args):
        """Process args without new child branch (stub)"""
        pass

    def on_error(self, exc, *args):
        """This is run on any exception in start/end-process"""
        self.caught_exception = 1
        # Derive a printable filename from the failing index, if available
        if args and args[0] and isinstance(args[0], tuple):
            filename = os.path.join(*args[0])
        elif self.index: filename = os.path.join(*self.index)
        else: filename = "."
        Log("Error '%s' processing %s" % (exc, filename), 2)

    def log_prev_error(self, index):
        """Call function if no pending exception"""
        Log("Skipping %s because of previous error" %
            (os.path.join(*index),), 2)
# Put at bottom to prevent (viciously) circular module dependencies
from robust import *
from log import *
......@@ -20,6 +20,7 @@
"""Manage logging, displaying and recording messages with required verbosity"""
import time, sys, traceback, types
import Globals
class LoggerError(Exception):
    """Raised when the logging subsystem is misused or misconfigured."""
......@@ -151,6 +152,7 @@ class Logger:
def FatalError(self, message):
self("Fatal Error: " + message, 1)
import Main
Main.cleanup()
sys.exit(1)
......@@ -180,4 +182,4 @@ class Logger:
logging_func(self.exception_to_string(), verbosity)
Log = Logger()
import Globals, Main
......@@ -20,32 +20,30 @@
"""list, delete, and otherwise manage increments"""
from __future__ import generators
from static import *
from log import *
import Globals, Time
from log import Log
import Globals, Time, static, manage
class ManageException(Exception):
    """Raised on errors while listing or deleting increments."""
class Manage:
def get_file_type(rp):
def get_file_type(rp):
"""Returns one of "regular", "directory", "missing", or "special"."""
if not rp.lstat(): return "missing"
elif rp.isdir(): return "directory"
elif rp.isreg(): return "regular"
else: return "special"
def get_inc_type(inc):
def get_inc_type(inc):
"""Return file type increment represents"""
assert inc.isincfile()
type = inc.getinctype()
if type == "dir": return "directory"
elif type == "diff": return "regular"
elif type == "missing": return "missing"
elif type == "snapshot": return Manage.get_file_type(inc)
elif type == "snapshot": return get_file_type(inc)
else: assert None, "Unknown type %s" % (type,)
def describe_incs_parsable(incs, mirror_time, mirrorrp):
def describe_incs_parsable(incs, mirror_time, mirrorrp):
"""Return a string parsable by computer describing the increments
Each line is a time in seconds of the increment, and then the
......@@ -60,12 +58,11 @@ class Manage:
"""
incpairs = [(Time.stringtotime(inc.getinctime()), inc) for inc in incs]
incpairs.sort()
result = ["%s %s" % (time, Manage.get_inc_type(inc))
for time, inc in incpairs]
result.append("%s %s" % (mirror_time, Manage.get_file_type(mirrorrp)))
result = ["%s %s" % (time, get_inc_type(inc)) for time, inc in incpairs]
result.append("%s %s" % (mirror_time, get_file_type(mirrorrp)))
return "\n".join(result)
def describe_incs_human(incs, mirror_time, mirrorrp):
def describe_incs_human(incs, mirror_time, mirrorrp):
"""Return a string describing all the the root increments"""
incpairs = [(Time.stringtotime(inc.getinctime()), inc) for inc in incs]
incpairs.sort()
......@@ -77,7 +74,7 @@ class Manage:
result.append("Current mirror: %s" % Time.timetopretty(mirror_time))
return "\n".join(result)
def delete_earlier_than(baserp, time):
def delete_earlier_than(baserp, time):
"""Deleting increments older than time in directory baserp
time is in seconds. It will then delete any empty directories
......@@ -85,9 +82,9 @@ class Manage:
rdiff-backup-data directory should be the root of the tree.
"""
baserp.conn.Manage.delete_earlier_than_local(baserp, time)
baserp.conn.manage.delete_earlier_than_local(baserp, time)
def delete_earlier_than_local(baserp, time):
def delete_earlier_than_local(baserp, time):
"""Like delete_earlier_than, but run on local connection for speed"""
assert baserp.conn is Globals.local_connection
def yield_files(rp):
......@@ -104,8 +101,6 @@ class Manage:
Log("Deleting increment file %s" % rp.path, 5)
rp.delete()
MakeStatic(Manage)
class IncObj:
"""Increment object - represent a completed increment"""
......
......@@ -20,14 +20,15 @@
"""Read increment files and restore to original"""
from __future__ import generators
import tempfile
from static import *
import tempfile, os
from log import Log
import Globals, Time, Rdiff, Hardlink, FilenameMapping, SetConnections, \
rorpiter, selection, destructive_stepping, rpath, lazy
class RestoreError(Exception):
    """Raised when an increment cannot be restored to its original form."""
class Restore:
def Restore(inc_rpath, mirror, target, rest_time):
def Restore(inc_rpath, mirror, target, rest_time):
"""Recursively restore inc_rpath and mirror to target at rest_time
Like restore_recusive below, but with a more friendly
......@@ -41,28 +42,28 @@ class Restore:
same index as mirror.
"""
if not isinstance(target, DSRPath): target = DSRPath(None, target)
if not isinstance(target, destructive_stepping.DSRPath):
target = destructive_stepping.DSRPath(None, target)
mirror_time = Restore.get_mirror_time()
rest_time = Restore.get_rest_time(rest_time, mirror_time)
inc_list = Restore.get_inclist(inc_rpath)
mirror_time = get_mirror_time()
rest_time = get_rest_time(rest_time, mirror_time)
inc_list = get_inclist(inc_rpath)
rid = RestoreIncrementData(inc_rpath.index, inc_rpath, inc_list)
rid.sortincseq(rest_time, mirror_time)
Restore.check_hardlinks(rest_time)
Restore.restore_recursive(inc_rpath.index, mirror, rid, target,
check_hardlinks(rest_time)
restore_recursive(inc_rpath.index, mirror, rid, target,
rest_time, mirror_time)
def get_mirror_time():
def get_mirror_time():
"""Return the time (in seconds) of latest mirror"""
current_mirror_incs = \
Restore.get_inclist(Globals.rbdir.append("current_mirror"))
current_mirror_incs = get_inclist(Globals.rbdir.append("current_mirror"))
if not current_mirror_incs:
Log.FatalError("Could not get time of current mirror")
elif len(current_mirror_incs) > 1:
Log("Warning, two different dates for current mirror found", 2)
return Time.stringtotime(current_mirror_incs[0].getinctime())
def get_rest_time(old_rest_time, mirror_time):
def get_rest_time(old_rest_time, mirror_time):
"""If old_rest_time is between two increments, return older time
There is a slightly tricky reason for doing this: The rest of
......@@ -77,7 +78,7 @@ class Restore:
older one here.
"""
base_incs = Restore.get_inclist(Globals.rbdir.append("increments"))
base_incs = get_inclist(Globals.rbdir.append("increments"))
if not base_incs: return old_rest_time
inctimes = [Time.stringtotime(inc.getinctime()) for inc in base_incs]
inctimes.append(mirror_time)
......@@ -86,19 +87,19 @@ class Restore:
else: # restore time older than oldest increment, just return that
return min(inctimes)
def get_inclist(inc_rpath):
def get_inclist(inc_rpath):
"""Returns increments with given base"""
dirname, basename = inc_rpath.dirsplit()
parent_dir = RPath(inc_rpath.conn, dirname, ())
parent_dir = rpath.RPath(inc_rpath.conn, dirname, ())
if not parent_dir.isdir(): return [] # inc directory not created yet
index = inc_rpath.index
if index:
get_inc_ext = lambda filename: \
RPath(inc_rpath.conn, inc_rpath.base,
rpath.RPath(inc_rpath.conn, inc_rpath.base,
inc_rpath.index[:-1] + (filename,))
else: get_inc_ext = lambda filename: \
RPath(inc_rpath.conn, os.path.join(dirname, filename))
rpath.RPath(inc_rpath.conn, os.path.join(dirname, filename))
inc_list = []
for filename in parent_dir.listdir():
......@@ -107,7 +108,7 @@ class Restore:
inc_list.append(inc)
return inc_list
def check_hardlinks(rest_time):
def check_hardlinks(rest_time):
"""Check for hard links and enable hard link support if found"""
if (Globals.preserve_hardlinks != 0 and
Hardlink.retrieve_final(rest_time)):
......@@ -116,7 +117,7 @@ class Restore:
SetConnections.UpdateGlobal('preserve_hardlinks', 1)
else: SetConnections.UpdateGlobal('preserve_hardlinks', None)
def restore_recursive(index, mirror, rid, target, time, mirror_time):
def restore_recursive(index, mirror, rid, target, time, mirror_time):
"""Recursive restore function.
rid is a RestoreIncrementData object whose inclist is already
......@@ -128,18 +129,18 @@ class Restore:
"foo/bar".
"""
assert isinstance(target, DSRPath)
assert isinstance(target, destructive_stepping.DSRPath)
assert mirror.index == rid.index
target_finalizer = IterTreeReducer(DestructiveSteppingFinalizer, ())
for rcd in Restore.yield_rcds(rid.index, mirror, rid,
target, time, mirror_time):
target_finalizer = rorpiter.IterTreeReducer(
rorpiter.DestructiveSteppingFinalizer, ())
for rcd in yield_rcds(rid.index, mirror, rid, target, time, mirror_time):
rcd.RestoreFile()
#if rcd.mirror: mirror_finalizer(rcd.index, rcd.mirror)
target_finalizer(rcd.target.index, rcd.target)
target_finalizer.Finish()
def yield_rcds(index, mirrorrp, rid, target, rest_time, mirror_time):
def yield_rcds(index, mirrorrp, rid, target, rest_time, mirror_time):
"""Iterate RestoreCombinedData objects starting with given args
rid is a RestoreCombinedData object. target is an rpath where
......@@ -159,7 +160,7 @@ class Restore:
if mirrorrp and mirrorrp.isdir() or \
rid and rid.inc_rpath and rid.inc_rpath.isdir():
sub_rcds = Restore.yield_sub_rcds(index, mirrorrp, rid,
sub_rcds = yield_sub_rcds(index, mirrorrp, rid,
target, rest_time, mirror_time)
else: sub_rcds = None
......@@ -175,20 +176,20 @@ class Restore:
yield first
for sub_rcd in sub_rcds: yield sub_rcd
def yield_sub_rcds(index, mirrorrp, rid, target, rest_time, mirror_time):
def yield_sub_rcds(index, mirrorrp, rid, target, rest_time, mirror_time):
"""Yield collated tuples from inside given args"""
if not Restore.check_dir_exists(mirrorrp, rid): return
mirror_iter = Restore.yield_mirrorrps(mirrorrp)
rid_iter = Restore.yield_rids(rid, rest_time, mirror_time)
if not check_dir_exists(mirrorrp, rid): return
mirror_iter = yield_mirrorrps(mirrorrp)
rid_iter = yield_rids(rid, rest_time, mirror_time)
for indexed_tup in RORPIter.CollateIterators(mirror_iter, rid_iter):
for indexed_tup in rorpiter.CollateIterators(mirror_iter, rid_iter):
index = indexed_tup.index
new_mirrorrp, new_rid = indexed_tup
for rcd in Restore.yield_rcds(index, new_mirrorrp,
new_rid, target.append(index[-1]), rest_time, mirror_time):
for rcd in yield_rcds(index, new_mirrorrp, new_rid,
target.append(index[-1]), rest_time, mirror_time):
yield rcd
def check_dir_exists(mirrorrp, rid):
def check_dir_exists(mirrorrp, rid):
"""Return true if target should be a directory"""
if rid and rid.inc_list:
# Incs say dir if last (earliest) one is a dir increment
......@@ -196,18 +197,18 @@ class Restore:
elif mirrorrp: return mirrorrp.isdir() # if no incs, copy mirror
else: return None
def yield_mirrorrps(mirrorrp):
def yield_mirrorrps(mirrorrp):
"""Yield mirrorrps underneath given mirrorrp"""
if mirrorrp and mirrorrp.isdir():
if Globals.quoting_enabled:
for rp in FilenameMapping.get_quoted_dir_children(mirrorrp):
for rp in selection.get_quoted_dir_children(mirrorrp):
yield rp
else:
dirlist = mirrorrp.listdir()
dirlist.sort()
for filename in dirlist: yield mirrorrp.append(filename)
def yield_rids(rid, rest_time, mirror_time):
def yield_rids(rid, rest_time, mirror_time):
"""Yield RestoreIncrementData objects within given rid dir
If the rid doesn't correspond to a directory, don't yield any
......@@ -251,8 +252,6 @@ class Restore:
rid.sortincseq(rest_time, mirror_time)
yield rid
MakeStatic(Restore)
class RestoreIncrementData:
"""Contains information about a specific index from the increments dir
......@@ -339,7 +338,7 @@ class RestoreCombinedData:
if not self.inc_list or self.inc_list[0].getinctype() == "diff":
assert self.mirror and self.mirror.lstat(), \
"No base to go with incs for %s" % self.target.path
RPath.copy_with_attribs(self.mirror, self.target)
rpath.copy_with_attribs(self.mirror, self.target)
for inc in self.inc_list: self.applyinc(inc, self.target)
def log(self):
......@@ -353,7 +352,7 @@ class RestoreCombinedData:
"""Hard link target and return true if hard linking appropriate"""
if (Globals.preserve_hardlinks and
Hardlink.restore_link(self.index, self.target)):
RPath.copy_attribs(self.inc_list and self.inc_list[-1] or
rpath.copy_attribs(self.inc_list and self.inc_list[-1] or
self.mirror, self.target)
return 1
return None
......@@ -377,13 +376,8 @@ class RestoreCombinedData:
elif inctype == "snapshot":
if inc.isinccompressed():
target.write_from_fileobj(inc.open("rb", compress = 1))
else: RPath.copy(inc, target)
else: rpath.copy(inc, target)
else: raise RestoreError("Unknown inctype %s" % inctype)
RPath.copy_attribs(inc, target)
rpath.copy_attribs(inc, target)
from log import *
from destructive_stepping import *
from rpath import *
from rorpiter import *
import Globals, Time, Rdiff, Hardlink, FilenameMapping, SetConnections
......@@ -46,13 +46,16 @@ able to narrow down the possibilities.
"""
import tempfile, errno, signal, cPickle, C
from static import *
import os, time
from log import Log
import Time, librsync, errno, signal, cPickle, C, \
Hardlink, TempFile, static, rpath, Globals
class RobustAction:
class Action:
"""Represents a file operation to be accomplished later"""
def __init__(self, init_thunk, final_func, error_handler):
"""RobustAction initializer
"""Action initializer
All the thunks are functions whose return value will be
ignored. init_thunk should not make any irreversible changes
......@@ -96,10 +99,8 @@ class RobustAction:
def default_error_handler(self, exc, ran_init, init_val): pass
class Robust:
"""Contains various methods designed to make things safer"""
null_action = RobustAction(None, None, None)
def chain(*robust_action_list):
null_action = Action(None, None, None)
def chain(*robust_action_list):
"""Return chain tying together a number of robust actions
The whole chain will be aborted if some error occurs in
......@@ -122,9 +123,9 @@ class Robust:
ra.error_handler(exc, 1, init_val)
for ra in ras_with_started_inits[len(init_return_vals):]:
ra.error_handler(exc, None, None)
return RobustAction(init, final, error)
return Action(init, final, error)
def chain_nested(*robust_action_list):
def chain_nested(*robust_action_list):
"""Like chain but final actions performed in reverse order"""
ras_with_started_inits, init_vals = [], []
def init():
......@@ -144,19 +145,19 @@ class Robust:
ra.error_handler(exc, 1, init_val)
for ra in ras_with_started_inits[len(init_vals):]:
ra.error_handler(exc, None, None)
return RobustAction(init, final, error)
return Action(init, final, error)
def make_tf_robustaction(init_thunk, tempfiles, final_renames = None):
"""Shortcut RobustAction creator when only tempfiles involved
def make_tf_robustaction(init_thunk, tempfiles, final_renames = None):
"""Shortcut Action creator when only tempfiles involved
Often the robust action will just consist of some initial
stage, renaming tempfiles in the final stage, and deleting
them if there is an error. This function makes it easier to
create RobustActions of that type.
create Actions of that type.
"""
if isinstance(tempfiles, TempFile): tempfiles = (tempfiles,)
if isinstance(final_renames, RPath): final_renames = (final_renames,)
if isinstance(tempfiles, TempFile.TempFile): tempfiles = (tempfiles,)
if isinstance(final_renames, rpath.RPath): final_renames = (final_renames,)
if final_renames is None: final_renames = [None] * len(tempfiles)
assert len(tempfiles) == len(final_renames)
......@@ -169,9 +170,9 @@ class Robust:
return init_val
def error(exc, ran_init, init_val):
for tf in tempfiles: tf.delete()
return RobustAction(init_thunk, final, error)
return Action(init_thunk, final, error)
def copy_action(rorpin, rpout):
def copy_action(rorpin, rpout):
"""Return robust action copying rorpin to rpout
The source can be a rorp or an rpath. Does not recurse. If
......@@ -182,9 +183,9 @@ class Robust:
tfl = [None] # Need some mutable state to hold tf value
def init():
if not (rorpin.isdir() and rpout.isdir()): # already a dir
tfl[0] = tf = TempFileManager.new(rpout)
tfl[0] = tf = TempFile.new(rpout)
if rorpin.isreg(): tf.write_from_fileobj(rorpin.open("rb"))
else: RPath.copy(rorpin, tf)
else: rpath.copy(rorpin, tf)
return tf
else: return None
def final(tf):
......@@ -194,33 +195,33 @@ class Robust:
return rpout
def error(exc, ran_init, init_val):
if tfl[0]: tfl[0].delete()
return RobustAction(init, final, error)
return Action(init, final, error)
def copy_with_attribs_action(rorpin, rpout, compress = None):
def copy_with_attribs_action(rorpin, rpout, compress = None):
"""Like copy_action but also copy attributes"""
tfl = [None] # Need some mutable state for error handler
def init():
if not (rorpin.isdir() and rpout.isdir()): # already a dir
tfl[0] = tf = TempFileManager.new(rpout)
tfl[0] = tf = TempFile.new(rpout)
if rorpin.isreg():
tf.write_from_fileobj(rorpin.open("rb"), compress)
else: RPath.copy(rorpin, tf)
else: rpath.copy(rorpin, tf)
if tf.lstat(): # Some files, like sockets, won't be created
RPathStatic.copy_attribs(rorpin, tf)
rpath.copy_attribs(rorpin, tf)
return tf
else: return None
def final(tf):
if rorpin.isdir() and rpout.isdir():
RPath.copy_attribs(rorpin, rpout)
rpath.copy_attribs(rorpin, rpout)
elif tf and tf.lstat():
if rpout.isdir(): rpout.delete() # can't rename over dir
tf.rename(rpout)
return rpout
def error(exc, ran_init, init_val):
if tfl[0]: tfl[0].delete()
return RobustAction(init, final, error)
return Action(init, final, error)
def copy_attribs_action(rorpin, rpout):
def copy_attribs_action(rorpin, rpout):
"""Return action which just copies attributes
Copying attributes is already pretty atomic, so just run
......@@ -228,31 +229,31 @@ class Robust:
"""
def final(init_val):
RPath.copy_attribs(rorpin, rpout)
rpath.copy_attribs(rorpin, rpout)
return rpout
return RobustAction(None, final, None)
return Action(None, final, None)
def symlink_action(rpath, linktext):
def symlink_action(rpath, linktext):
"""Return symlink action by moving one file over another"""
tf = TempFileManager.new(rpath)
tf = TempFile.new(rpath)
def init(): tf.symlink(linktext)
return Robust.make_tf_robustaction(init, tf, rpath)
return make_tf_robustaction(init, tf, rpath)
def destructive_write_action(rp, s):
def destructive_write_action(rp, s):
"""Return action writing string s to rpath rp in robust way
This will overwrite any data currently in rp.
"""
tf = TempFileManager.new(rp)
tf = TempFile.new(rp)
def init():
fp = tf.open("wb")
fp.write(s)
fp.close()
tf.setdata()
return Robust.make_tf_robustaction(init, tf, rp)
return make_tf_robustaction(init, tf, rp)
def check_common_error(error_handler, function, args = []):
def check_common_error(error_handler, function, args = []):
"""Apply function to args, if error, run error_handler on exception
This uses the catch_error predicate below to only catch
......@@ -262,7 +263,7 @@ class Robust:
try: return function(*args)
except Exception, exc:
TracebackArchive.add([function] + list(args))
if Robust.catch_error(exc):
if catch_error(exc):
Log.exception()
conn = Globals.backup_writer
if conn is not None: # increment error count
......@@ -273,12 +274,11 @@ class Robust:
Log.exception(1, 2)
raise
def catch_error(exc):
def catch_error(exc):
"""Return true if exception exc should be caught"""
for exception_class in (SkipFileException, DSRPPermError,
RPathException, Rdiff.RdiffException,
librsync.librsyncError,
C.UnknownFileTypeError):
for exception_class in (rpath.SkipFileException, rpath.RPathException,
librsync.librsyncError, C.UnknownFileTypeError):
if isinstance(exc, exception_class): return 1
if (isinstance(exc, EnvironmentError) and
errno.errorcode[exc[0]] in ('EPERM', 'ENOENT', 'EACCES', 'EBUSY',
......@@ -288,25 +288,23 @@ class Robust:
return 1
return 0
def listrp(rp):
def listrp(rp):
"""Like rp.listdir() but return [] if error, and sort results"""
def error_handler(exc):
Log("Error listing directory %s" % rp.path, 2)
return []
dir_listing = Robust.check_common_error(error_handler, rp.listdir)
dir_listing = check_common_error(error_handler, rp.listdir)
dir_listing.sort()
return dir_listing
def signal_handler(signum, frame):
def signal_handler(signum, frame):
"""This is called when signal signum is caught"""
raise SignalException(signum)
def install_signal_handlers():
def install_signal_handlers():
"""Install signal handlers on current connection"""
for signum in [signal.SIGQUIT, signal.SIGHUP, signal.SIGTERM]:
signal.signal(signum, Robust.signal_handler)
MakeStatic(Robust)
signal.signal(signum, signal_handler)
class SignalException(Exception):
......@@ -335,91 +333,7 @@ class TracebackArchive:
"-------------------------------------------" %
("\n".join(cls._traceback_strings),), 3)
MakeClass(TracebackArchive)
class TempFileManager:
	"""Manage temp files

	Keeps a connection-local registry of outstanding temp files so
	that they can all be cleaned up before rdiff-backup exits.
	"""
	# This is a connection-specific list of temp files, to be cleaned
	# up before rdiff-backup exits.
	_tempfiles = []

	# To make collisions less likely, this gets put in the file name
	# and incremented whenever a new file is requested.
	_tfindex = 0

	def new(cls, rp_base, same_dir = 1):
		"""Return new tempfile that isn't in use.

		If same_dir, tempfile will be in same directory as rp_base.
		Otherwise, use tempfile module to get filename.

		"""
		conn = rp_base.conn
		# Delegate to the connection that owns rp_base so the temp
		# file is registered (and later cleaned up) on that side.
		if conn is not Globals.local_connection:
			return conn.TempFileManager.new(rp_base, same_dir)

		def find_unused(conn, dir):
			"""Find an unused tempfile with connection conn in directory dir"""
			while 1:
				# Wrap the counter so generated names stay short.
				if cls._tfindex > 100000000:
					Log("Resetting index", 2)
					cls._tfindex = 0
				tf = TempFile(conn, os.path.join(dir,
							   "rdiff-backup.tmp.%d" % cls._tfindex))
				cls._tfindex = cls._tfindex+1
				# A failed lstat() means no file by that name exists yet.
				if not tf.lstat(): return tf

		if same_dir: tf = find_unused(conn, rp_base.dirsplit()[0])
		else: tf = TempFile(conn, tempfile.mktemp())
		cls._tempfiles.append(tf)
		return tf

	def remove_listing(cls, tempfile):
		"""Remove listing of tempfile"""
		# Forward to the owning connection if the file isn't local.
		if Globals.local_connection is not tempfile.conn:
			tempfile.conn.TempFileManager.remove_listing(tempfile)
		elif tempfile in cls._tempfiles: cls._tempfiles.remove(tempfile)

	def delete_all(cls):
		"""Delete all remaining tempfiles"""
		# Iterate over a copy: tf.delete() calls remove_listing(),
		# which mutates cls._tempfiles while we loop.
		for tf in cls._tempfiles[:]: tf.delete()

# Presumably converts the def's above into classmethods (see the
# static module) -- confirm against static.MakeClass.
MakeClass(TempFileManager)
from rpath import *
class TempFile(RPath):
	"""Like an RPath, but keep track of which ones are still here"""
	def rename(self, rp_dest):
		"""Rename temp file to permanent location, possibly overwriting"""
		if self.isdir() and not rp_dest.isdir():
			# Cannot move a directory directly over another file
			rp_dest.delete()
		if (isinstance(rp_dest, DSRPath) and rp_dest.delay_perms
			and not self.hasfullperms()):
			# If we are moving to a delayed perm directory, delay
			# permission change on destination.
			rp_dest.chmod(self.getperms())
			self.chmod(0700)
		RPathStatic.rename(self, rp_dest)

		# Sometimes this just seems to fail silently, as in one
		# hardlinked twin is moved over the other.  So check to make
		# sure below.
		self.setdata()
		if self.lstat():
			# First rename didn't take effect: remove the destination
			# and retry once before giving up with an error.
			rp_dest.delete()
			RPathStatic.rename(self, rp_dest)
			self.setdata()
			if self.lstat(): raise OSError("Cannot rename tmp file correctly")
		# Renamed away successfully -- no longer a temp file to track.
		TempFileManager.remove_listing(self)

	def delete(self):
		"""Delete the file and drop it from the manager's registry"""
		RPath.delete(self)
		TempFileManager.remove_listing(self)
static.MakeClass(TracebackArchive)
class SaveState:
......@@ -470,9 +384,8 @@ class SaveState:
if last_file_rorp:
symtext = apply(os.path.join,
('increments',) + last_file_rorp.index)
return Robust.symlink_action(cls._last_file_sym, symtext)
else: return RobustAction(None, lambda init_val: cls.touch_last_file(),
None)
return symlink_action(cls._last_file_sym, symtext)
else: return Action(None, lambda init_val: cls.touch_last_file(), None)
def checkpoint(cls, ITR, finalizer, last_file_rorp, override = None):
"""Save states of tree reducer and finalizer during inc backup
......@@ -486,8 +399,7 @@ class SaveState:
cls._last_checkpoint_time = time.time()
Log("Writing checkpoint time %s" % cls._last_checkpoint_time, 7)
state_string = cPickle.dumps((ITR, finalizer))
Robust.chain(Robust.destructive_write_action(cls._checkpoint_rp,
state_string),
chain(destructive_write_action(cls._checkpoint_rp, state_string),
cls.record_last_file_action(last_file_rorp)).execute()
def checkpoint_needed(cls):
......@@ -500,7 +412,7 @@ class SaveState:
for rp in Resume.get_relevant_rps(): rp.delete()
if Globals.preserve_hardlinks: Hardlink.remove_all_checkpoints()
MakeClass(SaveState)
static.MakeClass(SaveState)
class ResumeException(Exception):
......@@ -527,7 +439,7 @@ class Resume:
for si in cls.get_sis_covering_index(index):
if si.time > later_than: return si.time
raise SkipFileException("Index %s already covered, skipping" %
raise rpath.SkipFileException("Index %s already covered, skipping" %
str(index))
def get_sis_covering_index(cls, index):
......@@ -667,7 +579,7 @@ class Resume:
return None
assert None
MakeClass(Resume)
static.MakeClass(Resume)
class ResumeSessionInfo:
......@@ -691,8 +603,3 @@ class ResumeSessionInfo:
self.ITR, self.finalizer, = ITR, finalizer
from log import *
from destructive_stepping import *
import Time, Rdiff, librsync
from highlevel import *
......@@ -17,31 +17,25 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
"""Operations on Iterators of Read Only Remote Paths"""
"""Operations on Iterators of Read Only Remote Paths
from __future__ import generators
import tempfile, UserList, types, librsync
from static import *
from log import *
from rpath import *
from robust import *
from iterfile import *
import Globals, Rdiff, Hardlink
The main structure will be an iterator that yields RORPaths.
Every RORPath has a "raw" form that makes it more amenable to
being turned into a file. The raw form of the iterator yields
each RORPath in the form of the tuple (index, data_dictionary,
files), where files is the number of files attached (usually 1 or
0). After that, if a file is attached, it yields that file.
class RORPIterException(Exception): pass
"""
class RORPIter:
"""Functions relating to iterators of Read Only RPaths
from __future__ import generators
import tempfile, UserList, types, librsync, Globals, Rdiff, \
Hardlink, robust, log, static, rpath, iterfile, TempFile
The main structure will be an iterator that yields RORPaths.
Every RORPath has a "raw" form that makes it more amenable to
being turned into a file. The raw form of the iterator yields
each RORPath in the form of the tuple (index, data_dictionary,
files), where files is the number of files attached (usually 1 or
0). After that, if a file is attached, it yields that file.
"""
def ToRaw(rorp_iter):
class RORPIterException(Exception): pass
def ToRaw(rorp_iter):
"""Convert a rorp iterator to raw form"""
for rorp in rorp_iter:
if rorp.file:
......@@ -49,37 +43,37 @@ class RORPIter:
yield rorp.file
else: yield (rorp.index, rorp.data, 0)
def FromRaw(raw_iter):
def FromRaw(raw_iter):
"""Convert raw rorp iter back to standard form"""
for index, data, num_files in raw_iter:
rorp = RORPath(index, data)
rorp = rpath.RORPath(index, data)
if num_files:
assert num_files == 1, "Only one file accepted right now"
rorp.setfile(RORPIter.getnext(raw_iter))
rorp.setfile(getnext(raw_iter))
yield rorp
def ToFile(rorp_iter):
def ToFile(rorp_iter):
"""Return file version of iterator"""
return FileWrappingIter(RORPIter.ToRaw(rorp_iter))
return iterfile.FileWrappingIter(ToRaw(rorp_iter))
def FromFile(fileobj):
def FromFile(fileobj):
"""Recover rorp iterator from file interface"""
return RORPIter.FromRaw(IterWrappingFile(fileobj))
return FromRaw(iterfile.IterWrappingFile(fileobj))
def IterateRPaths(base_rp):
def IterateRPaths(base_rp):
"""Return an iterator yielding RPaths with given base rp"""
yield base_rp
if base_rp.isdir():
dirlisting = base_rp.listdir()
dirlisting.sort()
for filename in dirlisting:
for rp in RORPIter.IterateRPaths(base_rp.append(filename)):
for rp in IterateRPaths(base_rp.append(filename)):
yield rp
def Signatures(rp_iter):
def Signatures(rp_iter):
"""Yield signatures of rpaths in given rp_iter"""
def error_handler(exc, rp):
Log("Error generating signature for %s" % rp.path)
log.Log("Error generating signature for %s" % rp.path)
return None
for rp in rp_iter:
......@@ -89,17 +83,17 @@ class RORPIter:
if rp.isreg():
if rp.isflaglinked(): rorp.flaglinked()
else:
fp = Robust.check_common_error(
fp = robust.check_common_error(
error_handler, Rdiff.get_signature, (rp,))
if fp: rorp.setfile(fp)
else: continue
yield rorp
def GetSignatureIter(base_rp):
def GetSignatureIter(base_rp):
"""Return a signature iterator recurring over the base_rp"""
return RORPIter.Signatures(RORPIter.IterateRPaths(base_rp))
return Signatures(IterateRPaths(base_rp))
def CollateIterators(*rorp_iters):
def CollateIterators(*rorp_iters):
"""Collate RORPath iterators by index
So it takes two or more iterators of rorps and returns an
......@@ -111,7 +105,7 @@ class RORPIter:
# rorps[i] is None means that it is time to replenish it.
iter_num = len(rorp_iters)
if iter_num == 2:
return RORPIter.Collate2Iters(rorp_iters[0], rorp_iters[1])
return Collate2Iters(rorp_iters[0], rorp_iters[1])
overflow = [None] * iter_num
rorps = overflow[:]
......@@ -144,7 +138,7 @@ class RORPIter:
yield IndexedTuple(index, yieldval)
return yield_tuples(iter_num, overflow, rorps)
def Collate2Iters(riter1, riter2):
def Collate2Iters(riter1, riter2):
"""Special case of CollateIterators with 2 arguments
This does the same thing but is faster because it doesn't have
......@@ -181,13 +175,13 @@ class RORPIter:
yield IndexedTuple(index2, (None, relem2))
relem2 = None
def getnext(iter):
def getnext(iter):
"""Return the next element of an iterator, raising error if none"""
try: next = iter.next()
except StopIteration: raise RORPIterException("Unexpected end to iter")
return next
def GetDiffIter(sig_iter, new_iter):
def GetDiffIter(sig_iter, new_iter):
"""Return delta iterator from sig_iter to new_iter
The accompanying file for each will be a delta as produced by
......@@ -198,14 +192,14 @@ class RORPIter:
full RPaths.
"""
collated_iter = RORPIter.CollateIterators(sig_iter, new_iter)
for rorp, rp in collated_iter: yield RORPIter.diffonce(rorp, rp)
collated_iter = CollateIterators(sig_iter, new_iter)
for rorp, rp in collated_iter: yield diffonce(rorp, rp)
def diffonce(sig_rorp, new_rp):
def diffonce(sig_rorp, new_rp):
"""Return one diff rorp, based from signature rorp and orig rp"""
if sig_rorp and Globals.preserve_hardlinks and sig_rorp.isflaglinked():
if new_rp: diff_rorp = new_rp.getRORPath()
else: diff_rorp = RORPath(sig_rorp.index)
else: diff_rorp = rpath.RORPath(sig_rorp.index)
diff_rorp.flaglinked()
return diff_rorp
elif sig_rorp and sig_rorp.isreg() and new_rp and new_rp.isreg():
......@@ -224,41 +218,39 @@ class RORPIter:
else:
# Just send over originial if diff isn't appropriate
if sig_rorp: sig_rorp.close_if_necessary()
if not new_rp: return RORPath(sig_rorp.index)
if not new_rp: return rpath.RORPath(sig_rorp.index)
elif new_rp.isreg():
diff_rorp = new_rp.getRORPath(1)
diff_rorp.set_attached_filetype('snapshot')
return diff_rorp
else: return new_rp.getRORPath()
def PatchIter(base_rp, diff_iter):
def PatchIter(base_rp, diff_iter):
"""Patch the appropriate rps in basis_iter using diff_iter"""
basis_iter = RORPIter.IterateRPaths(base_rp)
collated_iter = RORPIter.CollateIterators(basis_iter, diff_iter)
basis_iter = IterateRPaths(base_rp)
collated_iter = CollateIterators(basis_iter, diff_iter)
for basisrp, diff_rorp in collated_iter:
RORPIter.patchonce_action(base_rp, basisrp, diff_rorp).execute()
patchonce_action(base_rp, basisrp, diff_rorp).execute()
def patchonce_action(base_rp, basisrp, diff_rorp):
def patchonce_action(base_rp, basisrp, diff_rorp):
"""Return action patching basisrp using diff_rorp"""
assert diff_rorp, "Missing diff index %s" % basisrp.index
if not diff_rorp.lstat():
return RobustAction(None, lambda init_val: basisrp.delete(), None)
return robust.Action(None, lambda init_val: basisrp.delete(), None)
if Globals.preserve_hardlinks and diff_rorp.isflaglinked():
if not basisrp: basisrp = base_rp.new_index(diff_rorp.index)
tf = TempFileManager.new(basisrp)
tf = TempFile.new(basisrp)
def init(): Hardlink.link_rp(diff_rorp, tf, basisrp)
return Robust.make_tf_robustaction(init, tf, basisrp)
return robust.make_tf_robustaction(init, tf, basisrp)
elif basisrp and basisrp.isreg() and diff_rorp.isreg():
if diff_rorp.get_attached_filetype() != 'diff':
raise RPathException("File %s appears to have changed during"
raise rpath.RPathException("File %s appears to have changed during"
" processing, skipping" % (basisrp.path,))
return Rdiff.patch_with_attribs_action(basisrp, diff_rorp)
else: # Diff contains whole file, just copy it over
if not basisrp: basisrp = base_rp.new_index(diff_rorp.index)
return Robust.copy_with_attribs_action(diff_rorp, basisrp)
MakeStatic(RORPIter)
return robust.copy_with_attribs_action(diff_rorp, basisrp)
class IndexedTuple(UserList.UserList):
......@@ -299,3 +291,300 @@ class IndexedTuple(UserList.UserList):
def __str__(self):
return "(%s).%s" % (", ".join(map(str, self.data)), self.index)
class DirHandler:
	"""Handle directories when entering and exiting in mirror

	The problem is that we may need to write to a directory that may
	have only read and exec permissions.  Also, when leaving a
	directory tree, we may have modified the directory and thus
	changed the mod and access times.  These need to be updated when
	leaving.

	"""
	def __init__(self, rootrp):
		"""DirHandler initializer - call with root rpath of mirror dir"""
		self.rootrp = rootrp
		assert rootrp.index == ()
		self.cur_dir_index = None # Current directory we have descended into
		self.last_index = None # last index processed

		# This dictionary maps indicies to (rpath, (atime, mtime),
		# perms) triples.  Either or both of the time pair and perms
		# can be None, which means not to update the times or the
		# perms when leaving.  We don't have to update the perms if we
		# didn't have to change them in the first place.  If a
		# directory is explicitly given, then we don't have to update
		# anything because it will be done by the normal process.
		self.index_dict = {}

	def process_old_directories(self, new_dir_index):
		"""Update times/permissions for directories we are leaving

		Returns greatest index of the current index that has been seen
		before (i.e. no need to process up to then as new dir).

		"""
		if self.cur_dir_index is None: return -1 # no previous directory
		# Walk up from the deepest level of the previous directory,
		# restoring each level that the new index no longer shares.
		i = len(self.cur_dir_index)
		while 1:
			if new_dir_index[:i] == self.cur_dir_index[:i]:
				return i
			self.process_old_dir(self.cur_dir_index[:i])
			i-=1

	def process_old_dir(self, dir_index):
		"""Process outstanding changes for given dir index"""
		rpath, times, perms = self.index_dict[dir_index]
		# apply() is the Python 2 builtin: rpath.settime(*times)
		if times: apply(rpath.settime, times)
		if perms: rpath.chmod(perms)

	def init_new_dirs(self, rpath, new_dir_index, common_dir_index):
		"""Initialize any new directories

		Record the time, and change permissions if no write access.
		Use rpath if it is given to access permissions and times.

		NOTE(review): range() stops at len(new_dir_index)-1, so
		new_dir_index itself is never entered into index_dict (and the
		rpath.index == process_index branch looks unreachable, since
		rpath.index is never a strict prefix of itself).  If
		process_old_directories later processes the full index this
		would KeyError -- confirm whether
		range(common_dir_index, len(new_dir_index)+1) was intended.
		"""
		for i in range(common_dir_index, len(new_dir_index)):
			process_index = new_dir_index[:i]
			if rpath.index == process_index:
				# Directory given explicitly; normal processing will
				# set its times/perms, so record nothing to undo.
				self.index_dict[process_index] = (None, None, None)
			else:
				new_rpath = self.rootrp.new_index(process_index)
				# Only remember perms when we would need write access
				# restored; None means "leave alone on exit".
				if new_rpath.hasfullperms(): perms = None
				else: perms = new_rpath.getperms()
				times = (new_rpath.getatime(), new_rpath.getmtime())
				self.index_dict[process_index] = new_rpath, times, perms

	def __call__(self, rpath):
		"""Given rpath, process containing directories"""
		if rpath.isdir(): new_dir_index = rpath.index
		elif not rpath.index: return # no directory contains root
		else: new_dir_index = rpath.index[:-1]

		common_dir_index = self.process_old_directories(new_dir_index)
		self.init_new_dirs(rpath, new_dir_index, common_dir_index)
		self.cur_dir_index = new_dir_index

	def Finish(self):
		"""Process any remaining directories"""
		# Restore deepest directories first (reverse sorted order) so
		# parents' times are fixed after their children are done.
		indicies = self.index_dict.keys()
		indicies.sort()
		assert len(indicies) >= 1, indicies
		indicies.reverse()
		map(self.process_old_dir, indicies)
def FillInIter(rpiter, rootrp):
	"""Fill in missing parent indicies of rpiter with rpaths from rootrp

	For instance, suppose rpiter contains rpaths with indicies (),
	(1,2), (2,5).  Then return iter with rpaths (), (1,), (1,2), (2,),
	(2,5).  This is used when we need to process directories before or
	after processing a file in that directory.

	"""
	# The first element needs its whole ancestor chain emitted.
	head = rpiter.next()		# let StopIteration propagate to caller
	prev_index = head.index
	for depth in range(len(prev_index)):
		yield rootrp.new_index(prev_index[:depth])
	yield head
	del head

	# For the rest, emit only the ancestors not already covered by
	# the previously yielded index.
	for rp in rpiter:
		idx = rp.index
		if idx[:-1] != prev_index[:-1]:
			# Parent changed; walk down and fill each missing level
			# (depth 0, the root, was handled with the first element).
			for depth in range(1, len(idx)):
				if idx[:depth] != prev_index[:depth]:
					yield rootrp.new_index(idx[:depth])
		yield rp
		prev_index = idx
class IterTreeReducer:
	"""Tree style reducer object for iterator

	The indicies of a RORPIter form a tree type structure.  This class
	can be used on each element of an iter in sequence and the result
	will be as if the corresponding tree was reduced.  This tries to
	bridge the gap between the tree nature of directories, and the
	iterator nature of the connection between hosts and the temporal
	order in which the files are processed.

	"""
	def __init__(self, branch_class, branch_args):
		"""ITR initializer

		branch_class is instantiated with branch_args for the root and
		for every subtree entered later (see add_branch).
		"""
		self.branch_class = branch_class
		self.branch_args = branch_args
		self.index = None # last index processed, None until first call
		self.root_branch = branch_class(*branch_args)
		# Stack of open branches; innermost (deepest) branch is last.
		self.branches = [self.root_branch]

	def finish_branches(self, index):
		"""Run Finish() on all branches index has passed

		When we pass out of a branch, delete it and process it with
		the parent.  The innermost branches will be the last in the
		list.  Return None if we are out of the entire tree, and 1
		otherwise.

		"""
		branches = self.branches
		while 1:
			to_be_finished = branches[-1]
			base_index = to_be_finished.base_index
			if base_index != index[:len(base_index)]:
				# out of the tree, finish with to_be_finished
				to_be_finished.call_end_proc()
				del branches[-1]
				if not branches: return None
				branches[-1].branch_process(to_be_finished)
			else: return 1

	def add_branch(self, index):
		"""Return branch of type self.branch_class, add to branch list"""
		branch = self.branch_class(*self.branch_args)
		branch.base_index = index
		self.branches.append(branch)
		return branch

	def process_w_branch(self, branch, args):
		"""Run start_process on latest branch

		Errors are routed through branch.on_error via
		robust.check_common_error; success is recorded on the branch.
		"""
		robust.check_common_error(branch.on_error,
				  branch.start_process, args)
		if not branch.caught_exception: branch.start_successful = 1

	def Finish(self):
		"""Call at end of sequence to tie everything up"""
		# Unwind the remaining branch stack, merging each finished
		# branch into its parent as we go.
		while 1:
			to_be_finished = self.branches.pop()
			to_be_finished.call_end_proc()
			if not self.branches: break
			self.branches[-1].branch_process(to_be_finished)

	def __call__(self, *args):
		"""Process args, where args[0] is current position in iterator

		Returns true if args successfully processed, false if index is
		not in the current tree and thus the final result is
		available.

		Also note below we set self.index after doing the necessary
		start processing, in case there is a crash in the middle.

		"""
		index = args[0]
		if self.index is None:
			# First element: it becomes the root branch's base.
			self.root_branch.base_index = index
			self.process_w_branch(self.root_branch, args)
			self.index = index
			return 1

		if index <= self.index:
			# Out-of-order input; warn and skip rather than corrupt
			# the branch stack.
			log.Log("Warning: oldindex %s >= newindex %s" %
				(self.index, index), 2)
			return 1

		if self.finish_branches(index) is None:
			return None # We are no longer in the main tree
		last_branch = self.branches[-1]
		if last_branch.start_successful:
			if last_branch.can_fast_process(*args):
				last_branch.fast_process(*args)
			else:
				# Element opens a new subtree; give it its own branch.
				branch = self.add_branch(index)
				self.process_w_branch(branch, args)
		else: last_branch.log_prev_error(index)

		self.index = index
		return 1
class ITRBranch:
    """Base class for the branches driven by an IterTreeReducer

    Five of the methods here are stubs meant to be overridden:
    start_process, end_process, branch_process, can_fast_process and
    fast_process.

    Instances must stay picklable, so keep that in mind when
    subclassing (pickling is used to resume failed sessions).
    """
    # Class-level defaults: a fresh branch carries no state yet.
    base_index = index = None
    finished = None
    caught_exception = start_successful = None

    def call_end_proc(self):
        """Runs the end_process on self, checking for errors"""
        if self.finished or not self.start_successful:
            self.caught_exception = 1
        if not self.caught_exception:
            robust.check_common_error(self.on_error, self.end_process)
        else:
            self.log_prev_error(self.base_index)
        self.finished = 1

    def start_process(self, *args):
        """Do some initial processing (stub)"""
        pass

    def end_process(self):
        """Do any final processing before leaving branch (stub)"""
        pass

    def branch_process(self, branch):
        """Process a branch right after it is finished (stub)"""
        assert branch.finished
        pass

    def can_fast_process(self, *args):
        """True if object can be processed without new branch (stub)"""
        return None

    def fast_process(self, *args):
        """Process args without new child branch (stub)"""
        pass

    def on_error(self, exc, *args):
        """This is run on any exception in start/end-process"""
        self.caught_exception = 1
        if args and args[0] and isinstance(args[0], tuple):
            path = os.path.join(*args[0])
        elif self.index:
            path = os.path.join(*self.index)
        else:
            path = "."
        log.Log("Error '%s' processing %s" % (exc, path), 2)

    def log_prev_error(self, index):
        """Call function if no pending exception"""
        log.Log("Skipping %s because of previous error" %
                (os.path.join(*index),), 2)
class DestructiveSteppingFinalizer(ITRBranch):
    """Finalizer branch for iterators of dsrpaths

    An IterTreeReducer is needed because ordinary files can be updated
    as soon as they are seen, while a directory must wait until
    everything inside it has been processed before its own changes are
    finally written back.
    """
    dsrpath = None

    def start_process(self, index, dsrpath):
        self.dsrpath = dsrpath

    def end_process(self):
        if not self.dsrpath: return
        self.dsrpath.write_changes()

    def can_fast_process(self, index, dsrpath):
        # NOTE(review): checks self.dsrpath (the branch's stored path),
        # not the dsrpath argument — preserved as-is; confirm intended.
        return not self.dsrpath.isdir()

    def fast_process(self, index, dsrpath):
        if not self.dsrpath: return
        self.dsrpath.write_changes()
......@@ -35,15 +35,23 @@ are dealing with are local or remote.
"""
import os, stat, re, sys, shutil, gzip, socket
from static import *
import os, stat, re, sys, shutil, gzip, socket, time, shutil
import Globals, FilenameMapping, Time, static, log
class SkipFileException(Exception):
    """Signal that the current file should be skipped but then continue

    Typically raised when an individual file cannot be read; the rest
    of the backup should still keep going.
    """
    pass
class RPathException(Exception):
    """Generic error raised by the rpath routines."""
    pass
class RPathStatic:
"""Contains static methods for use with RPaths"""
def copyfileobj(inputfp, outputfp):
def copyfileobj(inputfp, outputfp):
"""Copies file inputfp to outputfp in blocksize intervals"""
blocksize = Globals.blocksize
while 1:
......@@ -51,7 +59,7 @@ class RPathStatic:
if not inbuf: break
outputfp.write(inbuf)
def cmpfileobj(fp1, fp2):
def cmpfileobj(fp1, fp2):
"""True if file objects fp1 and fp2 contain same data"""
blocksize = Globals.blocksize
while 1:
......@@ -60,31 +68,31 @@ class RPathStatic:
if buf1 != buf2: return None
elif not buf1: return 1
def check_for_files(*rps):
def check_for_files(*rps):
"""Make sure that all the rps exist, raise error if not"""
for rp in rps:
if not rp.lstat():
raise RPathException("File %s does not exist" % rp.path)
def move(rpin, rpout):
def move(rpin, rpout):
"""Move rpin to rpout, renaming if possible"""
try: RPath.rename(rpin, rpout)
try: rename(rpin, rpout)
except os.error:
RPath.copy(rpin, rpout)
copy(rpin, rpout)
rpin.delete()
def copy(rpin, rpout):
def copy(rpin, rpout):
"""Copy RPath rpin to rpout. Works for symlinks, dirs, etc."""
Log("Regular copying %s to %s" % (rpin.index, rpout.path), 6)
log.Log("Regular copying %s to %s" % (rpin.index, rpout.path), 6)
if not rpin.lstat():
raise RPathException, ("File %s does not exist" % rpin.index)
if rpout.lstat():
if rpin.isreg() or not RPath.cmp(rpin, rpout):
if rpin.isreg() or not cmp(rpin, rpout):
rpout.delete() # easier to write that compare
else: return
if rpin.isreg(): RPath.copy_reg_file(rpin, rpout)
if rpin.isreg(): copy_reg_file(rpin, rpout)
elif rpin.isdir(): rpout.mkdir()
elif rpin.issym(): rpout.symlink(rpin.readlink())
elif rpin.ischardev():
......@@ -97,7 +105,7 @@ class RPathStatic:
elif rpin.issock(): rpout.mksock()
else: raise RPathException("File %s has unknown type" % rpin.path)
def copy_reg_file(rpin, rpout):
def copy_reg_file(rpin, rpout):
"""Copy regular file rpin to rpout, possibly avoiding connection"""
try:
if rpout.conn is rpin.conn:
......@@ -107,18 +115,18 @@ class RPathStatic:
except AttributeError: pass
rpout.write_from_fileobj(rpin.open("rb"))
def cmp(rpin, rpout):
def cmp(rpin, rpout):
"""True if rpin has the same data as rpout
cmp does not compare file ownership, permissions, or times, or
examine the contents of a directory.
"""
RPath.check_for_files(rpin, rpout)
check_for_files(rpin, rpout)
if rpin.isreg():
if not rpout.isreg(): return None
fp1, fp2 = rpin.open("rb"), rpout.open("rb")
result = RPathStatic.cmpfileobj(fp1, fp2)
result = cmpfileobj(fp1, fp2)
if fp1.close() or fp2.close():
raise RPathException("Error closing file")
return result
......@@ -135,28 +143,29 @@ class RPathStatic:
elif rpin.issock(): return rpout.issock()
else: raise RPathException("File %s has unknown type" % rpin.path)
def copy_attribs(rpin, rpout):
def copy_attribs(rpin, rpout):
"""Change file attributes of rpout to match rpin
Only changes the chmoddable bits, uid/gid ownership, and
timestamps, so both must already exist.
"""
Log("Copying attributes from %s to %s" % (rpin.index, rpout.path), 7)
RPath.check_for_files(rpin, rpout)
log.Log("Copying attributes from %s to %s" %
(rpin.index, rpout.path), 7)
check_for_files(rpin, rpout)
if rpin.issym(): return # symlinks have no valid attributes
if Globals.change_ownership: apply(rpout.chown, rpin.getuidgid())
rpout.chmod(rpin.getperms())
if not rpin.isdev(): rpout.setmtime(rpin.getmtime())
def cmp_attribs(rp1, rp2):
def cmp_attribs(rp1, rp2):
"""True if rp1 has the same file attributes as rp2
Does not compare file access times. If not changing
ownership, do not check user/group id.
"""
RPath.check_for_files(rp1, rp2)
check_for_files(rp1, rp2)
if Globals.change_ownership and rp1.getuidgid() != rp2.getuidgid():
result = None
elif rp1.getperms() != rp2.getperms(): result = None
......@@ -165,50 +174,41 @@ class RPathStatic:
elif rp1.isblkdev() and rp2.isblkdev(): result = 1
elif rp1.ischardev() and rp2.ischardev(): result = 1
else: result = (rp1.getmtime() == rp2.getmtime())
Log("Compare attribs %s and %s: %s" % (rp1.path, rp2.path, result), 7)
log.Log("Compare attribs of %s and %s: %s" %
(rp1.path, rp2.path, result), 7)
return result
def copy_with_attribs(rpin, rpout):
def copy_with_attribs(rpin, rpout):
"""Copy file and then copy over attributes"""
RPath.copy(rpin, rpout)
RPath.copy_attribs(rpin, rpout)
copy(rpin, rpout)
copy_attribs(rpin, rpout)
def quick_cmp_with_attribs(rp1, rp2):
def quick_cmp_with_attribs(rp1, rp2):
"""Quicker version of cmp_with_attribs
Instead of reading all of each file, assume that regular files
are the same if the attributes compare.
"""
if not RPath.cmp_attribs(rp1, rp2): return None
if not cmp_attribs(rp1, rp2): return None
if rp1.isreg() and rp2.isreg() and (rp1.getlen() == rp2.getlen()):
return 1
return RPath.cmp(rp1, rp2)
return cmp(rp1, rp2)
def cmp_with_attribs(rp1, rp2):
def cmp_with_attribs(rp1, rp2):
"""Combine cmp and cmp_attribs"""
return RPath.cmp_attribs(rp1, rp2) and RPath.cmp(rp1, rp2)
return cmp_attribs(rp1, rp2) and cmp(rp1, rp2)
def rename(rp_source, rp_dest):
def rename(rp_source, rp_dest):
"""Rename rp_source to rp_dest"""
assert rp_source.conn is rp_dest.conn
Log(lambda: "Renaming %s to %s" % (rp_source.path, rp_dest.path), 7)
log.Log(lambda: "Renaming %s to %s" %
(rp_source.path, rp_dest.path), 7)
rp_source.conn.os.rename(rp_source.path, rp_dest.path)
rp_dest.data = rp_source.data
rp_source.data = {'type': None}
# If we are moving to a DSRPath, assume that the current times
# are the intended ones. We need to save them now in case
# they are changed later.
if isinstance(rp_dest, DSRPath):
if rp_dest.delay_mtime:
if 'mtime' in rp_dest.data:
rp_dest.setmtime(rp_dest.data['mtime'])
if rp_dest.delay_atime:
if 'atime' in rp_dest.data:
rp_dest.setatime(rp_dest.data['atime'])
def tupled_lstat(filename):
def tupled_lstat(filename):
"""Like os.lstat, but return only a tuple, or None if os.error
Later versions of os.lstat return a special lstat object,
......@@ -219,7 +219,7 @@ class RPathStatic:
try: return tuple(os.lstat(filename))
except os.error: return None
def make_socket_local(rpath):
def make_socket_local(rpath):
"""Make a local socket at the given path
This takes an rpath so that it will be checked by Security.
......@@ -232,20 +232,18 @@ class RPathStatic:
except socket.error, exc:
raise SkipFileException("Socket error: " + str(exc))
def gzip_open_local_read(rpath):
def gzip_open_local_read(rpath):
"""Return open GzipFile. See security note directly above"""
assert rpath.conn is Globals.local_connection
return gzip.GzipFile(rpath.path, "rb")
def open_local_read(rpath):
def open_local_read(rpath):
"""Return open file (provided for security reasons)"""
assert rpath.conn is Globals.local_connection
return open(rpath.path, "rb")
MakeStatic(RPathStatic)
class RORPath(RPathStatic):
class RORPath:
"""Read Only RPath - carry information about a path
These contain information about a file, and possible the file's
......@@ -280,7 +278,7 @@ class RORPath(RPathStatic):
def equal_verbose(self, other):
"""Like __eq__, but log more information. Useful when testing"""
if self.index != other.index:
Log("Index %s != index %s" % (self.index, other.index), 2)
log.Log("Index %s != index %s" % (self.index, other.index), 2)
return None
for key in self.data.keys(): # compare dicts key by key
......@@ -289,15 +287,15 @@ class RORPath(RPathStatic):
# Don't compare gid/uid for symlinks or if not change_ownership
pass
elif key == 'mtime':
Log("%s differs only in mtime, skipping" % (self.path,), 2)
log.Log("%s differs only in mtime, skipping" % (self.path,), 2)
elif key == 'atime' and not Globals.preserve_atime: pass
elif key == 'devloc' or key == 'inode' or key == 'nlink': pass
elif key == 'size' and not self.isreg(): pass
elif (not other.data.has_key(key) or
self.data[key] != other.data[key]):
if not other.data.has_key(key):
Log("Second is missing key %s" % (key,), 2)
else: Log("Value of %s differs: %s vs %s" %
log.Log("Second is missing key %s" % (key,), 2)
else: log.Log("Value of %s differs: %s vs %s" %
(key, self.data[key], other.data[key]), 2)
return None
return 1
......@@ -548,7 +546,7 @@ class RPath(RORPath):
def make_file_dict_old(self):
"""Create the data dictionary"""
statblock = self.conn.RPathStatic.tupled_lstat(self.path)
statblock = self.conn.rpath.tupled_lstat(self.path)
if statblock is None:
return {'type':None}
data = {}
......@@ -614,14 +612,14 @@ class RPath(RORPath):
def settime(self, accesstime, modtime):
"""Change file modification times"""
Log("Setting time of %s to %d" % (self.path, modtime), 7)
log.Log("Setting time of %s to %d" % (self.path, modtime), 7)
self.conn.os.utime(self.path, (accesstime, modtime))
self.data['atime'] = accesstime
self.data['mtime'] = modtime
def setmtime(self, modtime):
"""Set only modtime (access time to present)"""
Log(lambda: "Setting time of %s to %d" % (self.path, modtime), 7)
log.Log(lambda: "Setting time of %s to %d" % (self.path, modtime), 7)
self.conn.os.utime(self.path, (time.time(), modtime))
self.data['mtime'] = modtime
......@@ -632,12 +630,12 @@ class RPath(RORPath):
self.data['gid'] = gid
def mkdir(self):
Log("Making directory " + self.path, 6)
log.Log("Making directory " + self.path, 6)
self.conn.os.mkdir(self.path)
self.setdata()
def rmdir(self):
Log("Removing directory " + self.path, 6)
log.Log("Removing directory " + self.path, 6)
self.conn.os.rmdir(self.path)
self.data = {'type': None}
......@@ -664,13 +662,13 @@ class RPath(RORPath):
def mksock(self):
"""Make a socket at self.path"""
self.conn.RPathStatic.make_socket_local(self)
self.conn.rpath.make_socket_local(self)
self.setdata()
assert self.issock()
def touch(self):
"""Make sure file at self.path exists"""
Log("Touching " + self.path, 7)
log.Log("Touching " + self.path, 7)
self.conn.open(self.path, "w").close()
self.setdata()
assert self.isreg()
......@@ -704,15 +702,14 @@ class RPath(RORPath):
def delete(self):
"""Delete file at self.path. Recursively deletes directories."""
Log("Deleting %s" % self.path, 7)
log.Log("Deleting %s" % self.path, 7)
self.setdata()
if not self.lstat():
Log("Warning: %s does not exist---deleted in meantime?"
log.Log("Warning: %s does not exist---deleted in meantime?"
% (self.path,), 2)
elif self.isdir():
itm = IterTreeReducer(RpathDeleter, [])
for rp in Select(self).set_iter(): itm(rp.index, rp)
itm.Finish()
try: self.rmdir()
except os.error: shutil.rmtree(self.path)
else: self.conn.os.unlink(self.path)
self.setdata()
......@@ -784,11 +781,11 @@ class RPath(RORPath):
if compress:
if mode == "r" or mode == "rb":
return self.conn.RPathStatic.gzip_open_local_read(self)
return self.conn.rpath.gzip_open_local_read(self)
else: return self.conn.gzip.GzipFile(self.path, mode)
else:
if mode == "r" or mode == "rb":
return self.conn.RPathStatic.open_local_read(self)
return self.conn.rpath.open_local_read(self)
else: return self.conn.open(self.path, mode)
def write_from_fileobj(self, fp, compress = None):
......@@ -798,10 +795,10 @@ class RPath(RORPath):
written to self.
"""
Log("Writing file object to " + self.path, 7)
log.Log("Writing file object to " + self.path, 7)
assert not self.lstat(), "File %s already exists" % self.path
outfp = self.open("wb", compress = compress)
RPath.copyfileobj(fp, outfp)
copyfileobj(fp, outfp)
if fp.close() or outfp.close():
raise RPathException("Error closing file")
self.setdata()
......@@ -890,20 +887,20 @@ class RPathFileHook:
# Import these late to avoid circular dependencies
import FilenameMapping
from lazy import *
from selection import *
from highlevel import *
class RpathDeleter(ITRBranch):
    """Delete a directory. Called by RPath.delete()"""
    def start_process(self, index, rp):
        self.rp = rp

    def end_process(self):
        if not self.rp.isdir():
            self.rp.delete()
        else:
            self.rp.rmdir()

    def can_fast_process(self, index, rp):
        return not rp.isdir()

    def fast_process(self, index, rp):
        rp.delete()
#import FilenameMapping
#from lazy import *
#from selection import *
#from highlevel import *
#class RpathDeleter(ITRBranch):
# """Delete a directory. Called by RPath.delete()"""
# def start_process(self, index, rp):
# self.rp = rp
#
# def end_process(self):
# if self.rp.isdir(): self.rp.rmdir()
# else: self.rp.delete()
#
# def can_fast_process(self, index, rp): return not rp.isdir()
# def fast_process(self, index, rp): rp.delete()
......@@ -26,9 +26,8 @@ documentation on what this code does can be found on the man page.
from __future__ import generators
import re
from log import *
from robust import *
import FilenameMapping
from log import Log
import FilenameMapping, robust, rpath, Globals
class SelectError(Exception):
......@@ -81,7 +80,7 @@ class Select:
# This re should not match normal filenames, but usually just globs
glob_re = re.compile("(.*[*?[]|ignorecase\\:)", re.I | re.S)
def __init__(self, rpath, quoted_filenames = None):
def __init__(self, rootrp, quoted_filenames = None):
"""Select initializer. rpath is the root directory
When files have quoted characters in them, quoted_filenames
......@@ -89,9 +88,9 @@ class Select:
version.
"""
assert isinstance(rpath, RPath)
assert isinstance(rootrp, rpath.RPath)
self.selection_functions = []
self.rpath = rpath
self.rpath = rootrp
self.prefix = self.rpath.path
self.quoting_on = Globals.quoting_enabled and quoted_filenames
......@@ -141,8 +140,8 @@ class Select:
and should be included iff something inside is included.
"""
for filename in Robust.listrp(rpath):
new_rpath = Robust.check_common_error(error_handler,
for filename in robust.listrp(rpath):
new_rpath = robust.check_common_error(error_handler,
rpath.append, (filename,))
if new_rpath:
s = sel_func(new_rpath)
......@@ -204,12 +203,12 @@ class Select:
return None
if self.quoting_on:
for subdir in FilenameMapping.get_quoted_dir_children(rpath):
for subdir in get_quoted_dir_children(rpath):
for rp in rec_func(subdir, rec_func, sel_func):
yield rp
else:
for filename in Robust.listrp(rpath):
new_rp = Robust.check_common_error(
for filename in robust.listrp(rpath):
new_rp = robust.check_common_error(
error_handler, rpath.append, [filename])
if new_rp:
for rp in rec_func(new_rp, rec_func, sel_func):
......@@ -646,3 +645,22 @@ probably isn't what you meant.""" %
return res
def get_quoted_dir_children(rpath):
    """Return sorted list of quoted children of directory rpath

    This used to be in FilenameMapping, but was moved because it
    depends on the robust.listrp routine.
    """
    if not rpath.isdir(): return []
    # Pair each child with its unquoted name so the sort follows the
    # real index rather than the quoted spelling.
    pairs = [(FilenameMapping.unquote(name), name)
             for name in robust.listrp(rpath)]
    pairs.sort()
    children = []
    for unquoted, name in pairs:
        child = rpath.append(unquoted)
        child.quote_path()
        children.append(child)
    return children
......@@ -19,9 +19,8 @@
"""Generate and process aggregated backup information"""
from lazy import *
import re
import re, os
import Globals, TempFile, robust, Time, rorpiter
class StatsException(Exception): pass
......@@ -216,12 +215,12 @@ class StatsObj:
def write_stats_to_rp(self, rp):
"""Write statistics string to given rpath"""
tf = TempFileManager.new(rp)
tf = TempFile.new(rp)
def init_thunk():
fp = tf.open("w")
fp.write(self.get_stats_string())
fp.close()
Robust.make_tf_robustaction(init_thunk, (tf,), (rp,)).execute()
robust.make_tf_robustaction(init_thunk, (tf,), (rp,)).execute()
def read_stats_from_rp(self, rp):
"""Set statistics from rpath, return self for convenience"""
......@@ -264,7 +263,7 @@ class StatsObj:
return s
class StatsITRB(ITRBranch, StatsObj):
class ITRB(rorpiter.ITRBranch, StatsObj):
"""Keep track of per directory statistics
This is subclassed by the mirroring and incrementing ITRs.
......@@ -339,7 +338,6 @@ class StatsITRB(ITRBranch, StatsObj):
self.__dict__[attr] += branch.__dict__[attr]
from log import *
from increment import *
from robust import *
import Globals
"""commontest - Some functions and constants common to several test cases"""
import os, sys
from rdiff_backup.rpath import *
from rdiff_backup.destructive_stepping import *
from rdiff_backup.highlevel import *
from rdiff_backup import Globals, Hardlink, SetConnections, Main
from rdiff_backup.log import Log
from rdiff_backup.rpath import RPath
from rdiff_backup import Globals, Hardlink, SetConnections, Main, \
selection, highlevel, lazy, Time, rpath
SourceDir = "../src"
AbsCurdir = os.getcwd() # Absolute path name of current directory
......@@ -13,7 +13,7 @@ __no_execute__ = 1 # Keeps the actual rdiff-backup program from running
def Myrm(dirstring):
"""Run myrm on given directory string"""
assert not os.system("%s/myrm %s" % (MiscDir, dirstring))
assert not os.system("rm -rf %s" % (dirstring,))
def Make():
"""Make sure the rdiff-backup script in the source dir is up-to-date"""
......@@ -96,8 +96,8 @@ def InternalMirror(source_local, dest_local, src_dir, dest_dir):
"""
# Save attributes of root to restore later
src_root = RPath(Globals.local_connection, src_dir)
dest_root = RPath(Globals.local_connection, dest_dir)
src_root = rpath.RPath(Globals.local_connection, src_dir)
dest_root = rpath.RPath(Globals.local_connection, dest_dir)
dest_rbdir = dest_root.append("rdiff-backup-data")
dest_incdir = dest_rbdir.append("increments")
......@@ -109,9 +109,9 @@ def InternalMirror(source_local, dest_local, src_dir, dest_dir):
InternalBackup(source_local, dest_local, src_dir, dest_dir)
dest_root.setdata()
dest_rbdir.delete()
Myrm(dest_rbdir.path)
# Restore old attributes
RPathStatic.copy_attribs(src_root, dest_root)
rpath.copy_attribs(src_root, dest_root)
def InternalRestore(mirror_local, dest_local, mirror_dir, dest_dir, time):
"""Restore mirror_dir to dest_dir at given time
......@@ -133,7 +133,7 @@ def InternalRestore(mirror_local, dest_local, mirror_dir, dest_dir, time):
mirror_rp, dest_rp = cmd_schemas2rps([mirror_dir, dest_dir], remote_schema)
Time.setcurtime()
inc = get_increment_rp(mirror_rp, time)
if inc: Main.restore(get_increment_rp(mirror_rp, time), dest_rp)
if inc: Main.Restore(get_increment_rp(mirror_rp, time), dest_rp)
else: # use alternate syntax
Main.restore_timestr = str(time)
Main.RestoreAsOf(mirror_rp, dest_rp)
......@@ -173,7 +173,8 @@ def CompareRecursive(src_rp, dest_rp, compare_hardlinks = 1,
Log("Comparing %s and %s, hardlinks %s" % (src_rp.path, dest_rp.path,
compare_hardlinks), 3)
src_select, dest_select = Select(src_rp), Select(dest_rp)
src_select = selection.Select(src_rp)
dest_select = selection.Select(dest_rp)
if ignore_tmp_files:
# Ignoring temp files can be useful when we want to check the
......@@ -231,16 +232,17 @@ def CompareRecursive(src_rp, dest_rp, compare_hardlinks = 1,
Hardlink.get_indicies(dest_rorp, None)), 3)
return None
if equality_func: result = Iter.equal(dsiter1, dsiter2, 1, equality_func)
if equality_func: result = lazy.Iter.equal(dsiter1, dsiter2,
1, equality_func)
elif compare_hardlinks:
dsiter1 = Hardlink.add_rorp_iter(dsiter1, 1)
dsiter2 = Hardlink.add_rorp_iter(dsiter2, None)
if exclude_rbdir:
result = Iter.equal(dsiter1, dsiter2, 1, hardlink_equal)
else: result = Iter.equal(dsiter1, dsiter2, 1, rbdir_equal)
result = lazy.Iter.equal(dsiter1, dsiter2, 1, hardlink_equal)
else: result = lazy.Iter.equal(dsiter1, dsiter2, 1, rbdir_equal)
elif not exclude_rbdir:
result = Iter.equal(dsiter1, dsiter2, 1, rbdir_equal)
else: result = Iter.equal(dsiter1, dsiter2, 1)
result = lazy.Iter.equal(dsiter1, dsiter2, 1, rbdir_equal)
else: result = lazy.Iter.equal(dsiter1, dsiter2, 1)
for i in dsiter1: pass # make sure all files processed anyway
for i in dsiter2: pass
......@@ -269,12 +271,12 @@ def BackupRestoreSeries(source_local, dest_local, list_of_dirnames,
"""
Globals.set('preserve_hardlinks', compare_hardlinks)
time = 10000
dest_rp = RPath(Globals.local_connection, dest_dirname)
restore_rp = RPath(Globals.local_connection, restore_dirname)
dest_rp = rpath.RPath(Globals.local_connection, dest_dirname)
restore_rp = rpath.RPath(Globals.local_connection, restore_dirname)
os.system(MiscDir + "/myrm " + dest_dirname)
Myrm(dest_dirname)
for dirname in list_of_dirnames:
src_rp = RPath(Globals.local_connection, dirname)
src_rp = rpath.RPath(Globals.local_connection, dirname)
reset_hardlink_dicts()
_reset_connections(src_rp, dest_rp)
......@@ -287,10 +289,10 @@ def BackupRestoreSeries(source_local, dest_local, list_of_dirnames,
time = 10000
for dirname in list_of_dirnames[:-1]:
reset_hardlink_dicts()
os.system(MiscDir + "/myrm " + restore_dirname)
Myrm(restore_dirname)
InternalRestore(dest_local, source_local, dest_dirname,
restore_dirname, time)
src_rp = RPath(Globals.local_connection, dirname)
src_rp = rpath.RPath(Globals.local_connection, dirname)
assert CompareRecursive(src_rp, restore_rp)
# Restore should default back to newest time older than it
......@@ -304,11 +306,11 @@ def MirrorTest(source_local, dest_local, list_of_dirnames,
dest_dirname = "testfiles/output"):
"""Mirror each of list_of_dirnames, and compare after each"""
Globals.set('preserve_hardlinks', compare_hardlinks)
dest_rp = RPath(Globals.local_connection, dest_dirname)
dest_rp = rpath.RPath(Globals.local_connection, dest_dirname)
os.system(MiscDir + "/myrm " + dest_dirname)
Myrm(dest_dirname)
for dirname in list_of_dirnames:
src_rp = RPath(Globals.local_connection, dirname)
src_rp = rpath.RPath(Globals.local_connection, dirname)
reset_hardlink_dicts()
_reset_connections(src_rp, dest_rp)
......
import unittest, types, tempfile, os, sys
from commontest import *
from rdiff_backup.connection import *
from rdiff_backup import Globals
from rdiff_backup import Globals, rpath
class LocalConnectionTest(unittest.TestCase):
"""Test the dummy connection"""
......@@ -104,7 +104,7 @@ class PipeConnectionTest(unittest.TestCase):
"""Test module emulation"""
assert type(self.conn.tempfile.mktemp()) is types.StringType
assert self.conn.os.path.join("a", "b") == "a/b"
rp1 = RPath(self.conn, self.regfilename)
rp1 = rpath.RPath(self.conn, self.regfilename)
assert rp1.isreg()
def testVirtualFiles(self):
......@@ -112,17 +112,17 @@ class PipeConnectionTest(unittest.TestCase):
tempout = self.conn.open("testfiles/tempout", "w")
assert isinstance(tempout, VirtualFile)
regfilefp = open(self.regfilename, "r")
RPath.copyfileobj(regfilefp, tempout)
rpath.copyfileobj(regfilefp, tempout)
tempout.close()
regfilefp.close()
tempoutlocal = open("testfiles/tempout", "r")
regfilefp = open(self.regfilename, "r")
assert RPath.cmpfileobj(regfilefp, tempoutlocal)
assert rpath.cmpfileobj(regfilefp, tempoutlocal)
tempoutlocal.close()
regfilefp.close()
os.unlink("testfiles/tempout")
assert RPath.cmpfileobj(self.conn.open(self.regfilename, "r"),
assert rpath.cmpfileobj(self.conn.open(self.regfilename, "r"),
open(self.regfilename, "r"))
def testString(self):
......@@ -139,7 +139,8 @@ class PipeConnectionTest(unittest.TestCase):
def testRPaths(self):
"""Test transmission of rpaths"""
rp = RPath(self.conn, "testfiles/various_file_types/regular_file")
rp = rpath.RPath(self.conn,
"testfiles/various_file_types/regular_file")
assert self.conn.reval("lambda rp: rp.data", rp) == rp.data
assert self.conn.reval("lambda rp: rp.conn is Globals.local_connection", rp)
......@@ -192,7 +193,7 @@ class RedirectedConnectionTest(unittest.TestCase):
def testRpaths(self):
"""Test moving rpaths back and forth across connections"""
rp = RPath(self.conna, "foo")
rp = rpath.RPath(self.conna, "foo")
self.connb.Globals.set("tmp_rpath", rp)
rp_returned = self.connb.Globals.get("tmp_rpath")
assert rp_returned.conn is rp.conn
......
import unittest
from commontest import *
from rdiff_backup.C import *
from rdiff_backup import C
from rdiff_backup.rpath import *
class CTest(unittest.TestCase):
......
from __future__ import generators
import unittest
from commontest import *
from rdiff_backup.rpath import *
from rdiff_backup.selection import *
from rdiff_backup import Globals
from rdiff_backup import rpath, selection, Globals, destructive_stepping
Log.setverbosity(4)
class DSTest(unittest.TestCase):
def setUp(self):
self.lc = Globals.local_connection
self.noperms = RPath(self.lc, "testfiles/noperms")
self.noperms = rpath.RPath(self.lc, "testfiles/noperms")
Globals.change_source_perms = 1
self.iteration_dir = RPath(self.lc, "testfiles/iteration-test")
self.iteration_dir = rpath.RPath(self.lc, "testfiles/iteration-test")
def testDSIter(self):
"""Testing destructive stepping iterator from baserp"""
for i in range(2):
sel = Select(DSRPath(1, self.noperms)).set_iter()
sel = selection.Select(destructive_stepping.
DSRPath(1, self.noperms)).set_iter()
ds_iter = sel.iterate_with_finalizer()
noperms = ds_iter.next()
assert noperms.isdir() and noperms.getperms() == 0, \
......
import unittest, os, re, sys
import unittest, os, re, sys, time
from commontest import *
from rdiff_backup.log import *
from rdiff_backup.rpath import *
from rdiff_backup import Globals
from rdiff_backup import Globals, log, rpath
"""Regression tests"""
Globals.exclude_mirror_regexps = [re.compile(".*/rdiff-backup-data")]
Log.setverbosity(7)
log.Log.setverbosity(7)
lc = Globals.local_connection
......@@ -15,7 +13,7 @@ class Local:
"""This is just a place to put increments relative to the local
connection"""
def get_local_rp(extension):
return RPath(Globals.local_connection, "testfiles/" + extension)
return rpath.RPath(Globals.local_connection, "testfiles/" + extension)
vftrp = get_local_rp('various_file_types')
inc1rp = get_local_rp('increment1')
......@@ -154,7 +152,7 @@ class PathSetter(unittest.TestCase):
"testfiles/output/rdiff-backup-data/increments")
self.exec_rb(None, timbar_paths[0])
self.refresh(Local.timbar_in, Local.timbar_out)
assert RPath.cmp_with_attribs(Local.timbar_in, Local.timbar_out)
assert rpath.cmp_with_attribs(Local.timbar_in, Local.timbar_out)
self.exec_rb_restore(25000, 'testfiles/output/various_file_types',
'testfiles/vft2_out')
......@@ -173,8 +171,8 @@ class PathSetter(unittest.TestCase):
incfiles = filter(lambda s: s.startswith(basename),
os.listdir(directory))
incfiles.sort()
incrps = map(lambda f: RPath(lc, directory+"/"+f), incfiles)
return map(lambda x: x.path, filter(RPath.isincfile, incrps))
incrps = map(lambda f: rpath.RPath(lc, directory+"/"+f), incfiles)
return map(lambda x: x.path, filter(rpath.RPath.isincfile, incrps))
class Final(PathSetter):
......@@ -287,7 +285,7 @@ testfiles/increment2/changed_dir""")
"testfiles/output/changed_dir/foo")
# Test selective restoring
mirror_rp = RPath(Globals.local_connection, "testfiles/output")
mirror_rp = rpath.RPath(Globals.local_connection, "testfiles/output")
restore_filename = get_increment_rp(mirror_rp, 10000).path
assert not os.system(self.rb_schema +
"--include testfiles/restoretarget1/various_file_types/"
......@@ -321,7 +319,7 @@ testfiles/increment2/changed_dir""")
# Make an exclude list
os.mkdir("testfiles/vft_out")
excluderp = RPath(Globals.local_connection,
excluderp = rpath.RPath(Globals.local_connection,
"testfiles/vft_out/exclude")
fp = excluderp.open("w")
fp.write("""
......@@ -331,7 +329,7 @@ testfiles/increment2/changed_dir""")
assert not fp.close()
# Make an include list
includerp = RPath(Globals.local_connection,
includerp = rpath.RPath(Globals.local_connection,
"testfiles/vft_out/include")
fp = includerp.open("w")
fp.write("""
......
......@@ -6,7 +6,7 @@ class RemoteMirrorTest(unittest.TestCase):
"""Test mirroring"""
def setUp(self):
"""Start server"""
Log.setverbosity(7)
Log.setverbosity(3)
Globals.change_source_perms = 1
SetConnections.UpdateGlobal('checkpoint_interval', 3)
......
import unittest, os
import unittest, os, re, time
from commontest import *
from rdiff_backup.log import *
from rdiff_backup.rpath import *
from rdiff_backup.restore import *
from rdiff_backup import log, rpath, restore, increment, Time, \
Rdiff, statistics
lc = Globals.local_connection
Globals.change_source_perms = 1
Log.setverbosity(3)
def getrp(ending):
return RPath(lc, "testfiles/various_file_types/" + ending)
return rpath.RPath(lc, "testfiles/various_file_types/" + ending)
rf = getrp("regular_file")
rf2 = getrp("two_hardlinked_files1")
......@@ -22,11 +21,11 @@ dir = getrp(".")
sym = getrp("symbolic_link")
nothing = getrp("nothing")
target = RPath(lc, "testfiles/out")
out2 = RPath(lc, "testfiles/out2")
out_gz = RPath(lc, "testfiles/out.gz")
target = rpath.RPath(lc, "testfiles/out")
out2 = rpath.RPath(lc, "testfiles/out2")
out_gz = rpath.RPath(lc, "testfiles/out.gz")
Time.setprevtime(999424113.24931)
Time.setprevtime(999424113)
prevtimestr = "2001-09-02T02:48:33-07:00"
t_pref = "testfiles/out.2001-09-02T02:48:33-07:00"
t_diff = "testfiles/out.2001-09-02T02:48:33-07:00.diff"
......@@ -39,78 +38,72 @@ class inctest(unittest.TestCase):
def setUp(self):
Globals.set('isbackup_writer',1)
def check_time(self, rp):
"""Make sure that rp is an inc file, and time is Time.prevtime"""
assert rp.isincfile(), rp
t = Time.stringtotime(rp.getinctime())
assert t == Time.prevtime, (t, Time.prevtime)
def testreg(self):
"""Test increment of regular files"""
Globals.compression = None
target.setdata()
if target.lstat(): target.delete()
rpd = RPath(lc, t_diff)
rpd = rpath.RPath(lc, t_diff)
if rpd.lstat(): rpd.delete()
Inc.Increment(rf, exec1, target)
rpd.setdata()
assert rpd.isreg(), rpd
assert RPath.cmp_attribs(rpd, exec1)
rpd.delete()
diffrp = increment.Increment(rf, exec1, target)
assert diffrp.isreg(), diffrp
assert rpath.cmp_attribs(diffrp, exec1)
self.check_time(diffrp)
assert diffrp.getinctype() == 'diff', diffrp.getinctype()
diffrp.delete()
def testmissing(self):
"""Test creation of missing files"""
Inc.Increment(rf, nothing, target)
rp = RPath(lc, t_pref + ".missing")
assert rp.lstat()
rp.delete()
missing_rp = increment.Increment(rf, nothing, target)
self.check_time(missing_rp)
assert missing_rp.getinctype() == 'missing'
missing_rp.delete()
def testsnapshot(self):
"""Test making of a snapshot"""
Globals.compression = None
rp = RPath(lc, t_pref + ".snapshot")
if rp.lstat(): rp.delete()
Inc.Increment(rf, sym, target)
rp.setdata()
assert rp.lstat()
assert RPath.cmp_attribs(rp, sym)
assert RPath.cmp(rp, sym)
rp.delete()
rp = RPath(lc, t_pref + ".snapshot")
if rp.lstat(): rp.delete()
Inc.Increment(sym, rf, target)
rp.setdata()
assert rp.lstat()
assert RPath.cmp_attribs(rp, rf)
assert RPath.cmp(rp, rf)
rp.delete()
snap_rp = increment.Increment(rf, sym, target)
self.check_time(snap_rp)
assert rpath.cmp_attribs(snap_rp, sym)
assert rpath.cmp(snap_rp, sym)
snap_rp.delete()
snap_rp2 = increment.Increment(sym, rf, target)
self.check_time(snap_rp2)
assert rpath.cmp_attribs(snap_rp2, rf)
assert rpath.cmp(snap_rp2, rf)
snap_rp2.delete()
def testGzipsnapshot(self):
"""Test making a compressed snapshot"""
Globals.compression = 1
rp = RPath(lc, t_pref + ".snapshot")
if rp.lstat(): rp.delete()
Inc.Increment(rf, sym, target)
rp.setdata()
assert rp.lstat()
assert RPath.cmp_attribs(rp, sym)
assert RPath.cmp(rp, sym)
rp = increment.Increment(rf, sym, target)
self.check_time(rp)
assert rpath.cmp_attribs(rp, sym)
assert rpath.cmp(rp, sym)
rp.delete()
rp = RPath(lc, t_pref + ".snapshot.gz")
if rp.lstat(): rp.delete()
Inc.Increment(sym, rf, target)
rp.setdata()
assert rp.lstat()
assert RPath.cmp_attribs(rp, rf)
assert RPath.cmpfileobj(rp.open("rb", 1), rf.open("rb"))
rp = increment.Increment(sym, rf, target)
self.check_time(rp)
assert rpath.cmp_attribs(rp, rf)
assert rpath.cmpfileobj(rp.open("rb", 1), rf.open("rb"))
assert rp.isinccompressed()
rp.delete()
def testdir(self):
"""Test increment on dir"""
Inc.Increment(sym, dir, target)
rp = RPath(lc, t_pref + ".dir")
rp2 = RPath(lc, t_pref)
rp = increment.Increment(sym, dir, target)
self.check_time(rp)
assert rp.lstat()
assert target.isdir()
assert RPath.cmp_attribs(dir, rp)
assert rpath.cmp_attribs(dir, rp)
assert rp.isreg()
rp.delete()
target.delete()
......@@ -118,46 +111,36 @@ class inctest(unittest.TestCase):
def testDiff(self):
"""Test making diffs"""
Globals.compression = None
rp = RPath(lc, t_pref + '.diff')
if rp.lstat(): rp.delete()
Inc.Increment(rf, rf2, target)
rp.setdata()
assert rp.lstat()
assert RPath.cmp_attribs(rp, rf2)
rp = increment.Increment(rf, rf2, target)
self.check_time(rp)
assert rpath.cmp_attribs(rp, rf2)
Rdiff.patch_action(rf, rp, out2).execute()
assert RPath.cmp(rf2, out2)
assert rpath.cmp(rf2, out2)
rp.delete()
out2.delete()
def testGzipDiff(self):
"""Test making gzipped diffs"""
Globals.compression = 1
rp = RPath(lc, t_pref + '.diff.gz')
if rp.lstat(): rp.delete()
Inc.Increment(rf, rf2, target)
rp.setdata()
assert rp.lstat()
assert RPath.cmp_attribs(rp, rf2)
rp = increment.Increment(rf, rf2, target)
self.check_time(rp)
assert rpath.cmp_attribs(rp, rf2)
Rdiff.patch_action(rf, rp, out2, delta_compressed = 1).execute()
assert RPath.cmp(rf2, out2)
assert rpath.cmp(rf2, out2)
rp.delete()
out2.delete()
def testGzipRegexp(self):
"""Here a .gz file shouldn't be compressed"""
Globals.compression = 1
RPath.copy(rf, out_gz)
rpath.copy(rf, out_gz)
assert out_gz.lstat()
rp = RPath(lc, t_pref + '.diff')
if rp.lstat(): rp.delete()
Inc.Increment(rf, out_gz, target)
rp.setdata()
assert rp.lstat()
assert RPath.cmp_attribs(rp, out_gz)
rp = increment.Increment(rf, out_gz, target)
self.check_time(rp)
assert rpath.cmp_attribs(rp, out_gz)
Rdiff.patch_action(rf, rp, out2).execute()
assert RPath.cmp(out_gz, out2)
assert rpath.cmp(out_gz, out2)
rp.delete()
out2.delete()
out_gz.delete()
......@@ -194,7 +177,7 @@ class inctest2(unittest.TestCase):
InternalBackup(1, 1, "testfiles/stattest2", "testfiles/output",
time.time()+1)
rbdir = RPath(Globals.local_connection,
rbdir = rpath.RPath(Globals.local_connection,
"testfiles/output/rdiff-backup-data")
#incs = Restore.get_inclist(rbdir.append("subdir").
......@@ -217,14 +200,14 @@ class inctest2(unittest.TestCase):
#assert 400000 < subdir_stats.ChangedMirrorSize < 420000
#assert 10 < subdir_stats.IncrementFileSize < 20000
incs = Restore.get_inclist(rbdir.append("session_statistics"))
incs = restore.get_inclist(rbdir.append("session_statistics"))
assert len(incs) == 2
s2 = StatsObj().read_stats_from_rp(incs[0])
s2 = statistics.StatsObj().read_stats_from_rp(incs[0])
assert s2.SourceFiles == 7
assert 700000 < s2.SourceFileSize < 750000
self.stats_check_initial(s2)
root_stats = StatsObj().read_stats_from_rp(incs[1])
root_stats = statistics.StatsObj().read_stats_from_rp(incs[1])
assert root_stats.SourceFiles == 7, root_stats.SourceFiles
assert 550000 < root_stats.SourceFileSize < 570000
assert root_stats.MirrorFiles == 7
......
import unittest, StringIO
from commontest import *
from rdiff_backup.iterfile import *
from rdiff_backup import lazy
class testIterFile(unittest.TestCase):
......@@ -11,7 +12,7 @@ class testIterFile(unittest.TestCase):
def testConversion(self):
"""Test iter to file conversion"""
for itm in [self.iter1maker, self.iter2maker]:
assert Iter.equal(itm(),
assert lazy.Iter.equal(itm(),
IterWrappingFile(FileWrappingIter(itm())))
class testBufferedRead(unittest.TestCase):
......
......@@ -218,97 +218,4 @@ class MultiplexTest(Iterators):
assert Iter.equal(i2, self.one_to_100())
class ITRBadder(ITRBranch):
	"""Tree-reducer branch that sums the last component of each index.

	The root index () has an empty base_index and contributes nothing.
	"""
	def start_process(self, index):
		# Fresh accumulator for this branch
		self.total = 0

	def end_process(self):
		if self.base_index:
			# Non-root indices contribute their final component
			self.total = self.total + self.base_index[-1]

	def branch_process(self, subinstance):
		# Fold a finished child branch's sum into this branch's total
		self.total = self.total + subinstance.total
class ITRBadder2(ITRBranch):
	"""Tree-reducer branch that sums every component of every index.

	Indices of length 3 take the fast path and are added inline.
	"""
	def start_process(self, index):
		self.total = 0

	def end_process(self):
		# Same as reduce(add, self.base_index, 0): total the components
		subtotal = 0
		for component in self.base_index:
			subtotal = subtotal + component
		self.total = self.total + subtotal

	def can_fast_process(self, index):
		# Only depth-3 indices qualify for the inline shortcut
		if len(index) == 3: return 1
		return None

	def fast_process(self, index):
		self.total = self.total + index[0] + index[1] + index[2]

	def branch_process(self, subinstance):
		self.total = self.total + subinstance.total
class TreeReducerTest(unittest.TestCase):
	"""Exercise IterTreeReducer with the ITRBadder branch classes."""

	def setUp(self):
		# Full depth-first index sequences fed to the reducers
		self.i1 = [(), (1,), (2,), (3,)]
		self.i2 = [(0,), (0,1), (0,1,0), (0,1,1), (0,2), (0,2,1), (0,3)]
		# Prefix/suffix splits of the above, used to pickle a reducer
		# mid-run and resume it from the restored copy
		self.i1a = [(), (1,)]
		self.i1b = [(2,), (3,)]
		self.i2a = [(0,), (0,1), (0,1,0)]
		self.i2b = [(0,1,1), (0,2)]
		self.i2c = [(0,2,1), (0,3)]

	def testTreeReducer(self):
		"""testing IterTreeReducer"""
		itm = IterTreeReducer(ITRBadder, [])
		for index in self.i1:
			val = itm(index)
			assert val, (val, index)
		itm.Finish()
		# ITRBadder sums the last component of each non-root index: 1+2+3
		assert itm.root_branch.total == 6, itm.root_branch.total

		itm2 = IterTreeReducer(ITRBadder2, [])
		for index in self.i2:
			val = itm2(index)
			# the root index () is expected to produce a false value
			if index == (): assert not val
			else: assert val
		itm2.Finish()
		# ITRBadder2 sums every component of every index in i2
		assert itm2.root_branch.total == 12, itm2.root_branch.total

	def testTreeReducerState(self):
		"""Test saving and recreation of an IterTreeReducer"""
		itm1a = IterTreeReducer(ITRBadder, [])
		for index in self.i1a:
			val = itm1a(index)
			assert val, index
		# Pickle mid-run; the restored reducer must continue seamlessly
		itm1b = pickle.loads(pickle.dumps(itm1a))
		for index in self.i1b:
			val = itm1b(index)
			assert val, index
		itm1b.Finish()
		# Final total must match an uninterrupted run
		assert itm1b.root_branch.total == 6, itm1b.root_branch.total

		# Same check for ITRBadder2, pickling twice along the way
		itm2a = IterTreeReducer(ITRBadder2, [])
		for index in self.i2a:
			val = itm2a(index)
			if index == (): assert not val
			else: assert val
		itm2b = pickle.loads(pickle.dumps(itm2a))
		for index in self.i2b:
			val = itm2b(index)
			if index == (): assert not val
			else: assert val
		itm2c = pickle.loads(pickle.dumps(itm2b))
		for index in self.i2c:
			val = itm2c(index)
			if index == (): assert not val
			else: assert val
		itm2c.Finish()
		assert itm2c.root_branch.total == 12, itm2c.root_branch.total
# Run this module's test cases when executed as a script
if __name__ == "__main__":
	unittest.main()
import unittest, os, cStringIO, time
from rdiff_backup.metadata import *
from rdiff_backup import rpath, Globals, selection, destructive_stepping
from rdiff_backup import rpath, connection, Globals, selection, \
destructive_stepping
tempdir = rpath.RPath(Globals.local_connection, "testfiles/output")
......@@ -61,9 +62,8 @@ class MetadataTest(unittest.TestCase):
if temprp.lstat(): return temprp
self.make_temp()
root = rpath.RPath(Globals.local_connection, "testfiles/bigdir")
dsrp_root = destructive_stepping.DSRPath(1, root)
rpath_iter = selection.Select(dsrp_root).set_iter()
rootrp = rpath.RPath(Globals.local_connection, "testfiles/bigdir")
rpath_iter = selection.Select(rootrp).set_iter()
start_time = time.time()
OpenMetadata(temprp)
......
import unittest, random
from commontest import *
from rdiff_backup.log import *
from rdiff_backup.selection import *
from rdiff_backup import Globals, Rdiff
from rdiff_backup import Globals, Rdiff, selection, log, rpath
Log.setverbosity(6)
......@@ -19,18 +17,19 @@ def MakeRandomFile(path):
class RdiffTest(unittest.TestCase):
"""Test rdiff"""
lc = Globals.local_connection
basis = RPath(lc, "testfiles/basis")
new = RPath(lc, "testfiles/new")
output = RPath(lc, "testfiles/output")
delta = RPath(lc, "testfiles/delta")
signature = RPath(lc, "testfiles/signature")
basis = rpath.RPath(lc, "testfiles/basis")
new = rpath.RPath(lc, "testfiles/new")
output = rpath.RPath(lc, "testfiles/output")
delta = rpath.RPath(lc, "testfiles/delta")
signature = rpath.RPath(lc, "testfiles/signature")
def testRdiffSig(self):
"""Test making rdiff signatures"""
sig = RPath(self.lc, "testfiles/various_file_types/regular_file.sig")
sig = rpath.RPath(self.lc,
"testfiles/various_file_types/regular_file.sig")
sigfp = sig.open("r")
rfsig = Rdiff.get_signature(RPath(self.lc, "testfiles/various_file_types/regular_file"))
assert RPath.cmpfileobj(sigfp, rfsig)
assert rpath.cmpfileobj(sigfp, rfsig)
sigfp.close()
rfsig.close()
......@@ -44,7 +43,7 @@ class RdiffTest(unittest.TestCase):
for i in range(2):
MakeRandomFile(self.basis.path)
MakeRandomFile(self.new.path)
map(RPath.setdata, [self.basis, self.new])
map(rpath.RPath.setdata, [self.basis, self.new])
assert self.basis.lstat() and self.new.lstat()
self.signature.write_from_fileobj(Rdiff.get_signature(self.basis))
assert self.signature.lstat()
......@@ -52,8 +51,8 @@ class RdiffTest(unittest.TestCase):
self.new))
assert self.delta.lstat()
Rdiff.patch_action(self.basis, self.delta, self.output).execute()
assert RPath.cmp(self.new, self.output)
map(RPath.delete, rplist)
assert rpath.cmp(self.new, self.output)
map(rpath.RPath.delete, rplist)
def testRdiffDeltaPatchGzip(self):
"""Same as above by try gzipping patches"""
......@@ -64,7 +63,7 @@ class RdiffTest(unittest.TestCase):
MakeRandomFile(self.basis.path)
MakeRandomFile(self.new.path)
map(RPath.setdata, [self.basis, self.new])
map(rpath.RPath.setdata, [self.basis, self.new])
assert self.basis.lstat() and self.new.lstat()
self.signature.write_from_fileobj(Rdiff.get_signature(self.basis))
assert self.signature.lstat()
......@@ -77,8 +76,8 @@ class RdiffTest(unittest.TestCase):
Rdiff.patch_action(self.basis, self.delta, self.output,
delta_compressed = 1).execute()
assert RPath.cmp(self.new, self.output)
map(RPath.delete, rplist)
assert rpath.cmp(self.new, self.output)
map(rpath.RPath.delete, rplist)
def testWriteDelta(self):
"""Test write delta feature of rdiff"""
......@@ -86,23 +85,23 @@ class RdiffTest(unittest.TestCase):
rplist = [self.basis, self.new, self.delta, self.output]
MakeRandomFile(self.basis.path)
MakeRandomFile(self.new.path)
map(RPath.setdata, [self.basis, self.new])
map(rpath.RPath.setdata, [self.basis, self.new])
assert self.basis.lstat() and self.new.lstat()
Rdiff.write_delta(self.basis, self.new, self.delta)
assert self.delta.lstat()
Rdiff.patch_action(self.basis, self.delta, self.output).execute()
assert RPath.cmp(self.new, self.output)
map(RPath.delete, rplist)
assert rpath.cmp(self.new, self.output)
map(rpath.RPath.delete, rplist)
def testWriteDeltaGzip(self):
"""Same as above but delta is written gzipped"""
rplist = [self.basis, self.new, self.delta, self.output]
MakeRandomFile(self.basis.path)
MakeRandomFile(self.new.path)
map(RPath.setdata, [self.basis, self.new])
map(rpath.RPath.setdata, [self.basis, self.new])
assert self.basis.lstat() and self.new.lstat()
delta_gz = RPath(self.delta.conn, self.delta.path + ".gz")
delta_gz = rpath.RPath(self.delta.conn, self.delta.path + ".gz")
if delta_gz.lstat(): delta_gz.delete()
Rdiff.write_delta(self.basis, self.new, delta_gz, 1)
......@@ -111,8 +110,8 @@ class RdiffTest(unittest.TestCase):
delta_gz.setdata()
self.delta.setdata()
Rdiff.patch_action(self.basis, self.delta, self.output).execute()
assert RPath.cmp(self.new, self.output)
map(RPath.delete, rplist)
assert rpath.cmp(self.new, self.output)
map(rpath.RPath.delete, rplist)
def testRdiffRename(self):
"""Rdiff replacing original file with patch outfile"""
......@@ -122,7 +121,7 @@ class RdiffTest(unittest.TestCase):
MakeRandomFile(self.basis.path)
MakeRandomFile(self.new.path)
map(RPath.setdata, [self.basis, self.new])
map(rpath.RPath.setdata, [self.basis, self.new])
assert self.basis.lstat() and self.new.lstat()
self.signature.write_from_fileobj(Rdiff.get_signature(self.basis))
assert self.signature.lstat()
......@@ -130,8 +129,8 @@ class RdiffTest(unittest.TestCase):
self.new))
assert self.delta.lstat()
Rdiff.patch_action(self.basis, self.delta).execute()
assert RPath.cmp(self.basis, self.new)
map(RPath.delete, rplist)
assert rpath.cmp(self.basis, self.new)
map(rpath.RPath.delete, rplist)
def testCopy(self):
"""Using rdiff to copy two files"""
......@@ -141,10 +140,10 @@ class RdiffTest(unittest.TestCase):
MakeRandomFile(self.basis.path)
MakeRandomFile(self.new.path)
map(RPath.setdata, rplist)
map(rpath.RPath.setdata, rplist)
Rdiff.copy_action(self.basis, self.new).execute()
assert RPath.cmp(self.basis, self.new)
map(RPath.delete, rplist)
assert rpath.cmp(self.basis, self.new)
map(rpath.RPath.delete, rplist)
def testPatchWithAttribs(self):
"""Using rdiff to copy two files with attributes"""
......@@ -155,9 +154,9 @@ class RdiffTest(unittest.TestCase):
MakeRandomFile(self.basis.path)
MakeRandomFile(self.new.path)
self.new.chmod(0401)
map(RPath.setdata, rplist)
map(rpath.RPath.setdata, rplist)
Rdiff.write_delta(self.basis, self.new, self.delta)
RPath.copy_attribs(self.new, self.delta)
rpath.copy_attribs(self.new, self.delta)
assert self.delta.getperms() == 0401
assert not self.basis == self.new
......@@ -165,7 +164,7 @@ class RdiffTest(unittest.TestCase):
if not self.basis == self.new:
print self.basis, self.new
assert 0
map(RPath.delete, rplist)
map(rpath.RPath.delete, rplist)
if __name__ == '__main__':
......
import unittest, os
from commontest import *
from rdiff_backup.log import *
from rdiff_backup.rpath import *
from rdiff_backup import Globals, SetConnections
from rdiff_backup import Globals, SetConnections, log, rpath
"""Regression tests
......@@ -14,13 +12,13 @@ testfiles
Globals.set('change_source_perms', 1)
Globals.counter = 0
Log.setverbosity(7)
log.Log.setverbosity(7)
class Local:
"""This is just a place to put increments relative to the local
connection"""
def get_local_rp(extension):
return RPath(Globals.local_connection, "testfiles/" + extension)
return rpath.RPath(Globals.local_connection, "testfiles/" + extension)
inc1rp = get_local_rp('increment1')
inc2rp = get_local_rp('increment2')
......@@ -55,10 +53,10 @@ class PathSetter(unittest.TestCase):
else: return ('./', Globals.local_connection)
def get_src_rp(self, path):
return RPath(self.src_conn, self.src_prefix + path)
return rpath.RPath(self.src_conn, self.src_prefix + path)
def get_dest_rp(self, path):
return RPath(self.dest_conn, self.dest_prefix + path)
return rpath.RPath(self.dest_conn, self.dest_prefix + path)
def set_rbdir(self, rpout):
"""Create rdiff-backup-data dir if not already, tell everyone"""
......@@ -89,7 +87,8 @@ class PathSetter(unittest.TestCase):
self.get_prefix_and_conn(dest_path, dest_return)
SetConnections.BackupInitConnections(self.src_conn, self.dest_conn)
os.system(MiscDir+"/myrm testfiles/output* testfiles/restoretarget* "
assert not os.system("rm -rf testfiles/output* "
"testfiles/restoretarget* "
"testfiles/noperms_output testfiles/root_output "
"testfiles/unreadable_out")
......@@ -157,7 +156,7 @@ class IncrementTest1(unittest.TestCase):
class IncrementTest2(PathSetter):
def OldtestRecoveryLocal(self):
"""Test to see if rdiff-backup can continue with bad increment"""
os.system(MiscDir+'/myrm testfiles/recovery_out_backup')
assert not os.system("rm -rf testfiles/recovery_out_backup")
self.setPathnames(None, None, None, None)
Time.setprevtime(1006136450)
Time.setcurtime()
......@@ -174,7 +173,7 @@ class IncrementTest2(PathSetter):
def OldtestRecoveryRemote(self):
"""Test Recovery with both connections remote"""
os.system(MiscDir+'/myrm testfiles/recovery_out_backup')
assert not os.system('rm -rf testfiles/recovery_out_backup')
self.setPathnames('test1', '../', 'test2/tmp', '../../')
Time.setprevtime(1006136450)
Time.setcurtime()
......@@ -360,7 +359,7 @@ class MirrorTest(PathSetter):
Globals.change_source_perms = 1
def deleteoutput(self):
os.system(MiscDir+"/myrm testfiles/output*")
assert not os.system("rm -rf testfiles/output*")
self.rbdir = self.rpout.append('rdiff-backup-data')
self.rpout.mkdir()
self.rbdir.mkdir()
......@@ -395,7 +394,7 @@ class MirrorTest(PathSetter):
Time.setcurtime()
SaveState.init_filenames()
self.Mirror(self.inc1rp, self.rpout)
#RPath.copy_attribs(self.inc1rp, self.rpout)
#rpath.RPath.copy_attribs(self.inc1rp, self.rpout)
assert CompareRecursive(Local.inc1rp, Local.rpout)
self.Mirror(self.inc2rp, self.rpout)
......
import unittest
from commontest import *
from rdiff_backup.log import *
from rdiff_backup.restore import *
from rdiff_backup import Globals
from rdiff_backup import log, restore, Globals, rpath
Log.setverbosity(3)
......@@ -23,9 +21,9 @@ class RestoreTest(unittest.TestCase):
dirlist = os.listdir(self.prefix)
dirlist.sort()
baselist = filter(lambda f: f.startswith(basename), dirlist)
rps = map(lambda f: RPath(lc, self.prefix+f), baselist)
rps = map(lambda f: rpath.RPath(lc, self.prefix+f), baselist)
incs = filter(lambda rp: rp.isincfile(), rps)
tuples = map(lambda rp: (rp, RPath(lc, "%s.%s" %
tuples = map(lambda rp: (rp, rpath.RPath(lc, "%s.%s" %
(rp.getincbase().path,
rp.getinctime()))),
incs)
......@@ -33,16 +31,16 @@ class RestoreTest(unittest.TestCase):
def restoreonefiletest(self, basename):
tuples, incs = self.maketesttuples(basename)
rpbase = RPath(lc, self.prefix + basename)
rptarget = RPath(lc, "testfiles/outfile")
rpbase = rpath.RPath(lc, self.prefix + basename)
rptarget = rpath.RPath(lc, "testfiles/outfile")
for pair in tuples:
print "Processing file " + pair[0].path
if rptarget.lstat(): rptarget.delete()
rest_time = Time.stringtotime(pair[0].getinctime())
rid = RestoreIncrementData((), rpbase, incs)
rid = restore.RestoreIncrementData((), rpbase, incs)
rid.sortincseq(rest_time, 10000000000) # pick some really late time
rcd = RestoreCombinedData(rid, rpbase, rptarget)
rcd = restore.RestoreCombinedData(rid, rpbase, rptarget)
rcd.RestoreFile()
#sorted_incs = Restore.sortincseq(rest_time, incs)
#Restore.RestoreFile(rest_time, rpbase, (), sorted_incs, rptarget)
......@@ -50,9 +48,9 @@ class RestoreTest(unittest.TestCase):
if not rptarget.lstat(): assert not pair[1].lstat()
elif not pair[1].lstat(): assert not rptarget.lstat()
else:
assert RPath.cmp(rptarget, pair[1]), \
assert rpath.cmp(rptarget, pair[1]), \
"%s %s" % (rptarget.path, pair[1].path)
assert RPath.cmp_attribs(rptarget, pair[1]), \
assert rpath.cmp_attribs(rptarget, pair[1]), \
"%s %s" % (rptarget.path, pair[1].path)
rptarget.delete()
......@@ -75,7 +73,7 @@ class RestoreTest(unittest.TestCase):
for inc, incbase in tuples:
assert inc.isincfile()
inctime = Time.stringtotime(inc.getinctime())
rid1 = RestoreIncrementData(basename, incbase, incs)
rid1 = restore.RestoreIncrementData(basename, incbase, incs)
rid1.sortincseq(inctime, mirror_time)
assert rid1.inc_list, rid1.inc_list
# oldest increment should be exactly inctime
......@@ -97,8 +95,8 @@ class RestoreTest(unittest.TestCase):
InternalRestore(1, 1, "testfiles/restoretest3",
"testfiles/output", 20000)
src_rp = RPath(Globals.local_connection, "testfiles/increment2")
restore_rp = RPath(Globals.local_connection, "testfiles/output")
src_rp = rpath.RPath(Globals.local_connection, "testfiles/increment2")
restore_rp = rpath.RPath(Globals.local_connection, "testfiles/output")
assert CompareRecursive(src_rp, restore_rp)
def testRestoreCorrupt(self):
......
import os, unittest
from commontest import *
from rdiff_backup.rpath import *
from rdiff_backup.robust import *
from rdiff_backup import rpath, robust, TempFile, Globals
class TestRobustAction(unittest.TestCase):
"""Test some robust actions"""
def testCopyWithAttribs(self):
"""Test copy with attribs action"""
rpin = RPath(Globals.local_connection, "./testfiles/robust/in")
rpin = rpath.RPath(Globals.local_connection, "./testfiles/robust/in")
fp = open("./testfiles/robust/in", "wb")
fp.write("hello there")
fp.close()
......@@ -16,8 +15,8 @@ class TestRobustAction(unittest.TestCase):
rpin.setdata()
assert rpin.isreg() and rpin.getperms() % 01000 == 0604
rpout = RPath(Globals.local_connection, "./testfiles/robust/out")
Robust.copy_with_attribs_action(rpin, rpout).execute()
rpout = rpath.RPath(Globals.local_connection, "./testfiles/robust/out")
robust.copy_with_attribs_action(rpin, rpout).execute()
if not rpout == rpin:
print rpout, rpin
assert 0
......@@ -28,7 +27,7 @@ class TestRobustAction(unittest.TestCase):
class TempFileTest(unittest.TestCase):
"""Test creation and management of tempfiles"""
rp_base = RPath(Globals.local_connection,
rp_base = rpath.RPath(Globals.local_connection,
"./testfiles/robust/testfile_base")
def testBasic(self):
"""Make a temp file, write to it, and then delete it
......@@ -36,9 +35,9 @@ class TempFileTest(unittest.TestCase):
Also test tempfile accounting and file name prefixing.
"""
assert not TempFileManager._tempfiles
tf = TempFileManager.new(self.rp_base)
assert TempFileManager._tempfiles == [tf]
assert not TempFile._tempfiles
tf = TempFile.new(self.rp_base)
assert TempFile._tempfiles == [tf]
assert tf.dirsplit()[0] == "testfiles/robust", tf.dirsplit()[0]
assert not tf.lstat()
fp = tf.open("w")
......@@ -48,35 +47,35 @@ class TempFileTest(unittest.TestCase):
assert fp.read() == "hello"
assert not fp.close()
tf.delete()
assert not TempFileManager._tempfiles
assert not TempFile._tempfiles
def testRename(self):
"""Test renaming of tempfile"""
tf = TempFileManager.new(self.rp_base)
assert TempFileManager._tempfiles
tf = TempFile.new(self.rp_base)
assert TempFile._tempfiles
tf.touch()
destination = RPath(Globals.local_connection,
destination = rpath.RPath(Globals.local_connection,
"./testfiles/robust/testfile_dest")
tf.rename(destination)
assert not TempFileManager._tempfiles
assert not TempFile._tempfiles
assert destination.lstat()
destination.delete()
class SaveStateTest(unittest.TestCase):
"""Test SaveState class"""
data_dir = RPath(Globals.local_connection, "testfiles/robust")
data_dir = rpath.RPath(Globals.local_connection, "testfiles/robust")
def testSymlinking(self):
"""Test recording last file with symlink"""
last_rorp = RORPath(('usr', 'local', 'bin', 'ls'))
last_rorp = rpath.RORPath(('usr', 'local', 'bin', 'ls'))
Globals.rbdir = self.data_dir
Time.setcurtime()
SetConnections.BackupInitConnections(Globals.local_connection,
Globals.local_connection)
SaveState.init_filenames()
SaveState.record_last_file_action(last_rorp).execute()
robust.SaveState.init_filenames()
robust.SaveState.record_last_file_action(last_rorp).execute()
sym_rp = RPath(Globals.local_connection,
sym_rp = rpath.RPath(Globals.local_connection,
"testfiles/robust/last-file-incremented.%s.data" %
Time.curtimestr)
assert sym_rp.issym()
......
import unittest
from __future__ import generators
import unittest, time, pickle
from commontest import *
from rdiff_backup.log import *
from rdiff_backup.rpath import *
from rdiff_backup.rorpiter import *
from rdiff_backup import Globals
from rdiff_backup import log, rpath, rorpiter, Globals, lazy
#Log.setverbosity(8)
......@@ -17,10 +14,10 @@ class index:
class RORPIterTest(unittest.TestCase):
def setUp(self):
self.lc = Globals.local_connection
self.inc0rp = RPath(self.lc, "testfiles/empty", ())
self.inc1rp = RPath(self.lc, "testfiles/inc-reg-perms1", ())
self.inc2rp = RPath(self.lc, "testfiles/inc-reg-perms2", ())
self.output = RPath(self.lc, "testfiles/output", ())
self.inc0rp = rpath.RPath(self.lc, "testfiles/empty", ())
self.inc1rp = rpath.RPath(self.lc, "testfiles/inc-reg-perms1", ())
self.inc2rp = rpath.RPath(self.lc, "testfiles/inc-reg-perms2", ())
self.output = rpath.RPath(self.lc, "testfiles/output", ())
def testCollateIterators(self):
"""Test basic collating"""
......@@ -31,14 +28,14 @@ class RORPIterTest(unittest.TestCase):
makeiter2 = lambda: iter(map(helper, [0,1,3]))
makeiter3 = lambda: iter(map(helper, [1,2]))
outiter = RORPIter.CollateIterators(makeiter1(), makeiter2())
assert Iter.equal(outiter,
outiter = rorpiter.CollateIterators(makeiter1(), makeiter2())
assert lazy.Iter.equal(outiter,
iter([(indicies[0], indicies[0]),
(indicies[1], indicies[1]),
(indicies[2], None),
(indicies[3], indicies[3])]))
assert Iter.equal(RORPIter.CollateIterators(makeiter1(),
assert lazy.Iter.equal(rorpiter.CollateIterators(makeiter1(),
makeiter2(),
makeiter3()),
iter([(indicies[0], indicies[0], None),
......@@ -46,29 +43,32 @@ class RORPIterTest(unittest.TestCase):
(indicies[2], None, indicies[2]),
(indicies[3], indicies[3], None)]))
assert Iter.equal(RORPIter.CollateIterators(makeiter1(), iter([])),
assert lazy.Iter.equal(rorpiter.CollateIterators(makeiter1(),
iter([])),
iter(map(lambda i: (i, None),
indicies)))
assert Iter.equal(iter(map(lambda i: (i, None), indicies)),
RORPIter.CollateIterators(makeiter1(), iter([])))
assert lazy.Iter.equal(iter(map(lambda i: (i, None), indicies)),
rorpiter.CollateIterators(makeiter1(),
iter([])))
def testCombinedPatching(self):
"""Combined signature, patch, and diff operations"""
if self.output.lstat(): self.output.delete()
if self.output.lstat():
Myrm(self.output.path)
self.output.setdata()
def turninto(final_rp):
sigfile = RORPIter.ToFile(RORPIter.GetSignatureIter(self.output))
diff_file = RORPIter.ToFile(
RORPIter.GetDiffIter(RORPIter.FromFile(sigfile),
RORPIter.IterateRPaths(final_rp)))
RORPIter.PatchIter(self.output, RORPIter.FromFile(diff_file))
sigfile = rorpiter.ToFile(rorpiter.GetSignatureIter(self.output))
diff_file = rorpiter.ToFile(rorpiter.GetDiffIter(
rorpiter.FromFile(sigfile), rorpiter.IterateRPaths(final_rp)))
rorpiter.PatchIter(self.output, rorpiter.FromFile(diff_file))
turninto(self.inc1rp)
RPath.copy_attribs(self.inc1rp, self.output) # Update time
rpath.copy_attribs(self.inc1rp, self.output) # Update time
assert self.compare_no_times(self.inc1rp, self.output)
turninto(self.inc2rp)
RPath.copy_attribs(self.inc2rp, self.output)
rpath.copy_attribs(self.inc2rp, self.output)
assert self.compare_no_times(self.inc2rp, self.output)
def compare_no_times(self, src_rp, dest_rp):
......@@ -83,8 +83,8 @@ class RORPIterTest(unittest.TestCase):
class IndexedTupleTest(unittest.TestCase):
def testTuple(self):
"""Test indexed tuple"""
i = IndexedTuple((1,2,3), ("a", "b"))
i2 = IndexedTuple((), ("hello", "there", "how are you"))
i = rorpiter.IndexedTuple((1,2,3), ("a", "b"))
i2 = rorpiter.IndexedTuple((), ("hello", "there", "how are you"))
assert i[0] == "a"
assert i[1] == "b"
......@@ -93,10 +93,186 @@ class IndexedTupleTest(unittest.TestCase):
assert i2 < i, i2 < i
def testTupleAssignment(self):
a, b, c = IndexedTuple((), (1, 2, 3))
a, b, c = rorpiter.IndexedTuple((), (1, 2, 3))
assert a == 1
assert b == 2
assert c == 3
class DirHandlerTest(unittest.TestCase):
	"""Test DirHandler's preservation of times and write permissions."""
	made_test_dir = 0 # Set to 1 once we have made the test dir
	def make_test_dir(self):
		"""Make the test directory"""
		# Recreate testfiles/output from scratch with subdirs a, b, c
		self.rootrp = RPath(Globals.local_connection, "testfiles/output")
		self.rootrp.delete()
		self.rootrp.mkdir()

		self.a = self.rootrp.append("a")
		self.b = self.rootrp.append("b")
		self.c = self.rootrp.append("c")
		self.a.mkdir()
		self.b.mkdir()
		self.b.chmod(0700)
		self.c.mkdir()
		self.c.chmod(0500) # No write permissions to c

		# Record original mtimes so the test can verify they are preserved
		self.rootmtime = self.rootrp.getmtime()
		self.amtime = self.a.getmtime()
		self.bmtime = self.b.getmtime()
		self.cmtime = self.c.getmtime()

		self.made_test_dir = 1
	def test_times_and_writes(self):
		"""Test writing without disrupting times, and to unwriteable dir"""
		return # NOTE(review): test disabled by this early return -- confirm intent
		self.make_test_dir()
		time.sleep(1) # make sure the mtimes would get updated otherwise
		DH = DirHandler(self.rootrp)

		# Write a new file under each directory, changing b's perms midway
		new_a_rp = self.a.append("foo")
		DH(new_a_rp)
		new_a_rp.touch()

		DH(self.b)
		self.b.chmod(0751)
		new_b_rp = self.b.append("aoenuth")
		DH(new_b_rp)
		new_b_rp.touch()

		new_root_rp = self.rootrp.append("bb")
		DH(new_root_rp)
		new_root_rp.touch()

		new_c_rp = self.c.append("bar")
		DH(new_c_rp)
		new_c_rp.touch()
		DH.Finish()

		assert new_a_rp.lstat() and new_b_rp.lstat() and new_c_rp.lstat()
		self.a.setdata()
		self.b.setdata()
		self.c.setdata()

		# Directory mtimes must be unchanged despite the writes above
		assert self.a.getmtime() == self.amtime
		assert self.c.getmtime() == self.cmtime
		assert self.rootrp.getmtime() == self.rootmtime
		# b's explicit chmod should stick; c stays at its restricted mode
		assert self.b.getperms() == 0751
		assert self.c.getperms() == 0500
class FillTest(unittest.TestCase):
	def test_fill_in(self):
		"""Check that FillInIter supplies missing ancestor indicies"""
		rootrp = RPath(Globals.local_connection, "testfiles/output")
		source_indicies = [(1,2), (1,3), (1,4),
						   (2,), (2,1),
						   (3,4,5), (3,6)]
		def get_rpiter():
			# Yield an rpath for each source index, components as strings
			for int_index in source_indicies:
				str_index = tuple([str(i) for i in int_index])
				yield rootrp.new_index(str_index)
		filled_in = rorpiter.FillInIter(get_rpiter(), rootrp)
		index_list = [tuple(map(int, rp.index)) for rp in list(filled_in)]
		# Every intermediate parent index should now be present
		assert index_list == [(), (1,), (1,2), (1,3), (1,4),
						      (2,), (2,1),
						      (3,), (3,4), (3,4,5), (3,6)], index_list
class ITRBadder(rorpiter.ITRBranch):
	"""Branch class accumulating the final component of each index.

	The root index () has no base_index and so adds nothing.
	"""
	def start_process(self, index):
		# Begin this branch with an empty running sum
		self.total = 0

	def end_process(self):
		if self.base_index:
			self.total = self.total + self.base_index[-1]

	def branch_process(self, subinstance):
		# Absorb a completed sub-branch's running sum
		self.total = self.total + subinstance.total
class ITRBadder2(rorpiter.ITRBranch):
	"""Branch class that sums every component of every index processed."""
	def start_process(self, index):
		self.total = 0

	def end_process(self):
		# Equivalent to reduce(add, self.base_index, 0)
		subtotal = 0
		for component in self.base_index:
			subtotal = subtotal + component
		self.total = self.total + subtotal

	def can_fast_process(self, index):
		# Depth-3 indices are handled inline by fast_process
		if len(index) == 3: return 1
		return None

	def fast_process(self, index):
		self.total = self.total + index[0] + index[1] + index[2]

	def branch_process(self, subinstance):
		self.total = self.total + subinstance.total
class TreeReducerTest(unittest.TestCase):
	"""Exercise rorpiter.IterTreeReducer with the ITRBadder branches."""

	def setUp(self):
		# Full depth-first index sequences fed to the reducers
		self.i1 = [(), (1,), (2,), (3,)]
		self.i2 = [(0,), (0,1), (0,1,0), (0,1,1), (0,2), (0,2,1), (0,3)]
		# Prefix/suffix splits of the above, used to pickle a reducer
		# mid-run and resume it from the restored copy
		self.i1a = [(), (1,)]
		self.i1b = [(2,), (3,)]
		self.i2a = [(0,), (0,1), (0,1,0)]
		self.i2b = [(0,1,1), (0,2)]
		self.i2c = [(0,2,1), (0,3)]

	def testTreeReducer(self):
		"""testing IterTreeReducer"""
		itm = rorpiter.IterTreeReducer(ITRBadder, [])
		for index in self.i1:
			val = itm(index)
			assert val, (val, index)
		itm.Finish()
		# ITRBadder sums the last component of each non-root index: 1+2+3
		assert itm.root_branch.total == 6, itm.root_branch.total

		itm2 = rorpiter.IterTreeReducer(ITRBadder2, [])
		for index in self.i2:
			val = itm2(index)
			# the root index () is expected to produce a false value
			if index == (): assert not val
			else: assert val
		itm2.Finish()
		# ITRBadder2 sums every component of every index in i2
		assert itm2.root_branch.total == 12, itm2.root_branch.total

	def testTreeReducerState(self):
		"""Test saving and recreation of an IterTreeReducer"""
		itm1a = rorpiter.IterTreeReducer(ITRBadder, [])
		for index in self.i1a:
			val = itm1a(index)
			assert val, index
		# Pickle mid-run; the restored reducer must continue seamlessly
		itm1b = pickle.loads(pickle.dumps(itm1a))
		for index in self.i1b:
			val = itm1b(index)
			assert val, index
		itm1b.Finish()
		# Final total must match an uninterrupted run
		assert itm1b.root_branch.total == 6, itm1b.root_branch.total

		# Same check for ITRBadder2, pickling twice along the way
		itm2a = rorpiter.IterTreeReducer(ITRBadder2, [])
		for index in self.i2a:
			val = itm2a(index)
			if index == (): assert not val
			else: assert val
		itm2b = pickle.loads(pickle.dumps(itm2a))
		for index in self.i2b:
			val = itm2b(index)
			if index == (): assert not val
			else: assert val
		itm2c = pickle.loads(pickle.dumps(itm2b))
		for index in self.i2c:
			val = itm2c(index)
			if index == (): assert not val
			else: assert val
		itm2c.Finish()
		assert itm2c.root_branch.total == 12, itm2c.root_branch.total
if __name__ == "__main__": unittest.main()
import os, cPickle, sys, unittest
from commontest import *
from rdiff_backup.rpath import *
from rdiff_backup import rpath
class RPathTest(unittest.TestCase):
lc = Globals.local_connection
......@@ -313,18 +313,18 @@ class FileCopying(RPathTest):
def testComp(self):
"""Test comparisons involving regular files"""
assert RPath.cmp(self.hl1, self.hl2)
assert not RPath.cmp(self.rf, self.hl1)
assert not RPath.cmp(self.dir, self.rf)
assert rpath.cmp(self.hl1, self.hl2)
assert not rpath.cmp(self.rf, self.hl1)
assert not rpath.cmp(self.dir, self.rf)
def testCompMisc(self):
"""Test miscellaneous comparisons"""
assert RPath.cmp(self.dir, RPath(self.lc, self.mainprefix, ()))
assert rpath.cmp(self.dir, RPath(self.lc, self.mainprefix, ()))
self.dest.symlink("regular_file")
assert RPath.cmp(self.sl, self.dest)
assert rpath.cmp(self.sl, self.dest)
self.dest.delete()
assert not RPath.cmp(self.sl, self.fifo)
assert not RPath.cmp(self.dir, self.sl)
assert not rpath.cmp(self.sl, self.fifo)
assert not rpath.cmp(self.dir, self.sl)
def testDirSizeComp(self):
"""Make sure directories can be equal,
......@@ -338,10 +338,10 @@ class FileCopying(RPathTest):
def testCopy(self):
"""Test copy of various files"""
for rp in [self.sl, self.rf, self.fifo, self.dir]:
RPath.copy(rp, self.dest)
rpath.copy(rp, self.dest)
assert self.dest.lstat(), "%s doesn't exist" % self.dest.path
assert RPath.cmp(rp, self.dest)
assert RPath.cmp(self.dest, rp)
assert rpath.cmp(rp, self.dest)
assert rpath.cmp(self.dest, rp)
self.dest.delete()
......@@ -361,8 +361,8 @@ class FileAttributes(FileCopying):
"""Test attribute comparison success"""
testpairs = [(self.hl1, self.hl2)]
for a, b in testpairs:
assert RPath.cmp_attribs(a, b), "Err with %s %s" % (a.path, b.path)
assert RPath.cmp_attribs(b, a), "Err with %s %s" % (b.path, a.path)
assert rpath.cmp_attribs(a, b), "Err with %s %s" % (a.path, b.path)
assert rpath.cmp_attribs(b, a), "Err with %s %s" % (b.path, a.path)
def testCompFail(self):
"""Test attribute comparison failures"""
......@@ -370,16 +370,16 @@ class FileAttributes(FileCopying):
(self.exec1, self.exec2),
(self.rf, self.hl1)]
for a, b in testpairs:
assert not RPath.cmp_attribs(a, b), \
assert not rpath.cmp_attribs(a, b), \
"Err with %s %s" % (a.path, b.path)
assert not RPath.cmp_attribs(b, a), \
assert not rpath.cmp_attribs(b, a), \
"Err with %s %s" % (b.path, a.path)
def testCompRaise(self):
"""Should raise exception when file missing"""
self.assertRaises(RPathException, RPath.cmp_attribs,
self.assertRaises(RPathException, rpath.cmp_attribs,
self.nothing, self.hl1)
self.assertRaises(RPathException, RPath.cmp_attribs,
self.assertRaises(RPathException, rpath.cmp_attribs,
self.noperms, self.nothing)
def testCopyAttribs(self):
......@@ -389,8 +389,8 @@ class FileAttributes(FileCopying):
for rp in [self.noperms, self.nowrite, self.rf, self.exec1,
self.exec2, self.hl1, self.dir]:
t.touch()
RPath.copy_attribs(rp, t)
assert RPath.cmp_attribs(t, rp), \
rpath.copy_attribs(rp, t)
assert rpath.cmp_attribs(t, rp), \
"Attributes for file %s not copied successfully" % rp.path
t.delete()
......@@ -400,16 +400,16 @@ class FileAttributes(FileCopying):
if out.lstat(): out.delete()
for rp in [self.noperms, self.nowrite, self.rf, self.exec1,
self.exec2, self.hl1, self.dir, self.sym]:
RPath.copy_with_attribs(rp, out)
assert RPath.cmp(rp, out)
assert RPath.cmp_attribs(rp, out)
rpath.copy_with_attribs(rp, out)
assert rpath.cmp(rp, out)
assert rpath.cmp_attribs(rp, out)
out.delete()
def testCopyRaise(self):
"""Should raise exception for non-existent files"""
self.assertRaises(RPathException, RPath.copy_attribs,
self.assertRaises(RPathException, rpath.copy_attribs,
self.hl1, self.nothing)
self.assertRaises(RPathException, RPath.copy_attribs,
self.assertRaises(RPathException, rpath.copy_attribs,
self.nothing, self.nowrite)
class CheckPath(unittest.TestCase):
......
from __future__ import generators
import re, StringIO, unittest
import re, StringIO, unittest, types
from commontest import *
from rdiff_backup.selection import *
from rdiff_backup.destructive_stepping import *
from rdiff_backup import Globals
from rdiff_backup import Globals, rpath, lazy
class MatchingTest(unittest.TestCase):
"""Test matching of file names against various selection functions"""
def makedsrp(self, path): return DSRPath(1, Globals.local_connection, path)
def makerp(self, path): return rpath.RPath(Globals.local_connection, path)
def makeext(self, path): return self.root.new_index(tuple(path.split("/")))
def setUp(self):
self.root = DSRPath(1, Globals.local_connection, "testfiles/select")
self.root = rpath.RPath(Globals.local_connection, "testfiles/select")
self.Select = Select(self.root)
def testRegexp(self):
......@@ -23,9 +22,9 @@ class MatchingTest(unittest.TestCase):
assert sf1(self.root.append("1.doc")) == None
sf2 = self.Select.regexp_get_sf("hello", 0)
assert sf2(self.makedsrp("hello")) == 0
assert sf2(self.makedsrp("foohello_there")) == 0
assert sf2(self.makedsrp("foo")) == None
assert sf2(self.makerp("hello")) == 0
assert sf2(self.makerp("foohello_there")) == 0
assert sf2(self.makerp("foo")) == None
def testTupleInclude(self):
"""Test include selection function made from a regular filename"""
......@@ -242,7 +241,7 @@ testfiles/select/1/1
def testRoot(self):
"""testRoot - / may be a counterexample to several of these.."""
root = DSRPath(1, Globals.local_connection, "/")
root = rpath.RPath(Globals.local_connection, "/")
select = Select(root)
assert select.glob_get_sf("/", 1)(root) == 1
......@@ -267,7 +266,7 @@ testfiles/select/1/1
def testOtherFilesystems(self):
"""Test to see if --exclude-other-filesystems works correctly"""
root = DSRPath(1, Globals.local_connection, "/")
root = rpath.RPath(Globals.local_connection, "/")
select = Select(root)
sf = select.other_filesystems_get_sf(0)
assert sf(root) is None
......@@ -284,12 +283,12 @@ class ParseArgsTest(unittest.TestCase):
def ParseTest(self, tuplelist, indicies, filelists = []):
"""No error if running select on tuple goes over indicies"""
if not self.root:
self.root = DSRPath(1, Globals.local_connection,
"testfiles/select")
self.root = RPath(Globals.local_connection, "testfiles/select")
self.Select = Select(self.root)
self.Select.ParseArgs(tuplelist, self.remake_filelists(filelists))
self.Select.set_iter()
assert Iter.equal(Iter.map(lambda dsrp: dsrp.index, self.Select),
assert lazy.Iter.equal(lazy.Iter.map(lambda dsrp: dsrp.index,
self.Select),
iter(indicies), verbose = 1)
def remake_filelists(self, filelist):
......@@ -408,12 +407,11 @@ testfiles/select**/2
def testAlternateRoot(self):
"""Test select with different root"""
self.root = DSRPath(1, Globals.local_connection,
"testfiles/select/1")
self.root = rpath.RPath(Globals.local_connection, "testfiles/select/1")
self.ParseTest([("--exclude", "testfiles/select/1/[23]")],
[(), ('1',), ('1', '1'), ('1', '2'), ('1', '3')])
self.root = DSRPath(1, Globals.local_connection, "/")
self.root = rpath.RPath(Globals.local_connection, "/")
self.ParseTest([("--exclude", "/home/*"),
("--include", "/home"),
("--exclude", "/")],
......@@ -421,12 +419,13 @@ testfiles/select**/2
def testParseStartingFrom(self):
"""Test parse, this time starting from inside"""
self.root = DSRPath(1, Globals.local_connection, "testfiles/select")
self.root = rpath.RPath(Globals.local_connection, "testfiles/select")
self.Select = Select(self.root)
self.Select.ParseArgs([("--include", "testfiles/select/1/1"),
("--exclude", "**")], [])
self.Select.set_iter(('1', '1'))
assert Iter.equal(Iter.map(lambda dsrp: dsrp.index, self.Select),
assert lazy.Iter.equal(lazy.Iter.map(lambda dsrp: dsrp.index,
self.Select),
iter([("1", '1', '1'),
('1', '1', '2'),
('1', '1', '3')]),
......
import unittest
from commontest import *
from rdiff_backup.statistics import *
from rdiff_backup import statistics, rpath
class StatsObjTest(unittest.TestCase):
"""Test StatsObj class"""
......@@ -24,17 +24,17 @@ class StatsObjTest(unittest.TestCase):
def test_get_stats(self):
"""Test reading and writing stat objects"""
s = StatsObj()
s = statistics.StatsObj()
assert s.get_stat('SourceFiles') is None
self.set_obj(s)
assert s.get_stat('SourceFiles') == 1
s1 = StatsITRB()
s1 = statistics.ITRB()
assert s1.get_stat('SourceFiles') == 0
def test_get_stats_string(self):
"""Test conversion of stat object into string"""
s = StatsObj()
s = statistics.StatsObj()
stats_string = s.get_stats_string()
assert stats_string == "", stats_string
......@@ -62,7 +62,7 @@ TotalDestinationSizeChange 7 (7 bytes)
def test_line_string(self):
"""Test conversion to a single line"""
s = StatsObj()
s = statistics.StatsObj()
self.set_obj(s)
statline = s.get_stats_line(("sample", "index", "w", "new\nline"))
assert statline == "sample/index/w/new\\nline 1 2 13 14 " \
......@@ -77,7 +77,7 @@ TotalDestinationSizeChange 7 (7 bytes)
def test_byte_summary(self):
"""Test conversion of bytes to strings like 7.23MB"""
s = StatsObj()
s = statistics.StatsObj()
f = s.get_byte_summary_string
assert f(1) == "1 byte"
assert f(234.34) == "234 bytes"
......@@ -89,36 +89,36 @@ TotalDestinationSizeChange 7 (7 bytes)
def test_init_stats(self):
"""Test setting stat object from string"""
s = StatsObj()
s = statistics.StatsObj()
s.set_stats_from_string("NewFiles 3 hello there")
for attr in s.stat_attrs:
if attr == 'NewFiles': assert s.get_stat(attr) == 3
else: assert s.get_stat(attr) is None, (attr, s.__dict__[attr])
s1 = StatsObj()
s1 = statistics.StatsObj()
self.set_obj(s1)
assert not s1.stats_equal(s)
s2 = StatsObj()
s2 = statistics.StatsObj()
s2.set_stats_from_string(s1.get_stats_string())
assert s1.stats_equal(s2)
def test_write_rp(self):
"""Test reading and writing of statistics object"""
rp = RPath(Globals.local_connection, "testfiles/statstest")
rp = rpath.RPath(Globals.local_connection, "testfiles/statstest")
if rp.lstat(): rp.delete()
s = StatsObj()
s = statistics.StatsObj()
self.set_obj(s)
s.write_stats_to_rp(rp)
s2 = StatsObj()
s2 = statistics.StatsObj()
assert not s2.stats_equal(s)
s2.read_stats_from_rp(rp)
assert s2.stats_equal(s)
def testAverage(self):
"""Test making an average statsobj"""
s1 = StatsObj()
s1 = statistics.StatsObj()
s1.StartTime = 5
s1.EndTime = 10
s1.ElapsedTime = 5
......@@ -126,7 +126,7 @@ TotalDestinationSizeChange 7 (7 bytes)
s1.SourceFiles = 100
s1.NewFileSize = 4
s2 = StatsObj()
s2 = statistics.StatsObj()
s2.StartTime = 25
s2.EndTime = 35
s2.ElapsedTime = 10
......@@ -134,7 +134,7 @@ TotalDestinationSizeChange 7 (7 bytes)
s2.SourceFiles = 50
s2.DeletedFiles = 0
s3 = StatsObj().set_to_average([s1, s2])
s3 = statistics.StatsObj().set_to_average([s1, s2])
assert s3.StartTime is s3.EndTime is None
assert s3.ElapsedTime == 7.5
assert s3.DeletedFiles is s3.NewFileSize is None, (s3.DeletedFiles,
......
import unittest, time
import unittest, time, types
from commontest import *
from rdiff_backup import Globals, Time
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment