Commit e95885f3 authored by bescoto's avatar bescoto

Major refactoring - avoid use of 'from XX import *' in favor of more

normal 'import XXX' syntax.  The previous way was an artifact from
earlier versions where the whole program fit in one file.


git-svn-id: http://svn.savannah.nongnu.org/svn/rdiff-backup@252 2b77aa54-bcbc-44c9-a7ec-4f6cf2b41109
parent 7cfed788
......@@ -27,9 +27,7 @@ them over the usual 255 character limit.
"""
import re
from log import *
from robust import *
import Globals
import Globals, log
max_filename_length = 255
......@@ -55,8 +53,8 @@ def set_init_quote_vals_local():
global chars_to_quote, quoting_char
chars_to_quote = Globals.chars_to_quote
if len(Globals.quoting_char) != 1:
Log.FatalError("Expected single character for quoting char,"
"got '%s' instead" % (Globals.quoting_char,))
log.Log.FatalError("Expected single character for quoting char,"
"got '%s' instead" % (Globals.quoting_char,))
quoting_char = Globals.quoting_char
init_quoting_regexps()
......@@ -68,8 +66,8 @@ def init_quoting_regexps():
re.compile("[%s%s]" % (chars_to_quote, quoting_char), re.S)
unquoting_regexp = re.compile("%s[0-9]{3}" % quoting_char, re.S)
except re.error:
Log.FatalError("Error '%s' when processing char quote list %s" %
(re.error, chars_to_quote))
log.Log.FatalError("Error '%s' when processing char quote list %s" %
(re.error, chars_to_quote))
def quote(path):
"""Return quoted version of given path
......@@ -95,18 +93,4 @@ def unquote_single(match):
assert len(match.group()) == 4
return chr(int(match.group()[1:]))
def get_quoted_dir_children(rpath):
"""For rpath directory, return list of quoted children in dir"""
if not rpath.isdir(): return []
dir_pairs = [(unquote(filename), filename)
for filename in Robust.listrp(rpath)]
dir_pairs.sort() # sort by real index, not quoted part
child_list = []
for unquoted, filename in dir_pairs:
childrp = rpath.append(unquoted)
childrp.quote_path()
child_list.append(childrp)
return child_list
......@@ -246,7 +246,7 @@ def postset_regexp_local(name, re_string, flags):
if flags: globals()[name] = re.compile(re_string, flags)
else: globals()[name] = re.compile(re_string)
def set_select(source, rpath, tuplelist, quote_mode, *filelists):
def set_select(source, Sel_Obj, rpath, tuplelist, quote_mode, *filelists):
"""Initialize select object using tuplelist
Note that each list in filelists must each be passed as
......@@ -256,12 +256,8 @@ def set_select(source, rpath, tuplelist, quote_mode, *filelists):
"""
global select_source, select_mirror
sel = Select(rpath, quote_mode)
sel = Sel_Obj(rpath, quote_mode)
sel.ParseArgs(tuplelist, filelists)
if source: select_source = sel
else: select_mirror = sel
from rpath import * # kludge to avoid circularity - not needed in this module
from log import * # another kludge
from selection import *
......@@ -32,7 +32,7 @@ side. The source side should only transmit inode information.
from __future__ import generators
import cPickle
import Globals, Time, TempFile, rpath, log, robust
# In all of these lists of indicies are the values. The keys in
# _inode_ ones are (inode, devloc) pairs.
......@@ -138,8 +138,8 @@ def restore_link(index, rpath):
for linked_index in _src_index_indicies[index]:
if linked_index in _restore_index_path:
srcpath = _restore_index_path[linked_index]
Log("Restoring %s by hard linking to %s" %
(rpath.path, srcpath), 6)
log.Log("Restoring %s by hard linking to %s" %
(rpath.path, srcpath), 6)
rpath.hardlink(srcpath)
return 1
_restore_index_path[index] = rpath.path
......@@ -148,8 +148,8 @@ def restore_link(index, rpath):
def link_rp(src_rorp, dest_rpath, dest_root = None):
"""Make dest_rpath into a link analogous to that of src_rorp"""
if not dest_root: dest_root = dest_rpath # use base of dest_rpath
dest_link_rpath = RPath(dest_root.conn, dest_root.base,
get_indicies(src_rorp, 1)[0])
dest_link_rpath = rpath.RPath(dest_root.conn, dest_root.base,
get_indicies(src_rorp, 1)[0])
dest_rpath.hardlink(dest_link_rpath.path)
def write_linkdict(rpath, dict, compress = None):
......@@ -161,13 +161,13 @@ def write_linkdict(rpath, dict, compress = None):
"""
assert (Globals.isbackup_writer and
rpath.conn is Globals.local_connection)
tf = TempFileManager.new(rpath)
tf = TempFile.new(rpath)
def init():
fp = tf.open("wb", compress)
cPickle.dump(dict, fp)
assert not fp.close()
tf.setdata()
Robust.make_tf_robustaction(init, (tf,), (rpath,)).execute()
robust.make_tf_robustaction(init, (tf,), (rpath,)).execute()
def get_linkrp(data_rpath, time, prefix):
"""Return RPath of linkdata, or None if cannot find"""
......@@ -191,7 +191,7 @@ def final_writedata():
"""Write final checkpoint data to rbdir after successful backup"""
global final_inc
if _src_index_indicies:
Log("Writing hard link data", 6)
log.Log("Writing hard link data", 6)
if Globals.compression:
final_inc = Globals.rbdir.append("hardlink_data.%s.data.gz" %
Time.curtimestr)
......@@ -218,7 +218,7 @@ def final_checkpoint(data_rpath):
after every 20 seconds or whatever, but just at the end.
"""
Log("Writing intermediate hard link data to disk", 2)
log.Log("Writing intermediate hard link data to disk", 2)
src_inode_rp = data_rpath.append("hardlink_source_inode_checkpoint."
"%s.data" % Time.curtimestr)
src_index_rp = data_rpath.append("hardlink_source_index_checkpoint."
......@@ -251,7 +251,7 @@ def retrieve_checkpoint(data_rpath, time):
dest_index = get_linkdata(data_rpath, time,
"hardlink_dest_index_checkpoint")
except cPickle.UnpicklingError:
Log("Unpickling Error", 2)
log.Log("Unpickling Error", 2)
return None
if (src_inode is None or src_index is None or
dest_inode is None or dest_index is None): return None
......@@ -271,7 +271,3 @@ def remove_all_checkpoints():
rp.delete()
from log import *
from robust import *
from rpath import *
import Globals, Time
......@@ -20,16 +20,10 @@
"""Start (and end) here - read arguments, set global settings, etc."""
from __future__ import generators
import getopt, sys, re
from log import *
from lazy import *
from connection import *
from rpath import *
from robust import *
from restore import *
from highlevel import *
from manage import *
import Globals, Time, SetConnections
import getopt, sys, re, os
from log import Log
import Globals, Time, SetConnections, selection, robust, rpath, \
manage, highlevel, connection, restore, FilenameMapping, Security
action = None
......@@ -164,7 +158,7 @@ def set_action():
if l == 0: commandline_error("No arguments given")
elif l == 1: action = "restore"
elif l == 2:
if RPath(Globals.local_connection, args[0]).isincfile():
if rpath.RPath(Globals.local_connection, args[0]).isincfile():
action = "restore"
else: action = "backup"
else: commandline_error("Too many arguments given")
......@@ -207,13 +201,14 @@ def misc_setup(rps):
Globals.postset_regexp('no_compression_regexp',
Globals.no_compression_regexp_string)
for conn in Globals.connections: Robust.install_signal_handlers()
for conn in Globals.connections: robust.install_signal_handlers()
def take_action(rps):
"""Do whatever action says"""
if action == "server": PipeConnection(sys.stdin, sys.stdout).Server()
if action == "server":
connection.PipeConnection(sys.stdin, sys.stdout).Server()
elif action == "backup": Backup(rps[0], rps[1])
elif action == "restore": restore(*rps)
elif action == "restore": Restore(*rps)
elif action == "restore-as-of": RestoreAsOf(rps[0], rps[1])
elif action == "test-server": SetConnections.TestConnections()
elif action == "list-changed-since": ListChangedSince(rps[0])
......@@ -247,14 +242,16 @@ def Backup(rpin, rpout):
backup_init_dirs(rpin, rpout)
if prevtime:
Time.setprevtime(prevtime)
HighLevel.Mirror_and_increment(rpin, rpout, incdir)
else: HighLevel.Mirror(rpin, rpout, incdir)
highlevel.HighLevel.Mirror_and_increment(rpin, rpout, incdir)
else: highlevel.HighLevel.Mirror(rpin, rpout, incdir)
rpout.conn.Main.backup_touch_curmirror_local(rpin, rpout)
def backup_init_select(rpin, rpout):
"""Create Select objects on source and dest connections"""
rpin.conn.Globals.set_select(1, rpin, select_opts, None, *select_files)
rpout.conn.Globals.set_select(0, rpout, select_mirror_opts, 1)
rpin.conn.Globals.set_select(1, selection.Select,
rpin, select_opts, None, *select_files)
rpout.conn.Globals.set_select(0, selection.Select,
rpout, select_mirror_opts, 1)
def backup_init_dirs(rpin, rpout):
"""Make sure rpin and rpout are valid, init data dir and logging"""
......@@ -273,7 +270,7 @@ def backup_init_dirs(rpin, rpout):
datadir = rpout.append("rdiff-backup-data")
SetConnections.UpdateGlobal('rbdir', datadir)
incdir = RPath(rpout.conn, os.path.join(datadir.path, "increments"))
incdir = rpath.RPath(rpout.conn, os.path.join(datadir.path, "increments"))
prevtime = backup_get_mirrortime()
if rpout.lstat():
......@@ -336,14 +333,14 @@ def backup_touch_curmirror_local(rpin, rpout):
"""
datadir = Globals.rbdir
map(RPath.delete, backup_get_mirrorrps())
map(rpath.RPath.delete, backup_get_mirrorrps())
mirrorrp = datadir.append("current_mirror.%s.%s" % (Time.curtimestr,
"data"))
Log("Touching mirror marker %s" % mirrorrp.path, 6)
mirrorrp.touch()
RPath.copy_attribs(rpin, rpout)
rpath.copy_attribs(rpin, rpout)
def restore(src_rp, dest_rp = None):
def Restore(src_rp, dest_rp = None):
"""Main restoring function
Here src_rp should be an increment file, and if dest_rp is
......@@ -373,7 +370,7 @@ def restore_common(rpin, target, time):
inc_rpath = datadir.append_path('increments', index)
restore_init_select(mirror_root, target)
restore_start_log(rpin, target, time)
Restore.Restore(inc_rpath, mirror, target, time)
restore.Restore(inc_rpath, mirror, target, time)
Log("Restore ended", 4)
def restore_start_log(rpin, target, time):
......@@ -398,8 +395,8 @@ def restore_check_paths(rpin, rpout, restoreasof = None):
Try restoring from an increment file (the filenames look like
"foobar.2001-09-01T04:49:04-07:00.diff").""" % rpin.path)
if not rpout: rpout = RPath(Globals.local_connection,
rpin.getincbase_str())
if not rpout: rpout = rpath.RPath(Globals.local_connection,
rpin.getincbase_str())
if rpout.lstat():
Log.FatalError("Restore target %s already exists, "
"and will not be overwritten." % rpout.path)
......@@ -413,8 +410,9 @@ def restore_init_select(rpin, rpout):
the restore operation isn't.
"""
Globals.set_select(1, rpin, select_mirror_opts, None)
Globals.set_select(0, rpout, select_opts, None, *select_files)
Globals.set_select(1, selection.Select, rpin, select_mirror_opts, None)
Globals.set_select(0, selection.Select,
rpout, select_opts, None, *select_files)
def restore_get_root(rpin):
"""Return (mirror root, index) and set the data dir
......@@ -438,7 +436,7 @@ def restore_get_root(rpin):
i = len(pathcomps)
while i >= 2:
parent_dir = RPath(rpin.conn, "/".join(pathcomps[:i]))
parent_dir = rpath.RPath(rpin.conn, "/".join(pathcomps[:i]))
if (parent_dir.isdir() and
"rdiff-backup-data" in parent_dir.listdir()): break
i = i-1
......@@ -467,11 +465,11 @@ def ListIncrements(rp):
mirror_root.append_path("rdiff-backup-data")
mirrorrp = mirror_root.new_index(index)
inc_rpath = datadir.append_path('increments', index)
incs = Restore.get_inclist(inc_rpath)
mirror_time = Restore.get_mirror_time()
incs = restore.get_inclist(inc_rpath)
mirror_time = restore.get_mirror_time()
if Globals.parsable_output:
print Manage.describe_incs_parsable(incs, mirror_time, mirrorrp)
else: print Manage.describe_incs_human(incs, mirror_time, mirrorrp)
print manage.describe_incs_parsable(incs, mirror_time, mirrorrp)
else: print manage.describe_incs_human(incs, mirror_time, mirrorrp)
def CalculateAverage(rps):
......@@ -495,7 +493,7 @@ def RemoveOlderThan(rootrp):
Log("Deleting increment(s) before %s" % timep, 4)
times_in_secs = map(lambda inc: Time.stringtotime(inc.getinctime()),
Restore.get_inclist(datadir.append("increments")))
restore.get_inclist(datadir.append("increments")))
times_in_secs = filter(lambda t: t < time, times_in_secs)
if not times_in_secs:
Log.FatalError("No increments older than %s found" % timep)
......@@ -510,7 +508,7 @@ def RemoveOlderThan(rootrp):
if len(times_in_secs) == 1:
Log("Deleting increment at time:\n" + inc_pretty_time, 3)
else: Log("Deleting increments at times:\n" + inc_pretty_time, 3)
Manage.delete_earlier_than(datadir, time)
manage.delete_earlier_than(datadir, time)
def ListChangedSince(rp):
......@@ -519,12 +517,12 @@ def ListChangedSince(rp):
except Time.TimeException, exc: Log.FatalError(str(exc))
mirror_root, index = restore_get_root(rp)
Globals.rbdir = datadir = mirror_root.append_path("rdiff-backup-data")
mirror_time = Restore.get_mirror_time()
mirror_time = restore.get_mirror_time()
def get_rids_recursive(rid):
"""Yield all the rids under rid that have inc newer than rest_time"""
yield rid
for sub_rid in Restore.yield_rids(rid, rest_time, mirror_time):
for sub_rid in restore.yield_rids(rid, rest_time, mirror_time):
for sub_sub_rid in get_rids_recursive(sub_rid): yield sub_sub_rid
def determineChangeType(incList):
......@@ -538,8 +536,8 @@ def ListChangedSince(rp):
else: return "Unknown!"
inc_rpath = datadir.append_path('increments', index)
inc_list = Restore.get_inclist(inc_rpath)
root_rid = RestoreIncrementData(index, inc_rpath, inc_list)
inc_list = restore.get_inclist(inc_rpath)
root_rid = restore.RestoreIncrementData(index, inc_rpath, inc_list)
for rid in get_rids_recursive(root_rid):
if rid.inc_list:
if not rid.index: path = "."
......
......@@ -19,8 +19,8 @@
"""Misc statistics methods, pertaining to dir and session stat files"""
from statistics import *
import time
import Globals, Hardlink, increment, log, statistics, Time
# This is the RPath of the directory statistics file, and the
# associated open file. It will hold a line of statistics for
......@@ -34,7 +34,7 @@ _dir_stats_header = """# rdiff-backup directory statistics file
#
# Each line is in the following format:
# RelativeDirName %s
""" % " ".join(StatsObj.stat_file_attrs)
""" % " ".join(statistics.StatsObj.stat_file_attrs)
def open_dir_stats_file():
"""Open directory statistics file, write header"""
......@@ -43,12 +43,12 @@ def open_dir_stats_file():
if Globals.compression: suffix = "data.gz"
else: suffix = "data"
_dir_stats_rp = Inc.get_inc(Globals.rbdir.append("directory_statistics"),
Time.curtime, suffix)
_dir_stats_rp = increment.get_inc(
Globals.rbdir.append("directory_statistics"), Time.curtime, suffix)
if _dir_stats_rp.lstat():
Log("Warning, statistics file %s already exists, appending" %
_dir_stats_rp.path, 2)
log.Log("Warning, statistics file %s already exists, appending" %
_dir_stats_rp.path, 2)
_dir_stats_fp = _dir_stats_rp.open("ab", Globals.compression)
else: _dir_stats_fp = _dir_stats_rp.open("wb", Globals.compression)
_dir_stats_fp.write(_dir_stats_header)
......@@ -68,8 +68,8 @@ def close_dir_stats_file():
def write_session_statistics(statobj):
"""Write session statistics into file, log"""
stat_inc = Inc.get_inc(Globals.rbdir.append("session_statistics"),
Time.curtime, "data")
stat_inc = increment.get_inc(
Globals.rbdir.append("session_statistics"), Time.curtime, "data")
statobj.StartTime = Time.curtime
statobj.EndTime = time.time()
......@@ -85,9 +85,8 @@ def write_session_statistics(statobj):
statobj.write_stats_to_rp(stat_inc)
if Globals.print_statistics:
message = statobj.get_stats_logstring("Session statistics")
Log.log_to_file(message)
log.Log.log_to_file(message)
Globals.client_conn.sys.stdout.write(message)
from increment import *
import Hardlink
......@@ -25,10 +25,10 @@ RobustAction and the like.
"""
import os, librsync
from log import Log
import robust, TempFile, Globals
class RdiffException(Exception): pass
def get_signature(rp):
"""Take signature of rpin file and return in file object"""
Log("Getting signature of %s" % rp.path, 7)
......@@ -52,9 +52,9 @@ def write_delta_action(basis, new, delta, compress = None):
before written to delta.
"""
delta_tf = TempFileManager.new(delta)
delta_tf = TempFile.new(delta)
def init(): write_delta(basis, new, delta_tf, compress)
return Robust.make_tf_robustaction(init, delta_tf, delta)
return robust.make_tf_robustaction(init, delta_tf, delta)
def write_delta(basis, new, delta, compress = None):
"""Write rdiff delta which brings basis to new"""
......@@ -74,12 +74,12 @@ def patch_action(rp_basis, rp_delta, rp_out = None, out_tf = None,
"""
if not rp_out: rp_out = rp_basis
if not out_tf: out_tf = TempFileManager.new(rp_out)
if not out_tf: out_tf = TempFile.new(rp_out)
def init():
rp_basis.conn.Rdiff.patch_local(rp_basis, rp_delta,
out_tf, delta_compressed)
out_tf.setdata()
return Robust.make_tf_robustaction(init, out_tf, rp_out)
return robust.make_tf_robustaction(init, out_tf, rp_out)
def patch_local(rp_basis, rp_delta, outrp, delta_compressed = None):
"""Patch routine that must be run on rp_basis.conn
......@@ -99,20 +99,20 @@ def patch_local(rp_basis, rp_delta, outrp, delta_compressed = None):
def patch_with_attribs_action(rp_basis, rp_delta, rp_out = None):
"""Like patch_action, but also transfers attributs from rp_delta"""
if not rp_out: rp_out = rp_basis
tf = TempFileManager.new(rp_out)
return Robust.chain_nested(patch_action(rp_basis, rp_delta, rp_out, tf),
Robust.copy_attribs_action(rp_delta, tf))
tf = TempFile.new(rp_out)
return robust.chain_nested(patch_action(rp_basis, rp_delta, rp_out, tf),
robust.copy_attribs_action(rp_delta, tf))
def copy_action(rpin, rpout):
"""Use rdiff to copy rpin to rpout, conserving bandwidth"""
if not rpin.isreg() or not rpout.isreg() or rpin.conn is rpout.conn:
# rdiff not applicable, fallback to regular copying
return Robust.copy_action(rpin, rpout)
return robust.copy_action(rpin, rpout)
Log("Rdiff copying %s to %s" % (rpin.path, rpout.path), 6)
out_tf = TempFileManager.new(rpout)
out_tf = TempFile.new(rpout)
def init(): rpout.conn.Rdiff.copy_local(rpin, rpout, out_tf)
return Robust.make_tf_robustaction(init, out_tf, rpout)
return robust.make_tf_robustaction(init, out_tf, rpout)
def copy_local(rpin, rpout, rpnew):
"""Write rpnew == rpin using rpout as basis. rpout and rpnew local"""
......@@ -122,6 +122,4 @@ def copy_local(rpin, rpout, rpnew):
rpnew.write_from_fileobj(librsync.PatchedFile(rpout.open("rb"), deltafile))
from log import *
from robust import *
......@@ -20,8 +20,7 @@
"""Functions to make sure remote requests are kosher"""
import sys, tempfile
import Globals, Main
from rpath import *
import Globals, Main, rpath
class Violation(Exception):
"""Exception that indicates an improper request has been received"""
......@@ -76,8 +75,8 @@ def set_security_level(action, cmdpairs):
rdir = tempfile.gettempdir()
elif islocal(cp1):
sec_level = "read-only"
rdir = Main.restore_get_root(RPath(Globals.local_connection,
getpath(cp1)))[0].path
rdir = Main.restore_get_root(rpath.RPath(Globals.local_connection,
getpath(cp1)))[0].path
else:
assert islocal(cp2)
sec_level = "all"
......@@ -101,8 +100,8 @@ def set_security_level(action, cmdpairs):
else: assert 0, "Unknown action %s" % action
Globals.security_level = sec_level
Globals.restrict_path = RPath(Globals.local_connection,
rdir).normalize().path
Globals.restrict_path = rpath.RPath(Globals.local_connection,
rdir).normalize().path
def set_allowed_requests(sec_level):
"""Set the allowed requests list using the security level"""
......@@ -111,44 +110,46 @@ def set_allowed_requests(sec_level):
allowed_requests = ["VirtualFile.readfromid", "VirtualFile.closebyid",
"Globals.get", "Globals.is_not_None",
"Globals.get_dict_val",
"Log.open_logfile_allconn",
"Log.close_logfile_allconn",
"log.Log.open_logfile_allconn",
"log.Log.close_logfile_allconn",
"SetConnections.add_redirected_conn",
"RedirectedRun",
"sys.stdout.write"]
if sec_level == "minimal": pass
elif sec_level == "read-only" or sec_level == "update-only":
allowed_requests.extend(["C.make_file_dict",
"os.getuid",
"os.listdir",
"Time.setcurtime_local",
"Resume.ResumeCheck",
"HLSourceStruct.split_initial_dsiter",
"HLSourceStruct.get_diffs_and_finalize",
"RPathStatic.gzip_open_local_read",
"RPathStatic.open_local_read"])
allowed_requests.extend(
["C.make_file_dict",
"os.getuid",
"os.listdir",
"Time.setcurtime_local",
"robust.Resume.ResumeCheck",
"highlevel.HLSourceStruct.split_initial_dsiter",
"highlevel.HLSourceStruct.get_diffs_and_finalize",
"rpath.gzip_open_local_read",
"rpath.open_local_read"])
if sec_level == "update-only":
allowed_requests. \
extend(["Log.open_logfile_local", "Log.close_logfile_local",
"Log.close_logfile_allconn", "Log.log_to_file",
"SaveState.init_filenames",
"SaveState.touch_last_file",
"HLDestinationStruct.get_sigs",
"HLDestinationStruct.patch_w_datadir_writes",
"HLDestinationStruct.patch_and_finalize",
"HLDestinationStruct.patch_increment_and_finalize",
"Main.backup_touch_curmirror_local",
"Globals.ITRB.increment_stat"])
allowed_requests.extend(
["Log.open_logfile_local", "Log.close_logfile_local",
"Log.close_logfile_allconn", "Log.log_to_file",
"robust.SaveState.init_filenames",
"robust.SaveState.touch_last_file",
"highlevel.HLDestinationStruct.get_sigs",
"highlevel.HLDestinationStruct.patch_w_datadir_writes",
"highlevel.HLDestinationStruct.patch_and_finalize",
"highlevel.HLDestinationStruct.patch_increment_and_finalize",
"Main.backup_touch_curmirror_local",
"Globals.ITRB.increment_stat"])
if Globals.server:
allowed_requests.extend(["SetConnections.init_connection_remote",
"Log.setverbosity",
"Log.setterm_verbosity",
"Time.setprevtime_local",
"FilenameMapping.set_init_quote_vals_local",
"Globals.postset_regexp_local",
"Globals.set_select",
"HLSourceStruct.set_session_info",
"HLDestinationStruct.set_session_info"])
allowed_requests.extend(
["SetConnections.init_connection_remote",
"Log.setverbosity",
"Log.setterm_verbosity",
"Time.setprevtime_local",
"FilenameMapping.set_init_quote_vals_local",
"Globals.postset_regexp_local",
"Globals.set_select",
"highlevel.HLSourceStruct.set_session_info",
"highlevel.HLDestinationStruct.set_session_info"])
def vet_request(request, arglist):
"""Examine request for security violations"""
......@@ -156,7 +157,7 @@ def vet_request(request, arglist):
security_level = Globals.security_level
if Globals.restrict_path:
for arg in arglist:
if isinstance(arg, RPath): vet_rpath(arg)
if isinstance(arg, rpath.RPath): vet_rpath(arg)
if security_level == "all": return
if request.function_string in allowed_requests: return
if request.function_string == "Globals.set":
......
......@@ -25,6 +25,10 @@ the related connections.
"""
import os
from log import Log
import Globals, FilenameMapping, connection, rpath
# This is the schema that determines how rdiff-backup will open a
# pipe to the remote system. If the file is given as A::B, %s will
# be substituted with A in the schema.
......@@ -68,7 +72,7 @@ def cmdpair2rp(cmd_pair):
cmd, filename = cmd_pair
if cmd: conn = init_connection(cmd)
else: conn = Globals.local_connection
return RPath(conn, filename).normalize()
return rpath.RPath(conn, filename).normalize()
def desc2cmd_pairs(desc_pair):
"""Return pair (remote_cmd, filename) from desc_pair"""
......@@ -127,7 +131,7 @@ def init_connection(remote_cmd):
Log("Executing " + remote_cmd, 4)
stdin, stdout = os.popen2(remote_cmd)
conn_number = len(Globals.connections)
conn = PipeConnection(stdout, stdin, conn_number)
conn = connection.PipeConnection(stdout, stdin, conn_number)
check_connection_version(conn, remote_cmd)
Log("Registering connection %d" % conn_number, 7)
......@@ -138,7 +142,7 @@ def init_connection(remote_cmd):
def check_connection_version(conn, remote_cmd):
"""Log warning if connection has different version"""
try: remote_version = conn.Globals.get('version')
except ConnectionReadError, exception:
except connection.ConnectionReadError, exception:
Log.FatalError("""%s
Couldn't start up the remote connection by executing
......@@ -184,7 +188,7 @@ def init_connection_remote(conn_number):
def add_redirected_conn(conn_number):
"""Run on server side - tell about redirected connection"""
Globals.connection_dict[conn_number] = \
RedirectedConnection(conn_number)
connection.RedirectedConnection(conn_number)
def UpdateGlobal(setting_name, val):
"""Update value of global variable across all connections"""
......@@ -230,9 +234,3 @@ Local version: %s
Remote version: %s""" % (Globals.version, version)
else: print "Server OK"
from log import *
from rpath import *
from connection import *
import Globals, FilenameMapping
......@@ -20,7 +20,7 @@
"""Support code for remote execution and data transfer"""
from __future__ import generators
import types, os, tempfile, cPickle, shutil, traceback, pickle, socket
import types, os, tempfile, cPickle, shutil, traceback, pickle, socket, sys
class ConnectionError(Exception): pass
......@@ -121,11 +121,13 @@ class LowLevelPipeConnection(Connection):
"""Put an object into the pipe (will send raw if string)"""
Log.conn("sending", obj, req_num)
if type(obj) is types.StringType: self._putbuf(obj, req_num)
elif isinstance(obj, Connection): self._putconn(obj, req_num)
elif isinstance(obj, TempFile): self._puttempfile(obj, req_num)
elif isinstance(obj, DSRPath): self._putdsrpath(obj, req_num)
elif isinstance(obj, RPath): self._putrpath(obj, req_num)
elif isinstance(obj, RORPath): self._putrorpath(obj, req_num)
elif isinstance(obj, connection.Connection):self._putconn(obj, req_num)
elif isinstance(obj, TempFile.TempFile):
self._puttempfile(obj, req_num)
elif isinstance(obj, destructive_stepping.DSRPath):
self._putdsrpath(obj, req_num)
elif isinstance(obj, rpath.RPath): self._putrpath(obj, req_num)
elif isinstance(obj, rpath.RORPath): self._putrorpath(obj, req_num)
elif ((hasattr(obj, "read") or hasattr(obj, "write"))
and hasattr(obj, "close")): self._putfile(obj, req_num)
elif hasattr(obj, "next"): self._putiter(obj, req_num)
......@@ -146,7 +148,7 @@ class LowLevelPipeConnection(Connection):
def _putiter(self, iterator, req_num):
"""Put an iterator through the pipe"""
self._write("i", str(VirtualFile.new(RORPIter.ToFile(iterator))),
self._write("i", str(VirtualFile.new(rorpiter.ToFile(iterator))),
req_num)
def _puttempfile(self, tempfile, req_num):
......@@ -239,8 +241,8 @@ class LowLevelPipeConnection(Connection):
elif format_string == "b": result = data
elif format_string == "f": result = VirtualFile(self, int(data))
elif format_string == "i":
result = RORPIter.FromFile(BufferedRead(VirtualFile(self,
int(data))))
result = rorpiter.FromFile(iterfile.BufferedRead(
VirtualFile(self, int(data))))
elif format_string == "t": result = self._gettempfile(data)
elif format_string == "r": result = self._getrorpath(data)
elif format_string == "R": result = self._getrpath(data)
......@@ -254,23 +256,25 @@ class LowLevelPipeConnection(Connection):
def _getrorpath(self, raw_rorpath_buf):
"""Reconstruct RORPath object from raw data"""
index, data = cPickle.loads(raw_rorpath_buf)
return RORPath(index, data)
return rpath.RORPath(index, data)
def _gettempfile(self, raw_tf_buf):
"""Return TempFile object indicated by raw_tf_buf"""
conn_number, base, index, data = cPickle.loads(raw_tf_buf)
return TempFile(Globals.connection_dict[conn_number],
base, index, data)
return TempFile.TempFile(Globals.connection_dict[conn_number],
base, index, data)
def _getrpath(self, raw_rpath_buf):
"""Return RPath object indicated by raw_rpath_buf"""
conn_number, base, index, data = cPickle.loads(raw_rpath_buf)
return RPath(Globals.connection_dict[conn_number], base, index, data)
return rpath.RPath(Globals.connection_dict[conn_number],
base, index, data)
def _getdsrpath(self, raw_dsrpath_buf):
"""Return DSRPath object indicated by buf"""
conn_number, state_dict = cPickle.loads(raw_dsrpath_buf)
empty_dsrp = DSRPath("bypass", Globals.local_connection, None)
empty_dsrp = destructive_stepping.DSRPath("bypass",
Globals.local_connection, None)
empty_dsrp.__setstate__(state_dict)
empty_dsrp.conn = Globals.connection_dict[conn_number]
empty_dsrp.file = None
......@@ -538,22 +542,11 @@ class VirtualFile:
# everything has to be available here for remote connection's use, but
# put at bottom to reduce circularities.
import Globals, Time, Rdiff, Hardlink, FilenameMapping, C, Security, Main
from static import *
from lazy import *
from log import *
from iterfile import *
from connection import *
from rpath import *
from robust import *
from rorpiter import *
from selection import *
from statistics import *
from increment import *
from restore import *
from manage import *
from highlevel import *
import Globals, Time, Rdiff, Hardlink, FilenameMapping, C, Security, \
Main, rorpiter, selection, increment, statistics, manage, lazy, \
iterfile, rpath, robust, restore, manage, highlevel, connection, \
TempFile, destructive_stepping, SetConnections
from log import Log
Globals.local_connection = LocalConnection()
Globals.connections.append(Globals.local_connection)
......
# Copyright 2002 Ben Escoto
#
# This file is part of rdiff-backup.
......@@ -21,14 +22,14 @@
from __future__ import generators
import types
from rpath import *
from lazy import *
import Globals, rpath, log
class DSRPPermError(Exception):
"""Exception used when a DSRPath can't get sufficient permissions"""
pass
class DSRPath(RPath):
class DSRPath(rpath.RPath):
"""Destructive Stepping RPath
Sometimes when we traverse the directory tree, even when we just
......@@ -59,11 +60,11 @@ class DSRPath(RPath):
"""
if base == 0:
assert isinstance(conn_or_rp, RPath)
RPath.__init__(self, conn_or_rp.conn,
conn_or_rp.base, conn_or_rp.index)
assert isinstance(conn_or_rp, rpath.RPath)
rpath.RPath.__init__(self, conn_or_rp.conn,
conn_or_rp.base, conn_or_rp.index)
self.path = conn_or_rp.path # conn_or_rp may be quoted
else: RPath.__init__(self, conn_or_rp, base, index)
else: rpath.RPath.__init__(self, conn_or_rp, base, index)
if source != "bypass":
# "bypass" val is used when unpackaging over connection
......@@ -107,8 +108,8 @@ class DSRPath(RPath):
if not self.hasfullperms(): self.chmod_bypass(0700)
def warn(self, err):
Log("Received error '%s' when dealing with file %s, skipping..."
% (err, self.path), 1)
log.Log("Received error '%s' when dealing with file %s, skipping..."
% (err, self.path), 1)
raise DSRPPermError(self.path)
def __getstate__(self):
......@@ -136,7 +137,7 @@ class DSRPath(RPath):
def chmod(self, permissions):
"""Change permissions, delaying if self.perms_delayed is set"""
if self.delay_perms: self.newperms = self.data['perms'] = permissions
else: RPath.chmod(self, permissions)
else: rpath.RPath.chmod(self, permissions)
def getperms(self):
"""Return dsrp's intended permissions"""
......@@ -148,7 +149,7 @@ class DSRPath(RPath):
"""Change permissions without updating the data dictionary"""
self.delay_perms = 1
if self.newperms is None: self.newperms = self.getperms()
Log("DSRP: Perm bypass %s to %o" % (self.path, permissions), 8)
log.Log("DSRP: Perm bypass %s to %o" % (self.path, permissions), 8)
self.conn.os.chmod(self.path, permissions)
def settime(self, accesstime, modtime):
......@@ -157,12 +158,12 @@ class DSRPath(RPath):
if self.delay_mtime: self.newmtime = self.data['mtime'] = modtime
if not self.delay_atime or not self.delay_mtime:
RPath.settime(self, accesstime, modtime)
rpath.RPath.settime(self, accesstime, modtime)
def setmtime(self, modtime):
"""Change mtime, delaying if self.times_delayed is set"""
if self.delay_mtime: self.newmtime = self.data['mtime'] = modtime
else: RPath.setmtime(self, modtime)
else: rpath.RPath.setmtime(self, modtime)
def getmtime(self):
"""Return dsrp's intended modification time"""
......@@ -181,18 +182,18 @@ class DSRPath(RPath):
if not self.lstat(): return # File has been deleted in meantime
if self.delay_perms and self.newperms is not None:
Log("Finalizing permissions of dsrp %s to %s" %
(self.path, self.newperms), 8)
RPath.chmod(self, self.newperms)
log.Log("Finalizing permissions of dsrp %s to %s" %
(self.path, self.newperms), 8)
rpath.RPath.chmod(self, self.newperms)
do_atime = self.delay_atime and self.newatime is not None
do_mtime = self.delay_mtime and self.newmtime is not None
if do_atime and do_mtime:
RPath.settime(self, self.newatime, self.newmtime)
rpath.RPath.settime(self, self.newatime, self.newmtime)
elif do_atime and not do_mtime:
RPath.settime(self, self.newatime, self.getmtime())
rpath.RPath.settime(self, self.newatime, self.getmtime())
elif not do_atime and do_mtime:
RPath.setmtime(self, self.newmtime)
rpath.RPath.setmtime(self, self.newmtime)
def newpath(self, newpath, index = ()):
"""Return similar DSRPath but with new path"""
......@@ -208,29 +209,4 @@ class DSRPath(RPath):
return self.__class__(self.source, self.conn, self.base, index)
class DestructiveSteppingFinalizer(ITRBranch):
    """Finalizer that can work on an iterator of dsrpaths

    The reason we have to use an IterTreeReducer is that some files
    should be updated immediately, but for directories we sometimes
    need to update all the files in the directory before finally
    coming back to it.
    """
    dsrpath = None
    def start_process(self, index, dsrpath):
        """Remember this branch's dsrpath so write_changes runs at the end"""
        self.dsrpath = dsrpath
    def end_process(self):
        """Flush delayed attribute changes once the whole branch is done"""
        if self.dsrpath: self.dsrpath.write_changes()
    def can_fast_process(self, index, dsrpath):
        """Non-directories need no delayed handling; process them inline

        Bug fix: the original tested self.dsrpath -- the stored
        enclosing directory, which is (nearly) always a directory --
        so this effectively always returned false and every file got
        its own branch.  Test the incoming dsrpath instead, matching
        the other ITRBranch subclasses.  TODO(review): confirm against
        upstream history.
        """
        return not dsrpath.isdir()
    def fast_process(self, index, dsrpath):
        """Write the changes of the file itself, not the stored directory"""
        if dsrpath: dsrpath.write_changes()
from log import *
from robust import *
import Globals
from __future__ import generators
from manage import *
from rpath import *
import rpath, manage
#######################################################################
#
......
......@@ -20,17 +20,8 @@
"""High level functions for mirroring, mirror & inc, etc."""
from __future__ import generators
from static import *
class SkipFileException(Exception):
"""Signal that the current file should be skipped but then continue
This exception will often be raised when there is problem reading
an individual file, but it makes sense for the rest of the backup
to keep going.
"""
pass
import Globals, MiscStats, metadata, rorpiter, TempFile, \
Hardlink, robust, increment, rpath, lazy, static, log
class HighLevel:
......@@ -48,8 +39,8 @@ class HighLevel:
Otherwise only mirror and don't create any extra files.
"""
SourceS = src_rpath.conn.HLSourceStruct
DestS = dest_rpath.conn.HLDestinationStruct
SourceS = src_rpath.conn.highlevel.HLSourceStruct
DestS = dest_rpath.conn.highlevel.HLDestinationStruct
src_init_dsiter = SourceS.split_initial_dsiter()
dest_sigiter = DestS.get_sigs(dest_rpath, src_init_dsiter)
......@@ -61,8 +52,8 @@ class HighLevel:
def Mirror_and_increment(src_rpath, dest_rpath, inc_rpath,
session_info = None):
"""Mirror + put increments in tree based at inc_rpath"""
SourceS = src_rpath.conn.HLSourceStruct
DestS = dest_rpath.conn.HLDestinationStruct
SourceS = src_rpath.conn.highlevel.HLSourceStruct
DestS = dest_rpath.conn.highlevel.HLDestinationStruct
src_init_dsiter = SourceS.split_initial_dsiter()
dest_sigiter = DestS.get_sigs(dest_rpath, src_init_dsiter)
......@@ -72,7 +63,7 @@ class HighLevel:
dest_rpath.setdata()
inc_rpath.setdata()
MakeStatic(HighLevel)
static.MakeStatic(HighLevel)
class HLSourceStruct:
......@@ -80,7 +71,7 @@ class HLSourceStruct:
def split_initial_dsiter(cls):
"""Set iterators of all dsrps from rpath, returning one"""
dsiter = Globals.select_source.set_iter()
initial_dsiter1, cls.initial_dsiter2 = Iter.multiplex(dsiter, 2)
initial_dsiter1, cls.initial_dsiter2 = lazy.Iter.multiplex(dsiter, 2)
return initial_dsiter1
def get_diffs_and_finalize(cls, sigiter):
......@@ -90,10 +81,10 @@ class HLSourceStruct:
dissimilar files.
"""
collated = RORPIter.CollateIterators(cls.initial_dsiter2, sigiter)
collated = rorpiter.CollateIterators(cls.initial_dsiter2, sigiter)
def error_handler(exc, dest_sig, rp):
Log("Error %s producing a diff of %s" %
(exc, rp and rp.path), 2)
log.Log("Error %s producing a diff of %s" %
(exc, rp and rp.path), 2)
return None
def diffs():
......@@ -101,12 +92,12 @@ class HLSourceStruct:
if dest_sig:
if dest_sig.isplaceholder(): yield dest_sig
else:
diff = Robust.check_common_error(
error_handler, RORPIter.diffonce, [dest_sig, rp])
diff = robust.check_common_error(
error_handler, rorpiter.diffonce, [dest_sig, rp])
if diff: yield diff
return diffs()
MakeClass(HLSourceStruct)
static.MakeClass(HLSourceStruct)
class HLDestinationStruct:
......@@ -115,7 +106,7 @@ class HLDestinationStruct:
def split_initial_dsiter(cls):
"""Set initial_dsiters (iteration of all rps from rpath)"""
result, cls.initial_dsiter2 = \
Iter.multiplex(Globals.select_mirror.set_iter(), 2)
lazy.Iter.multiplex(Globals.select_mirror.set_iter(), 2)
return result
def get_dissimilar(cls, baserp, src_init_iter, dest_init_iter):
......@@ -134,14 +125,14 @@ class HLDestinationStruct:
will depend on the Globals.conn_bufsize value.
"""
collated = RORPIter.CollateIterators(src_init_iter, dest_init_iter)
collated = rorpiter.CollateIterators(src_init_iter, dest_init_iter)
def compare(src_rorp, dest_dsrp):
"""Return dest_dsrp if they are different, None if the same"""
if not dest_dsrp:
dest_dsrp = cls.get_dsrp(baserp, src_rorp.index)
if dest_dsrp.lstat():
Log("Warning: Found unexpected destination file %s, "
"not processing it." % dest_dsrp.path, 2)
log.Log("Warning: Found unexpected destination file %s, "
"not processing it." % dest_dsrp.path, 2)
return None
elif (src_rorp and src_rorp == dest_dsrp and
(not Globals.preserve_hardlinks or
......@@ -162,7 +153,7 @@ class HLDestinationStruct:
counter = 0
yield dsrp
elif counter == 20:
placeholder = RORPath(src_rorp.index)
placeholder = rpath.RORPath(src_rorp.index)
placeholder.make_placeholder()
counter = 0
yield placeholder
......@@ -185,11 +176,11 @@ class HLDestinationStruct:
metadata.CloseMetadata()
dup = duplicate_with_write(src_init_iter)
dissimilars = cls.get_dissimilar(baserp, dup, dest_iters1)
return RORPIter.Signatures(dissimilars)
return rorpiter.Signatures(dissimilars)
def get_dsrp(cls, dest_rpath, index):
"""Return initialized rpath based on dest_rpath with given index"""
rp = RPath(dest_rpath.conn, dest_rpath.base, index)
rp = rpath.RPath(dest_rpath.conn, dest_rpath.base, index)
if Globals.quoting_enabled: rp.quote_path()
return rp
......@@ -197,14 +188,16 @@ class HLDestinationStruct:
"""Return finalizer, starting from session info if necessary"""
old_finalizer = cls._session_info and cls._session_info.finalizer
if old_finalizer: return old_finalizer
else: return IterTreeReducer(DestructiveSteppingFinalizer, [])
else: return rorpiter.IterTreeReducer(
rorpiter.DestructiveSteppingFinalizer, [])
def get_ITR(cls, inc_rpath):
"""Return ITR, starting from state if necessary"""
if cls._session_info and cls._session_info.ITR:
return cls._session_info.ITR
else:
iitr = IterTreeReducer(IncrementITRB, [inc_rpath])
iitr = rorpiter.IterTreeReducer(increment.IncrementITRB,
[inc_rpath])
iitr.root_branch.override_changed()
Globals.ITRB = iitr.root_branch
iitr.root_branch.Errors = 0
......@@ -214,38 +207,38 @@ class HLDestinationStruct:
"""Return MirrorITR, starting from state if available"""
if cls._session_info and cls._session_info.ITR:
return cls._session_info.ITR
ITR = IterTreeReducer(MirrorITRB, [inc_rpath])
ITR = rorpiter.IterTreeReducer(increment.MirrorITRB, [inc_rpath])
Globals.ITRB = ITR.root_branch
ITR.root_branch.Errors = 0
return ITR
def patch_and_finalize(cls, dest_rpath, diffs):
"""Apply diffs and finalize"""
collated = RORPIter.CollateIterators(diffs, cls.initial_dsiter2)
collated = rorpiter.CollateIterators(diffs, cls.initial_dsiter2)
#finalizer = cls.get_finalizer()
diff_rorp, rp = None, None
def patch(diff_rorp, dsrp):
if not dsrp: dsrp = cls.get_dsrp(dest_rpath, diff_rorp.index)
if diff_rorp and not diff_rorp.isplaceholder():
RORPIter.patchonce_action(None, dsrp, diff_rorp).execute()
rorpiter.patchonce_action(None, dsrp, diff_rorp).execute()
return dsrp
def error_handler(exc, diff_rorp, dsrp):
filename = dsrp and dsrp.path or os.path.join(*diff_rorp.index)
Log("Error: %s processing file %s" % (exc, filename), 2)
log.Log("Error: %s processing file %s" % (exc, filename), 2)
for indexed_tuple in collated:
Log(lambda: "Processing %s" % str(indexed_tuple), 7)
log.Log(lambda: "Processing %s" % str(indexed_tuple), 7)
diff_rorp, dsrp = indexed_tuple
dsrp = Robust.check_common_error(error_handler, patch,
dsrp = robust.check_common_error(error_handler, patch,
[diff_rorp, dsrp])
#finalizer(dsrp.index, dsrp)
#finalizer.Finish()
def patch_w_datadir_writes(cls, dest_rpath, diffs, inc_rpath):
"""Apply diffs and finalize, with checkpointing and statistics"""
collated = RORPIter.CollateIterators(diffs, cls.initial_dsiter2)
collated = rorpiter.CollateIterators(diffs, cls.initial_dsiter2)
#finalizer, ITR = cls.get_finalizer(), cls.get_MirrorITR(inc_rpath)
finalizer, ITR = None, cls.get_MirrorITR(inc_rpath)
MiscStats.open_dir_stats_file()
......@@ -253,7 +246,7 @@ class HLDestinationStruct:
try:
for indexed_tuple in collated:
Log(lambda: "Processing %s" % str(indexed_tuple), 7)
log.Log(lambda: "Processing %s" % str(indexed_tuple), 7)
diff_rorp, dsrp = indexed_tuple
if not dsrp: dsrp = cls.get_dsrp(dest_rpath, diff_rorp.index)
if diff_rorp and diff_rorp.isplaceholder(): diff_rorp = None
......@@ -270,7 +263,7 @@ class HLDestinationStruct:
def patch_increment_and_finalize(cls, dest_rpath, diffs, inc_rpath):
"""Apply diffs, write increment if necessary, and finalize"""
collated = RORPIter.CollateIterators(diffs, cls.initial_dsiter2)
collated = rorpiter.CollateIterators(diffs, cls.initial_dsiter2)
#finalizer, ITR = cls.get_finalizer(), cls.get_ITR(inc_rpath)
finalizer, ITR = None, cls.get_ITR(inc_rpath)
MiscStats.open_dir_stats_file()
......@@ -278,7 +271,7 @@ class HLDestinationStruct:
try:
for indexed_tuple in collated:
Log(lambda: "Processing %s" % str(indexed_tuple), 7)
log.Log(lambda: "Processing %s" % str(indexed_tuple), 7)
diff_rorp, dsrp = indexed_tuple
index = indexed_tuple.index
if not dsrp: dsrp = cls.get_dsrp(dest_rpath, index)
......@@ -296,18 +289,12 @@ class HLDestinationStruct:
def handle_last_error(cls, dsrp, finalizer, ITR):
"""If catch fatal error, try to checkpoint before exiting"""
Log.exception(1, 2)
TracebackArchive.log()
log.Log.exception(1, 2)
robust.TracebackArchive.log()
#SaveState.checkpoint(ITR, finalizer, dsrp, 1)
#if Globals.preserve_hardlinks: Hardlink.final_checkpoint(Globals.rbdir)
#SaveState.touch_last_file_definitive()
raise
MakeClass(HLDestinationStruct)
static.MakeClass(HLDestinationStruct)
from log import *
from rpath import *
from robust import *
from increment import *
from rorpiter import *
import Globals, Hardlink, MiscStats, metadata
......@@ -20,7 +20,7 @@
"""Convert an iterator to a file object and vice-versa"""
import cPickle, array
import Globals, C
import Globals, C, robust, log
class IterFileException(Exception): pass
......@@ -200,7 +200,7 @@ class FileWrappingIter:
def addfromfile(self):
"""Read a chunk from the current file and return it"""
# Check file read for errors, buf = "" if find one
buf = Robust.check_common_error(self.read_error_handler,
buf = robust.check_common_error(self.read_error_handler,
self.currently_in_file.read,
[Globals.blocksize])
if not buf:
......@@ -210,7 +210,7 @@ class FileWrappingIter:
def read_error_handler(self, exc, blocksize):
"""Log error when reading from file"""
Log("Error '%s' reading from fileobj, truncating" % (str(exc),), 2)
log.Log("Error '%s' reading from fileobj, truncating" % (str(exc),), 2)
return ""
def _l2s_old(self, l):
......@@ -253,5 +253,4 @@ class BufferedRead:
def close(self): return self.file.close()
from log import *
from robust import *
......@@ -21,7 +21,8 @@
from __future__ import generators
import os, stat, types
from static import *
import static
class Iter:
"""Hold static methods for the manipulation of lazy iterators"""
......@@ -163,7 +164,7 @@ class Iter:
return tuple(map(make_iterator, range(num_of_forks)))
MakeStatic(Iter)
static.MakeStatic(Iter)
class IterMultiplex2:
......@@ -200,166 +201,3 @@ class IterMultiplex2:
else: elem = buf.pop(0) # a is in front, subtract an element
self.a_leading_by -= 1
yield elem
class IterTreeReducer:
    """Tree style reducer object for iterator

    The indicies of a RORPIter form a tree type structure. This class
    can be used on each element of an iter in sequence and the result
    will be as if the corresponding tree was reduced. This tries to
    bridge the gap between the tree nature of directories, and the
    iterator nature of the connection between hosts and the temporal
    order in which the files are processed.
    """
    def __init__(self, branch_class, branch_args):
        """ITR initializer

        branch_class -- ITRBranch subclass, instantiated once per subtree
        branch_args -- positional arguments handed to each new branch
        """
        self.branch_class = branch_class
        self.branch_args = branch_args
        # Index of the last element processed; None until the first call
        self.index = None
        self.root_branch = branch_class(*branch_args)
        # Stack of currently open branches, innermost (deepest) last
        self.branches = [self.root_branch]

    def finish_branches(self, index):
        """Run Finish() on all branches index has passed

        When we pass out of a branch, delete it and process it with
        the parent. The innermost branches will be the last in the
        list. Return None if we are out of the entire tree, and 1
        otherwise.
        """
        branches = self.branches
        while 1:
            to_be_finished = branches[-1]
            base_index = to_be_finished.base_index
            # index is inside a branch iff it has base_index as a prefix
            if base_index != index[:len(base_index)]:
                # out of the tree, finish with to_be_finished
                to_be_finished.call_end_proc()
                del branches[-1]
                if not branches: return None
                branches[-1].branch_process(to_be_finished)
            else: return 1

    def add_branch(self, index):
        """Return branch of type self.branch_class, add to branch list"""
        branch = self.branch_class(*self.branch_args)
        branch.base_index = index
        self.branches.append(branch)
        return branch

    def process_w_branch(self, branch, args):
        """Run start_process on latest branch

        Errors are routed to branch.on_error; success is recorded so
        later calls know whether fast/end processing may proceed.
        """
        Robust.check_common_error(branch.on_error,
                                  branch.start_process, args)
        if not branch.caught_exception: branch.start_successful = 1

    def Finish(self):
        """Call at end of sequence to tie everything up"""
        while 1:
            to_be_finished = self.branches.pop()
            to_be_finished.call_end_proc()
            if not self.branches: break
            self.branches[-1].branch_process(to_be_finished)

    def __call__(self, *args):
        """Process args, where args[0] is current position in iterator

        Returns true if args successfully processed, false if index is
        not in the current tree and thus the final result is
        available.

        Also note below we set self.index after doing the necessary
        start processing, in case there is a crash in the middle.
        """
        index = args[0]
        if self.index is None:
            # Very first element: it becomes the root branch's base
            self.root_branch.base_index = index
            self.process_w_branch(self.root_branch, args)
            self.index = index
            return 1
        if index <= self.index:
            # Indices must arrive in strictly increasing order; warn and
            # skip out-of-order elements instead of corrupting the stack
            Log("Warning: oldindex %s >= newindex %s" % (self.index, index), 2)
            return 1
        if self.finish_branches(index) is None:
            return None # We are no longer in the main tree
        last_branch = self.branches[-1]
        if last_branch.start_successful:
            if last_branch.can_fast_process(*args):
                last_branch.fast_process(*args)
            else:
                branch = self.add_branch(index)
                self.process_w_branch(branch, args)
        else: last_branch.log_prev_error(index)
        self.index = index
        return 1
class ITRBranch:
    """Helper class for IterTreeReducer below

    There are five stub functions below: start_process, end_process,
    branch_process, can_fast_process, and fast_process. A class that
    subclasses this one will probably fill in these functions to do
    more.

    It is important that this class be pickable, so keep that in mind
    when subclassing (this is used to resume failed sessions).
    """
    base_index = index = None
    finished = None
    caught_exception = start_successful = None

    def call_end_proc(self):
        """Runs the end_process on self, checking for errors"""
        if self.finished or not self.start_successful:
            self.caught_exception = 1
        if self.caught_exception:
            self.log_prev_error(self.base_index)
        else:
            Robust.check_common_error(self.on_error, self.end_process)
        self.finished = 1

    def start_process(self, *args):
        """Do some initial processing (stub)"""
        pass

    def end_process(self):
        """Do any final processing before leaving branch (stub)"""
        pass

    def branch_process(self, branch):
        """Process a branch right after it is finished (stub)"""
        assert branch.finished
        pass

    def can_fast_process(self, *args):
        """True if object can be processed without new branch (stub)"""
        return None

    def fast_process(self, *args):
        """Process args without new child branch (stub)"""
        pass

    def on_error(self, exc, *args):
        """This is run on any exception in start/end-process"""
        self.caught_exception = 1
        index_tuple = None
        if args and args[0] and isinstance(args[0], tuple):
            index_tuple = args[0]
        elif self.index:
            index_tuple = self.index
        if index_tuple is None:
            filename = "."
        else:
            filename = os.path.join(*index_tuple)
        Log("Error '%s' processing %s" % (exc, filename), 2)

    def log_prev_error(self, index):
        """Call function if no pending exception"""
        Log("Skipping %s because of previous error" %
            (os.path.join(*index),), 2)
# Put at bottom to prevent (viciously) circular module dependencies
from robust import *
from log import *
......@@ -20,6 +20,7 @@
"""Manage logging, displaying and recording messages with required verbosity"""
import time, sys, traceback, types
import Globals
class LoggerError(Exception): pass
......@@ -151,6 +152,7 @@ class Logger:
def FatalError(self, message):
self("Fatal Error: " + message, 1)
import Main
Main.cleanup()
sys.exit(1)
......@@ -180,4 +182,4 @@ class Logger:
logging_func(self.exception_to_string(), verbosity)
Log = Logger()
import Globals, Main
......@@ -20,91 +20,86 @@
"""list, delete, and otherwise manage increments"""
from __future__ import generators
from static import *
from log import *
import Globals, Time
from log import Log
import Globals, Time, static, manage
class ManageException(Exception): pass
class Manage:
    """List, describe, and delete backup increments

    NOTE(review): the methods take no self; MakeStatic(Manage) below
    presumably rewraps them as static methods -- confirm in static.py.
    """
    def get_file_type(rp):
        """Returns one of "regular", "directory", "missing", or "special"."""
        if not rp.lstat(): return "missing"
        elif rp.isdir(): return "directory"
        elif rp.isreg(): return "regular"
        else: return "special"

    def get_inc_type(inc):
        """Return file type increment represents"""
        assert inc.isincfile()
        type = inc.getinctype()
        if type == "dir": return "directory"
        elif type == "diff": return "regular"
        elif type == "missing": return "missing"
        # a snapshot increment is whatever kind of file it holds
        elif type == "snapshot": return Manage.get_file_type(inc)
        else: assert None, "Unknown type %s" % (type,)

    def describe_incs_parsable(incs, mirror_time, mirrorrp):
        """Return a string parsable by computer describing the increments

        Each line is a time in seconds of the increment, and then the
        type of the file. It will be sorted oldest to newest. For example:

        10000 regular
        20000 directory
        30000 special
        40000 missing
        50000 regular <- last will be the current mirror
        """
        incpairs = [(Time.stringtotime(inc.getinctime()), inc) for inc in incs]
        incpairs.sort()  # sorts by increment time, oldest first
        result = ["%s %s" % (time, Manage.get_inc_type(inc))
                  for time, inc in incpairs]
        result.append("%s %s" % (mirror_time, Manage.get_file_type(mirrorrp)))
        return "\n".join(result)

    def describe_incs_human(incs, mirror_time, mirrorrp):
        """Return a human-readable string describing all the root increments"""
        incpairs = [(Time.stringtotime(inc.getinctime()), inc) for inc in incs]
        incpairs.sort()
        result = ["Found %d increments:" % len(incpairs)]
        for time, inc in incpairs:
            result.append(" %s %s" %
                          (inc.dirsplit()[1], Time.timetopretty(time)))
        result.append("Current mirror: %s" % Time.timetopretty(mirror_time))
        return "\n".join(result)

    def delete_earlier_than(baserp, time):
        """Deleting increments older than time in directory baserp

        time is in seconds. It will then delete any empty directories
        in the tree. To process the entire backup area, the
        rdiff-backup-data directory should be the root of the tree.
        """
        # dispatch to baserp's own connection so traversal runs locally
        baserp.conn.Manage.delete_earlier_than_local(baserp, time)

    def delete_earlier_than_local(baserp, time):
        """Like delete_earlier_than, but run on local connection for speed"""
        assert baserp.conn is Globals.local_connection
        def yield_files(rp):
            # recursively yield rp and every path beneath it
            yield rp
            if rp.isdir():
                for filename in rp.listdir():
                    for sub_rp in yield_files(rp.append(filename)):
                        yield sub_rp
        for rp in yield_files(baserp):
            # delete old increment files and any now-empty directories
            if ((rp.isincfile() and
                 Time.stringtotime(rp.getinctime()) < time) or
                (rp.isdir() and not rp.listdir())):
                Log("Deleting increment file %s" % rp.path, 5)
                rp.delete()

# Make the methods above callable without an instance
MakeStatic(Manage)
def get_file_type(rp):
    """Returns one of "regular", "directory", "missing", or "special"."""
    if not rp.lstat():
        return "missing"
    if rp.isdir():
        return "directory"
    if rp.isreg():
        return "regular"
    return "special"
def get_inc_type(inc):
    """Return file type increment represents

    inc must be an increment file (inc.isincfile() true).  The
    increment-type tag is mapped onto the same names get_file_type
    returns; a "snapshot" increment is classified by the kind of file
    it actually holds.
    """
    assert inc.isincfile()
    # renamed local: the original shadowed the builtin type()
    inc_type = inc.getinctype()
    simple_types = {"dir": "directory",
                    "diff": "regular",
                    "missing": "missing"}
    if inc_type in simple_types: return simple_types[inc_type]
    if inc_type == "snapshot": return get_file_type(inc)
    assert None, "Unknown type %s" % (inc_type,)
def describe_incs_parsable(incs, mirror_time, mirrorrp):
    """Return a string parsable by computer describing the increments

    Each line is a time in seconds of the increment, and then the
    type of the file. It will be sorted oldest to newest. For example:

    10000 regular
    20000 directory
    30000 special
    40000 missing
    50000 regular <- last will be the current mirror
    """
    incpairs = []
    for inc in incs:
        incpairs.append((Time.stringtotime(inc.getinctime()), inc))
    incpairs.sort()  # oldest increment first
    lines = ["%s %s" % (inctime, get_inc_type(inc))
             for inctime, inc in incpairs]
    lines.append("%s %s" % (mirror_time, get_file_type(mirrorrp)))
    return "\n".join(lines)
def describe_incs_human(incs, mirror_time, mirrorrp):
    """Return a human-readable string describing all the root increments"""
    incpairs = [(Time.stringtotime(inc.getinctime()), inc) for inc in incs]
    incpairs.sort()  # oldest increment first
    lines = ["Found %d increments:" % len(incpairs)]
    for inctime, inc in incpairs:
        lines.append(" %s %s" % (inc.dirsplit()[1],
                                 Time.timetopretty(inctime)))
    lines.append("Current mirror: %s" % Time.timetopretty(mirror_time))
    return "\n".join(lines)
def delete_earlier_than(baserp, time):
    """Deleting increments older than time in directory baserp

    time is in seconds. It will then delete any empty directories
    in the tree. To process the entire backup area, the
    rdiff-backup-data directory should be the root of the tree.
    """
    # Dispatch to the connection that owns baserp so the recursive
    # traversal and deletion run locally rather than over the wire.
    local_delete = baserp.conn.manage.delete_earlier_than_local
    local_delete(baserp, time)
def delete_earlier_than_local(baserp, time):
    """Like delete_earlier_than, but run on local connection for speed"""
    assert baserp.conn is Globals.local_connection
    def walk(rp):
        """Recursively yield rp and every path beneath it"""
        yield rp
        if rp.isdir():
            for filename in rp.listdir():
                for sub_rp in walk(rp.append(filename)):
                    yield sub_rp
    for rp in walk(baserp):
        # Delete increment files older than time, plus any empty dirs.
        # The elif keeps the original short-circuit evaluation order.
        delete_it = None
        if rp.isincfile() and Time.stringtotime(rp.getinctime()) < time:
            delete_it = 1
        elif rp.isdir() and not rp.listdir():
            delete_it = 1
        if delete_it:
            Log("Deleting increment file %s" % rp.path, 5)
            rp.delete()
class IncObj:
......
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
......@@ -26,9 +26,8 @@ documentation on what this code does can be found on the man page.
from __future__ import generators
import re
from log import *
from robust import *
import FilenameMapping
from log import Log
import FilenameMapping, robust, rpath, Globals
class SelectError(Exception):
......@@ -81,7 +80,7 @@ class Select:
# This re should not match normal filenames, but usually just globs
glob_re = re.compile("(.*[*?[]|ignorecase\\:)", re.I | re.S)
def __init__(self, rpath, quoted_filenames = None):
def __init__(self, rootrp, quoted_filenames = None):
"""Select initializer. rpath is the root directory
When files have quoted characters in them, quoted_filenames
......@@ -89,9 +88,9 @@ class Select:
version.
"""
assert isinstance(rpath, RPath)
assert isinstance(rootrp, rpath.RPath)
self.selection_functions = []
self.rpath = rpath
self.rpath = rootrp
self.prefix = self.rpath.path
self.quoting_on = Globals.quoting_enabled and quoted_filenames
......@@ -141,8 +140,8 @@ class Select:
and should be included iff something inside is included.
"""
for filename in Robust.listrp(rpath):
new_rpath = Robust.check_common_error(error_handler,
for filename in robust.listrp(rpath):
new_rpath = robust.check_common_error(error_handler,
rpath.append, (filename,))
if new_rpath:
s = sel_func(new_rpath)
......@@ -204,12 +203,12 @@ class Select:
return None
if self.quoting_on:
for subdir in FilenameMapping.get_quoted_dir_children(rpath):
for subdir in get_quoted_dir_children(rpath):
for rp in rec_func(subdir, rec_func, sel_func):
yield rp
else:
for filename in Robust.listrp(rpath):
new_rp = Robust.check_common_error(
for filename in robust.listrp(rpath):
new_rp = robust.check_common_error(
error_handler, rpath.append, [filename])
if new_rp:
for rp in rec_func(new_rp, rec_func, sel_func):
......@@ -646,3 +645,22 @@ probably isn't what you meant.""" %
return res
def get_quoted_dir_children(rpath):
    """For rpath directory, return list of quoted children in dir

    This used to be in FilenameMapping, but was moved because it
    depends on the robust.listrp routine.
    """
    if not rpath.isdir():
        return []
    # Pair each on-disk (quoted) filename with its unquoted form, then
    # sort by the real name rather than the quoted representation.
    dir_pairs = []
    for filename in robust.listrp(rpath):
        dir_pairs.append((FilenameMapping.unquote(filename), filename))
    dir_pairs.sort()
    children = []
    for unquoted, filename in dir_pairs:
        childrp = rpath.append(unquoted)
        childrp.quote_path()
        children.append(childrp)
    return children
......@@ -19,9 +19,8 @@
"""Generate and process aggregated backup information"""
from lazy import *
import re
import re, os
import Globals, TempFile, robust, Time, rorpiter
class StatsException(Exception): pass
......@@ -216,12 +215,12 @@ class StatsObj:
def write_stats_to_rp(self, rp):
"""Write statistics string to given rpath"""
tf = TempFileManager.new(rp)
tf = TempFile.new(rp)
def init_thunk():
fp = tf.open("w")
fp.write(self.get_stats_string())
fp.close()
Robust.make_tf_robustaction(init_thunk, (tf,), (rp,)).execute()
robust.make_tf_robustaction(init_thunk, (tf,), (rp,)).execute()
def read_stats_from_rp(self, rp):
"""Set statistics from rpath, return self for convenience"""
......@@ -264,7 +263,7 @@ class StatsObj:
return s
class StatsITRB(ITRBranch, StatsObj):
class ITRB(rorpiter.ITRBranch, StatsObj):
"""Keep track of per directory statistics
This is subclassed by the mirroring and incrementing ITRs.
......@@ -339,7 +338,6 @@ class StatsITRB(ITRBranch, StatsObj):
self.__dict__[attr] += branch.__dict__[attr]
from log import *
from increment import *
from robust import *
import Globals
"""commontest - Some functions and constants common to several test cases"""
import os, sys
from rdiff_backup.rpath import *
from rdiff_backup.destructive_stepping import *
from rdiff_backup.highlevel import *
from rdiff_backup import Globals, Hardlink, SetConnections, Main
from rdiff_backup.log import Log
from rdiff_backup.rpath import RPath
from rdiff_backup import Globals, Hardlink, SetConnections, Main, \
selection, highlevel, lazy, Time, rpath
SourceDir = "../src"
AbsCurdir = os.getcwd() # Absolute path name of current directory
......@@ -13,7 +13,7 @@ __no_execute__ = 1 # Keeps the actual rdiff-backup program from running
def Myrm(dirstring):
"""Run myrm on given directory string"""
assert not os.system("%s/myrm %s" % (MiscDir, dirstring))
assert not os.system("rm -rf %s" % (dirstring,))
def Make():
"""Make sure the rdiff-backup script in the source dir is up-to-date"""
......@@ -96,8 +96,8 @@ def InternalMirror(source_local, dest_local, src_dir, dest_dir):
"""
# Save attributes of root to restore later
src_root = RPath(Globals.local_connection, src_dir)
dest_root = RPath(Globals.local_connection, dest_dir)
src_root = rpath.RPath(Globals.local_connection, src_dir)
dest_root = rpath.RPath(Globals.local_connection, dest_dir)
dest_rbdir = dest_root.append("rdiff-backup-data")
dest_incdir = dest_rbdir.append("increments")
......@@ -109,9 +109,9 @@ def InternalMirror(source_local, dest_local, src_dir, dest_dir):
InternalBackup(source_local, dest_local, src_dir, dest_dir)
dest_root.setdata()
dest_rbdir.delete()
Myrm(dest_rbdir.path)
# Restore old attributes
RPathStatic.copy_attribs(src_root, dest_root)
rpath.copy_attribs(src_root, dest_root)
def InternalRestore(mirror_local, dest_local, mirror_dir, dest_dir, time):
"""Restore mirror_dir to dest_dir at given time
......@@ -133,7 +133,7 @@ def InternalRestore(mirror_local, dest_local, mirror_dir, dest_dir, time):
mirror_rp, dest_rp = cmd_schemas2rps([mirror_dir, dest_dir], remote_schema)
Time.setcurtime()
inc = get_increment_rp(mirror_rp, time)
if inc: Main.restore(get_increment_rp(mirror_rp, time), dest_rp)
if inc: Main.Restore(get_increment_rp(mirror_rp, time), dest_rp)
else: # use alternate syntax
Main.restore_timestr = str(time)
Main.RestoreAsOf(mirror_rp, dest_rp)
......@@ -173,7 +173,8 @@ def CompareRecursive(src_rp, dest_rp, compare_hardlinks = 1,
Log("Comparing %s and %s, hardlinks %s" % (src_rp.path, dest_rp.path,
compare_hardlinks), 3)
src_select, dest_select = Select(src_rp), Select(dest_rp)
src_select = selection.Select(src_rp)
dest_select = selection.Select(dest_rp)
if ignore_tmp_files:
# Ignoring temp files can be useful when we want to check the
......@@ -231,16 +232,17 @@ def CompareRecursive(src_rp, dest_rp, compare_hardlinks = 1,
Hardlink.get_indicies(dest_rorp, None)), 3)
return None
if equality_func: result = Iter.equal(dsiter1, dsiter2, 1, equality_func)
if equality_func: result = lazy.Iter.equal(dsiter1, dsiter2,
1, equality_func)
elif compare_hardlinks:
dsiter1 = Hardlink.add_rorp_iter(dsiter1, 1)
dsiter2 = Hardlink.add_rorp_iter(dsiter2, None)
if exclude_rbdir:
result = Iter.equal(dsiter1, dsiter2, 1, hardlink_equal)
else: result = Iter.equal(dsiter1, dsiter2, 1, rbdir_equal)
result = lazy.Iter.equal(dsiter1, dsiter2, 1, hardlink_equal)
else: result = lazy.Iter.equal(dsiter1, dsiter2, 1, rbdir_equal)
elif not exclude_rbdir:
result = Iter.equal(dsiter1, dsiter2, 1, rbdir_equal)
else: result = Iter.equal(dsiter1, dsiter2, 1)
result = lazy.Iter.equal(dsiter1, dsiter2, 1, rbdir_equal)
else: result = lazy.Iter.equal(dsiter1, dsiter2, 1)
for i in dsiter1: pass # make sure all files processed anyway
for i in dsiter2: pass
......@@ -269,12 +271,12 @@ def BackupRestoreSeries(source_local, dest_local, list_of_dirnames,
"""
Globals.set('preserve_hardlinks', compare_hardlinks)
time = 10000
dest_rp = RPath(Globals.local_connection, dest_dirname)
restore_rp = RPath(Globals.local_connection, restore_dirname)
dest_rp = rpath.RPath(Globals.local_connection, dest_dirname)
restore_rp = rpath.RPath(Globals.local_connection, restore_dirname)
os.system(MiscDir + "/myrm " + dest_dirname)
Myrm(dest_dirname)
for dirname in list_of_dirnames:
src_rp = RPath(Globals.local_connection, dirname)
src_rp = rpath.RPath(Globals.local_connection, dirname)
reset_hardlink_dicts()
_reset_connections(src_rp, dest_rp)
......@@ -287,10 +289,10 @@ def BackupRestoreSeries(source_local, dest_local, list_of_dirnames,
time = 10000
for dirname in list_of_dirnames[:-1]:
reset_hardlink_dicts()
os.system(MiscDir + "/myrm " + restore_dirname)
Myrm(restore_dirname)
InternalRestore(dest_local, source_local, dest_dirname,
restore_dirname, time)
src_rp = RPath(Globals.local_connection, dirname)
src_rp = rpath.RPath(Globals.local_connection, dirname)
assert CompareRecursive(src_rp, restore_rp)
# Restore should default back to newest time older than it
......@@ -304,11 +306,11 @@ def MirrorTest(source_local, dest_local, list_of_dirnames,
dest_dirname = "testfiles/output"):
"""Mirror each of list_of_dirnames, and compare after each"""
Globals.set('preserve_hardlinks', compare_hardlinks)
dest_rp = RPath(Globals.local_connection, dest_dirname)
dest_rp = rpath.RPath(Globals.local_connection, dest_dirname)
os.system(MiscDir + "/myrm " + dest_dirname)
Myrm(dest_dirname)
for dirname in list_of_dirnames:
src_rp = RPath(Globals.local_connection, dirname)
src_rp = rpath.RPath(Globals.local_connection, dirname)
reset_hardlink_dicts()
_reset_connections(src_rp, dest_rp)
......
import unittest, types, tempfile, os, sys
from commontest import *
from rdiff_backup.connection import *
from rdiff_backup import Globals
from rdiff_backup import Globals, rpath
class LocalConnectionTest(unittest.TestCase):
"""Test the dummy connection"""
......@@ -104,7 +104,7 @@ class PipeConnectionTest(unittest.TestCase):
"""Test module emulation"""
assert type(self.conn.tempfile.mktemp()) is types.StringType
assert self.conn.os.path.join("a", "b") == "a/b"
rp1 = RPath(self.conn, self.regfilename)
rp1 = rpath.RPath(self.conn, self.regfilename)
assert rp1.isreg()
def testVirtualFiles(self):
......@@ -112,17 +112,17 @@ class PipeConnectionTest(unittest.TestCase):
tempout = self.conn.open("testfiles/tempout", "w")
assert isinstance(tempout, VirtualFile)
regfilefp = open(self.regfilename, "r")
RPath.copyfileobj(regfilefp, tempout)
rpath.copyfileobj(regfilefp, tempout)
tempout.close()
regfilefp.close()
tempoutlocal = open("testfiles/tempout", "r")
regfilefp = open(self.regfilename, "r")
assert RPath.cmpfileobj(regfilefp, tempoutlocal)
assert rpath.cmpfileobj(regfilefp, tempoutlocal)
tempoutlocal.close()
regfilefp.close()
os.unlink("testfiles/tempout")
assert RPath.cmpfileobj(self.conn.open(self.regfilename, "r"),
assert rpath.cmpfileobj(self.conn.open(self.regfilename, "r"),
open(self.regfilename, "r"))
def testString(self):
......@@ -139,7 +139,8 @@ class PipeConnectionTest(unittest.TestCase):
def testRPaths(self):
"""Test transmission of rpaths"""
rp = RPath(self.conn, "testfiles/various_file_types/regular_file")
rp = rpath.RPath(self.conn,
"testfiles/various_file_types/regular_file")
assert self.conn.reval("lambda rp: rp.data", rp) == rp.data
assert self.conn.reval("lambda rp: rp.conn is Globals.local_connection", rp)
......@@ -192,7 +193,7 @@ class RedirectedConnectionTest(unittest.TestCase):
def testRpaths(self):
"""Test moving rpaths back and forth across connections"""
rp = RPath(self.conna, "foo")
rp = rpath.RPath(self.conna, "foo")
self.connb.Globals.set("tmp_rpath", rp)
rp_returned = self.connb.Globals.get("tmp_rpath")
assert rp_returned.conn is rp.conn
......
import unittest
from commontest import *
from rdiff_backup.C import *
from rdiff_backup import C
from rdiff_backup.rpath import *
class CTest(unittest.TestCase):
......
from __future__ import generators
import unittest
from commontest import *
from rdiff_backup.rpath import *
from rdiff_backup.selection import *
from rdiff_backup import Globals
from rdiff_backup import rpath, selection, Globals, destructive_stepping
Log.setverbosity(4)
class DSTest(unittest.TestCase):
def setUp(self):
self.lc = Globals.local_connection
self.noperms = RPath(self.lc, "testfiles/noperms")
self.noperms = rpath.RPath(self.lc, "testfiles/noperms")
Globals.change_source_perms = 1
self.iteration_dir = RPath(self.lc, "testfiles/iteration-test")
self.iteration_dir = rpath.RPath(self.lc, "testfiles/iteration-test")
def testDSIter(self):
"""Testing destructive stepping iterator from baserp"""
for i in range(2):
sel = Select(DSRPath(1, self.noperms)).set_iter()
sel = selection.Select(destructive_stepping.
DSRPath(1, self.noperms)).set_iter()
ds_iter = sel.iterate_with_finalizer()
noperms = ds_iter.next()
assert noperms.isdir() and noperms.getperms() == 0, \
......
import unittest, os, re, sys
import unittest, os, re, sys, time
from commontest import *
from rdiff_backup.log import *
from rdiff_backup.rpath import *
from rdiff_backup import Globals
from rdiff_backup import Globals, log, rpath
"""Regression tests"""
Globals.exclude_mirror_regexps = [re.compile(".*/rdiff-backup-data")]
Log.setverbosity(7)
log.Log.setverbosity(7)
lc = Globals.local_connection
......@@ -15,7 +13,7 @@ class Local:
"""This is just a place to put increments relative to the local
connection"""
def get_local_rp(extension):
return RPath(Globals.local_connection, "testfiles/" + extension)
return rpath.RPath(Globals.local_connection, "testfiles/" + extension)
vftrp = get_local_rp('various_file_types')
inc1rp = get_local_rp('increment1')
......@@ -154,7 +152,7 @@ class PathSetter(unittest.TestCase):
"testfiles/output/rdiff-backup-data/increments")
self.exec_rb(None, timbar_paths[0])
self.refresh(Local.timbar_in, Local.timbar_out)
assert RPath.cmp_with_attribs(Local.timbar_in, Local.timbar_out)
assert rpath.cmp_with_attribs(Local.timbar_in, Local.timbar_out)
self.exec_rb_restore(25000, 'testfiles/output/various_file_types',
'testfiles/vft2_out')
......@@ -173,8 +171,8 @@ class PathSetter(unittest.TestCase):
incfiles = filter(lambda s: s.startswith(basename),
os.listdir(directory))
incfiles.sort()
incrps = map(lambda f: RPath(lc, directory+"/"+f), incfiles)
return map(lambda x: x.path, filter(RPath.isincfile, incrps))
incrps = map(lambda f: rpath.RPath(lc, directory+"/"+f), incfiles)
return map(lambda x: x.path, filter(rpath.RPath.isincfile, incrps))
class Final(PathSetter):
......@@ -287,7 +285,7 @@ testfiles/increment2/changed_dir""")
"testfiles/output/changed_dir/foo")
# Test selective restoring
mirror_rp = RPath(Globals.local_connection, "testfiles/output")
mirror_rp = rpath.RPath(Globals.local_connection, "testfiles/output")
restore_filename = get_increment_rp(mirror_rp, 10000).path
assert not os.system(self.rb_schema +
"--include testfiles/restoretarget1/various_file_types/"
......@@ -321,8 +319,8 @@ testfiles/increment2/changed_dir""")
# Make an exclude list
os.mkdir("testfiles/vft_out")
excluderp = RPath(Globals.local_connection,
"testfiles/vft_out/exclude")
excluderp = rpath.RPath(Globals.local_connection,
"testfiles/vft_out/exclude")
fp = excluderp.open("w")
fp.write("""
../testfiles/various_file_types/regular_file
......@@ -331,8 +329,8 @@ testfiles/increment2/changed_dir""")
assert not fp.close()
# Make an include list
includerp = RPath(Globals.local_connection,
"testfiles/vft_out/include")
includerp = rpath.RPath(Globals.local_connection,
"testfiles/vft_out/include")
fp = includerp.open("w")
fp.write("""
../testfiles/various_file_types/executable
......
......@@ -6,7 +6,7 @@ class RemoteMirrorTest(unittest.TestCase):
"""Test mirroring"""
def setUp(self):
"""Start server"""
Log.setverbosity(7)
Log.setverbosity(3)
Globals.change_source_perms = 1
SetConnections.UpdateGlobal('checkpoint_interval', 3)
......
import unittest, os
import unittest, os, re, time
from commontest import *
from rdiff_backup.log import *
from rdiff_backup.rpath import *
from rdiff_backup.restore import *
from rdiff_backup import log, rpath, restore, increment, Time, \
Rdiff, statistics
lc = Globals.local_connection
Globals.change_source_perms = 1
Log.setverbosity(3)
def getrp(ending):
return RPath(lc, "testfiles/various_file_types/" + ending)
return rpath.RPath(lc, "testfiles/various_file_types/" + ending)
rf = getrp("regular_file")
rf2 = getrp("two_hardlinked_files1")
......@@ -22,11 +21,11 @@ dir = getrp(".")
sym = getrp("symbolic_link")
nothing = getrp("nothing")
target = RPath(lc, "testfiles/out")
out2 = RPath(lc, "testfiles/out2")
out_gz = RPath(lc, "testfiles/out.gz")
target = rpath.RPath(lc, "testfiles/out")
out2 = rpath.RPath(lc, "testfiles/out2")
out_gz = rpath.RPath(lc, "testfiles/out.gz")
Time.setprevtime(999424113.24931)
Time.setprevtime(999424113)
prevtimestr = "2001-09-02T02:48:33-07:00"
t_pref = "testfiles/out.2001-09-02T02:48:33-07:00"
t_diff = "testfiles/out.2001-09-02T02:48:33-07:00.diff"
......@@ -39,78 +38,72 @@ class inctest(unittest.TestCase):
def setUp(self):
Globals.set('isbackup_writer',1)
def check_time(self, rp):
"""Make sure that rp is an inc file, and time is Time.prevtime"""
assert rp.isincfile(), rp
t = Time.stringtotime(rp.getinctime())
assert t == Time.prevtime, (t, Time.prevtime)
def testreg(self):
"""Test increment of regular files"""
Globals.compression = None
target.setdata()
if target.lstat(): target.delete()
rpd = RPath(lc, t_diff)
rpd = rpath.RPath(lc, t_diff)
if rpd.lstat(): rpd.delete()
Inc.Increment(rf, exec1, target)
rpd.setdata()
assert rpd.isreg(), rpd
assert RPath.cmp_attribs(rpd, exec1)
rpd.delete()
diffrp = increment.Increment(rf, exec1, target)
assert diffrp.isreg(), diffrp
assert rpath.cmp_attribs(diffrp, exec1)
self.check_time(diffrp)
assert diffrp.getinctype() == 'diff', diffrp.getinctype()
diffrp.delete()
def testmissing(self):
"""Test creation of missing files"""
Inc.Increment(rf, nothing, target)
rp = RPath(lc, t_pref + ".missing")
assert rp.lstat()
rp.delete()
missing_rp = increment.Increment(rf, nothing, target)
self.check_time(missing_rp)
assert missing_rp.getinctype() == 'missing'
missing_rp.delete()
def testsnapshot(self):
"""Test making of a snapshot"""
Globals.compression = None
rp = RPath(lc, t_pref + ".snapshot")
if rp.lstat(): rp.delete()
Inc.Increment(rf, sym, target)
rp.setdata()
assert rp.lstat()
assert RPath.cmp_attribs(rp, sym)
assert RPath.cmp(rp, sym)
rp.delete()
rp = RPath(lc, t_pref + ".snapshot")
if rp.lstat(): rp.delete()
Inc.Increment(sym, rf, target)
rp.setdata()
assert rp.lstat()
assert RPath.cmp_attribs(rp, rf)
assert RPath.cmp(rp, rf)
rp.delete()
snap_rp = increment.Increment(rf, sym, target)
self.check_time(snap_rp)
assert rpath.cmp_attribs(snap_rp, sym)
assert rpath.cmp(snap_rp, sym)
snap_rp.delete()
snap_rp2 = increment.Increment(sym, rf, target)
self.check_time(snap_rp2)
assert rpath.cmp_attribs(snap_rp2, rf)
assert rpath.cmp(snap_rp2, rf)
snap_rp2.delete()
def testGzipsnapshot(self):
"""Test making a compressed snapshot"""
Globals.compression = 1
rp = RPath(lc, t_pref + ".snapshot")
if rp.lstat(): rp.delete()
Inc.Increment(rf, sym, target)
rp.setdata()
assert rp.lstat()
assert RPath.cmp_attribs(rp, sym)
assert RPath.cmp(rp, sym)
rp = increment.Increment(rf, sym, target)
self.check_time(rp)
assert rpath.cmp_attribs(rp, sym)
assert rpath.cmp(rp, sym)
rp.delete()
rp = RPath(lc, t_pref + ".snapshot.gz")
if rp.lstat(): rp.delete()
Inc.Increment(sym, rf, target)
rp.setdata()
assert rp.lstat()
assert RPath.cmp_attribs(rp, rf)
assert RPath.cmpfileobj(rp.open("rb", 1), rf.open("rb"))
rp = increment.Increment(sym, rf, target)
self.check_time(rp)
assert rpath.cmp_attribs(rp, rf)
assert rpath.cmpfileobj(rp.open("rb", 1), rf.open("rb"))
assert rp.isinccompressed()
rp.delete()
def testdir(self):
"""Test increment on dir"""
Inc.Increment(sym, dir, target)
rp = RPath(lc, t_pref + ".dir")
rp2 = RPath(lc, t_pref)
rp = increment.Increment(sym, dir, target)
self.check_time(rp)
assert rp.lstat()
assert target.isdir()
assert RPath.cmp_attribs(dir, rp)
assert rpath.cmp_attribs(dir, rp)
assert rp.isreg()
rp.delete()
target.delete()
......@@ -118,46 +111,36 @@ class inctest(unittest.TestCase):
def testDiff(self):
"""Test making diffs"""
Globals.compression = None
rp = RPath(lc, t_pref + '.diff')
if rp.lstat(): rp.delete()
Inc.Increment(rf, rf2, target)
rp.setdata()
assert rp.lstat()
assert RPath.cmp_attribs(rp, rf2)
rp = increment.Increment(rf, rf2, target)
self.check_time(rp)
assert rpath.cmp_attribs(rp, rf2)
Rdiff.patch_action(rf, rp, out2).execute()
assert RPath.cmp(rf2, out2)
assert rpath.cmp(rf2, out2)
rp.delete()
out2.delete()
def testGzipDiff(self):
"""Test making gzipped diffs"""
Globals.compression = 1
rp = RPath(lc, t_pref + '.diff.gz')
if rp.lstat(): rp.delete()
Inc.Increment(rf, rf2, target)
rp.setdata()
assert rp.lstat()
assert RPath.cmp_attribs(rp, rf2)
rp = increment.Increment(rf, rf2, target)
self.check_time(rp)
assert rpath.cmp_attribs(rp, rf2)
Rdiff.patch_action(rf, rp, out2, delta_compressed = 1).execute()
assert RPath.cmp(rf2, out2)
assert rpath.cmp(rf2, out2)
rp.delete()
out2.delete()
def testGzipRegexp(self):
"""Here a .gz file shouldn't be compressed"""
Globals.compression = 1
RPath.copy(rf, out_gz)
rpath.copy(rf, out_gz)
assert out_gz.lstat()
rp = RPath(lc, t_pref + '.diff')
if rp.lstat(): rp.delete()
Inc.Increment(rf, out_gz, target)
rp.setdata()
assert rp.lstat()
assert RPath.cmp_attribs(rp, out_gz)
rp = increment.Increment(rf, out_gz, target)
self.check_time(rp)
assert rpath.cmp_attribs(rp, out_gz)
Rdiff.patch_action(rf, rp, out2).execute()
assert RPath.cmp(out_gz, out2)
assert rpath.cmp(out_gz, out2)
rp.delete()
out2.delete()
out_gz.delete()
......@@ -194,8 +177,8 @@ class inctest2(unittest.TestCase):
InternalBackup(1, 1, "testfiles/stattest2", "testfiles/output",
time.time()+1)
rbdir = RPath(Globals.local_connection,
"testfiles/output/rdiff-backup-data")
rbdir = rpath.RPath(Globals.local_connection,
"testfiles/output/rdiff-backup-data")
#incs = Restore.get_inclist(rbdir.append("subdir").
# append("directory_statistics"))
......@@ -217,14 +200,14 @@ class inctest2(unittest.TestCase):
#assert 400000 < subdir_stats.ChangedMirrorSize < 420000
#assert 10 < subdir_stats.IncrementFileSize < 20000
incs = Restore.get_inclist(rbdir.append("session_statistics"))
incs = restore.get_inclist(rbdir.append("session_statistics"))
assert len(incs) == 2
s2 = StatsObj().read_stats_from_rp(incs[0])
s2 = statistics.StatsObj().read_stats_from_rp(incs[0])
assert s2.SourceFiles == 7
assert 700000 < s2.SourceFileSize < 750000
self.stats_check_initial(s2)
root_stats = StatsObj().read_stats_from_rp(incs[1])
root_stats = statistics.StatsObj().read_stats_from_rp(incs[1])
assert root_stats.SourceFiles == 7, root_stats.SourceFiles
assert 550000 < root_stats.SourceFileSize < 570000
assert root_stats.MirrorFiles == 7
......
import unittest, StringIO
from commontest import *
from rdiff_backup.iterfile import *
from rdiff_backup import lazy
class testIterFile(unittest.TestCase):
......@@ -11,8 +12,8 @@ class testIterFile(unittest.TestCase):
def testConversion(self):
"""Test iter to file conversion"""
for itm in [self.iter1maker, self.iter2maker]:
assert Iter.equal(itm(),
IterWrappingFile(FileWrappingIter(itm())))
assert lazy.Iter.equal(itm(),
IterWrappingFile(FileWrappingIter(itm())))
class testBufferedRead(unittest.TestCase):
def testBuffering(self):
......
This diff is collapsed.
import unittest, os, cStringIO, time
from rdiff_backup.metadata import *
from rdiff_backup import rpath, Globals, selection, destructive_stepping
from rdiff_backup import rpath, connection, Globals, selection, \
destructive_stepping
tempdir = rpath.RPath(Globals.local_connection, "testfiles/output")
......@@ -61,9 +62,8 @@ class MetadataTest(unittest.TestCase):
if temprp.lstat(): return temprp
self.make_temp()
root = rpath.RPath(Globals.local_connection, "testfiles/bigdir")
dsrp_root = destructive_stepping.DSRPath(1, root)
rpath_iter = selection.Select(dsrp_root).set_iter()
rootrp = rpath.RPath(Globals.local_connection, "testfiles/bigdir")
rpath_iter = selection.Select(rootrp).set_iter()
start_time = time.time()
OpenMetadata(temprp)
......
This diff is collapsed.
import unittest
from commontest import *
from rdiff_backup.log import *
from rdiff_backup.restore import *
from rdiff_backup import Globals
from rdiff_backup import log, restore, Globals, rpath
Log.setverbosity(3)
......@@ -23,26 +21,26 @@ class RestoreTest(unittest.TestCase):
dirlist = os.listdir(self.prefix)
dirlist.sort()
baselist = filter(lambda f: f.startswith(basename), dirlist)
rps = map(lambda f: RPath(lc, self.prefix+f), baselist)
rps = map(lambda f: rpath.RPath(lc, self.prefix+f), baselist)
incs = filter(lambda rp: rp.isincfile(), rps)
tuples = map(lambda rp: (rp, RPath(lc, "%s.%s" %
(rp.getincbase().path,
rp.getinctime()))),
tuples = map(lambda rp: (rp, rpath.RPath(lc, "%s.%s" %
(rp.getincbase().path,
rp.getinctime()))),
incs)
return tuples, incs
def restoreonefiletest(self, basename):
tuples, incs = self.maketesttuples(basename)
rpbase = RPath(lc, self.prefix + basename)
rptarget = RPath(lc, "testfiles/outfile")
rpbase = rpath.RPath(lc, self.prefix + basename)
rptarget = rpath.RPath(lc, "testfiles/outfile")
for pair in tuples:
print "Processing file " + pair[0].path
if rptarget.lstat(): rptarget.delete()
rest_time = Time.stringtotime(pair[0].getinctime())
rid = RestoreIncrementData((), rpbase, incs)
rid = restore.RestoreIncrementData((), rpbase, incs)
rid.sortincseq(rest_time, 10000000000) # pick some really late time
rcd = RestoreCombinedData(rid, rpbase, rptarget)
rcd = restore.RestoreCombinedData(rid, rpbase, rptarget)
rcd.RestoreFile()
#sorted_incs = Restore.sortincseq(rest_time, incs)
#Restore.RestoreFile(rest_time, rpbase, (), sorted_incs, rptarget)
......@@ -50,9 +48,9 @@ class RestoreTest(unittest.TestCase):
if not rptarget.lstat(): assert not pair[1].lstat()
elif not pair[1].lstat(): assert not rptarget.lstat()
else:
assert RPath.cmp(rptarget, pair[1]), \
assert rpath.cmp(rptarget, pair[1]), \
"%s %s" % (rptarget.path, pair[1].path)
assert RPath.cmp_attribs(rptarget, pair[1]), \
assert rpath.cmp_attribs(rptarget, pair[1]), \
"%s %s" % (rptarget.path, pair[1].path)
rptarget.delete()
......@@ -75,7 +73,7 @@ class RestoreTest(unittest.TestCase):
for inc, incbase in tuples:
assert inc.isincfile()
inctime = Time.stringtotime(inc.getinctime())
rid1 = RestoreIncrementData(basename, incbase, incs)
rid1 = restore.RestoreIncrementData(basename, incbase, incs)
rid1.sortincseq(inctime, mirror_time)
assert rid1.inc_list, rid1.inc_list
# oldest increment should be exactly inctime
......@@ -97,8 +95,8 @@ class RestoreTest(unittest.TestCase):
InternalRestore(1, 1, "testfiles/restoretest3",
"testfiles/output", 20000)
src_rp = RPath(Globals.local_connection, "testfiles/increment2")
restore_rp = RPath(Globals.local_connection, "testfiles/output")
src_rp = rpath.RPath(Globals.local_connection, "testfiles/increment2")
restore_rp = rpath.RPath(Globals.local_connection, "testfiles/output")
assert CompareRecursive(src_rp, restore_rp)
def testRestoreCorrupt(self):
......
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment