Commit fe98b295 authored by bescoto

Various changes for v0.11.1 (see CHANGELOG)


git-svn-id: http://svn.savannah.nongnu.org/svn/rdiff-backup@256 2b77aa54-bcbc-44c9-a7ec-4f6cf2b41109
parent d551bdbe
-New in v0.11.1 (2002/12/??)
+New in v0.11.1 (2002/12/31)
---------------------------
**Warning** Various features have been removed from this version, so
@@ -27,8 +27,15 @@ The following features have been removed:
still generated, the directory statistics file no longer is,
because the new code structure makes it less convenient.
+The various --exclude and --include options no longer work when
+restoring. This may be added later if there is demand.
+--windows-mode and filename quoting don't work. There have been
+several requests for this in the past, so it will probably be
+re-added in the next version.
Extensive refactoring. A lot of rdiff-backup's code was structured as
-if it was still in one file, so it didn't make enough use of Python's
+if it were still in one file, so it didn't make enough use of Python's
module system.
Now rdiff-backup writes metadata (uid, gid, mtime, etc.) to a
@@ -42,9 +49,13 @@ some ramifications:
metadata, so it may not be necessary to traverse the whole mirror
directory. This can reduce file access on the destination side.
+Even when the --no-hard-links option is given when backing up,
+link relationships can be restored properly. However, if this
+option is given, mirror files will not be linked together.
+Special file types like devices and sockets which cannot be created
+on the remote side for some reason can still be backed up and
+restored properly.
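Since the metadata file is what lets a restore avoid traversing the whole
mirror, it helps to know what to look for in the rdiff-backup-data
directory. Below is a hypothetical standalone helper (not rdiff-backup
API) that recognizes the "mirror_metadata.<timestamp>.data[.gz]" naming
this commit introduces in metadata.OpenMetadata():

import re

# Match the metadata snapshot names written by OpenMetadata(); the
# trailing ".gz" depends on the compression setting.
_META_RE = re.compile(r"^mirror_metadata\.(?P<time>.+)\.data(\.gz)?$")

def list_metadata_snapshots(filenames):
    """Return timestamp strings of all mirror_metadata files given"""
    times = []
    for name in filenames:
        match = _META_RE.match(name)
        if match: times.append(match.group("time"))
    return sorted(times)

print(list_metadata_snapshots(
    ["mirror_metadata.2002-12-31T21:24:45-07:00.data.gz",
     "session_statistics.2002-12-31T21:24:45-07:00.data"]))
# -> ['2002-12-31T21:24:45-07:00']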
Fixed bug with the --{include|exclude}-globbing-filelist options
(reported by Claus Herwig).
@@ -54,7 +65,8 @@ given date, and added Bud Bruegger's patch to that. The format and
information this option provides will probably change in the near
future.
+Restoring is now pipelined for better high latency performance, and
+unchanged files in the target directory will not be recopied.
New in v0.11.0 (2002/10/05)
......
@@ -2,7 +2,7 @@
import os, re, shutil, time, sys, getopt
-SourceDir = "src"
+SourceDir = "rdiff_backup"
DistDir = "dist"
# Various details about the files must also be specified by the rpm
@@ -89,15 +89,15 @@ def MakeTar():
        assert not os.system("cp %s %s" % (filename, tardir)), filename
    os.mkdir(tardir+"/rdiff_backup")
-    for filename in ["connection.py", "destructive_stepping.py",
-                     "FilenameMapping.py", "Hardlink.py",
-                     "highlevel.py", "increment.py", "__init__.py",
-                     "iterfile.py", "lazy.py", "librsync.py",
-                     "log.py", "Main.py", "manage.py", "MiscStats.py",
-                     "Rdiff.py", "restore.py", "rlist.py",
-                     "robust.py", "rorpiter.py", "rpath.py", "Security.py",
-                     "selection.py", "SetConnections.py", "static.py",
-                     "statistics.py", "Time.py"]:
+    for filename in ["backup.py", "connection.py",
+                     "FilenameMapping.py", "Hardlink.py",
+                     "increment.py", "__init__.py", "iterfile.py",
+                     "lazy.py", "librsync.py", "log.py", "Main.py",
+                     "manage.py", "metadata.py", "Rdiff.py",
+                     "restore.py", "robust.py", "rorpiter.py",
+                     "rpath.py", "Security.py", "selection.py",
+                     "SetConnections.py", "static.py",
+                     "statistics.py", "TempFile.py", "Time.py"]:
        assert not os.system("cp %s/%s %s/rdiff_backup" %
                             (SourceDir, filename, tardir)), filename
@@ -137,10 +137,10 @@ def parse_cmdline(arglist):
def Main():
    action = parse_cmdline(sys.argv[1:])
-    if action == "FAQ":
-        print "Making FAQ"
-        MakeFAQ()
-    else:
+    print "Making FAQ"
+    MakeFAQ()
+    if action != "FAQ":
        assert action == "All"
        print "Processing version " + Version
        tarfile = MakeTar()
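Both dist scripts lean on one error-handling idiom: os.system() returns
the shell command's exit status, and "assert not status" aborts the
script at the first failed command, printing the assert message (the
scripts pass the offending filename). A minimal illustration of the
idiom itself, on a Unix-like system:

import os

# Exit status 0 means success, so this assertion passes silently.
assert not os.system("true")
# A failing command returns a nonzero status, which would trip the
# assert; here we just capture and check it instead.
status = os.system("false")
assert status != 0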
......
@@ -2,7 +2,7 @@
import os, sys, re
-SourceDir = "src"
+rpmroot = "/home/ben/rpm"
if len(sys.argv) == 2:
    version = sys.argv[1]
@@ -20,15 +20,9 @@ tarfile = "-".join(base.split("-")[:-1]) + ".tar.gz"
# These assume the rpm root directory $HOME/rpm. The
# nonstandard location allows for building by non-root user.
-assert not os.system("cp %s $HOME/rpm/SOURCES" % (tarfile,))
-assert not os.system("rpm -ba --sign -vv --target i386 " + specfile)
-assert not os.system("cp $HOME/rpm/RPMS/i386/%s ." % i386rpm)
-assert not os.system("cp $HOME/rpm/SRPMS/%s ." % source_rpm)
-# Old root RPM instructions
-#assert not os.system("install -o root -g root -m 644 %s "
-#                     "/usr/src/redhat/SOURCES" % (tarfile,))
-#assert not os.system("install -o ben -g ben -m 644 "
-#                     "/usr/src/redhat/RPMS/i386/%s ." % i386rpm)
-#assert not os.system("install -o ben -g ben -m 644 "
-#                     "/usr/src/redhat/SRPMS/%s ." % source_rpm)
+assert not os.system("cp %s %s/SOURCES" % (tarfile, rpmroot))
+#assert not os.system("rpm -ba --sign -vv --target i386 " + specfile)
+assert not os.system("rpmbuild -ba -v " + specfile)
+assert not os.system("cp %s/RPMS/i386/%s ." % (rpmroot, i386rpm))
+assert not os.system("cp %s/SRPMS/%s ." % (rpmroot, source_rpm))
@@ -217,16 +217,12 @@ The default is
"(?i).*\\.(gz|z|bz|bz2|tgz|zip|rpm|deb|jpg|gif|png|jp2|mp3|ogg|avi|wmv|mpeg|mpg|rm|mov)$"
.TP
.BI --no-hard-links
-Don't preserve hard links from source to mirror directories.
-Otherwise, no increment files will themselves be hard linked, but a
-hard link database will be written so that hard links from any dataset
-will be recreated if originally present. If many hard linked files
-are present, this option can drastically decrease memory usage.
+Don't replicate hard links on destination side. Note that because
+metadata is written to a separate file, hard link information will not
+be lost even if the --no-hard-links option is given (however, mirror
+files will not be linked). If many hard-linked files are present,
+this option can drastically increase memory usage.
.TP
-.B --no-resume
-Do not resume last aborted backup even if it falls within the resume
-window.
-.TP
.B --null-separator
Use nulls (\\0) instead of newlines (\\n) as line separators, which
may help when dealing with filenames containing newlines. This
......
@@ -21,12 +21,12 @@
If the preserve_hardlinks option is selected, linked files in the
source directory will be linked in the mirror directory. Linked files
-are treated like any other with respect to incrementing, but a
-database of all links will be recorded at each session, so linked
-files can still be restored from the increments.
+are treated like any other with respect to incrementing, but their
+link status can be retrieved because their device location and inode #
+is written in the metadata file.
-All these functions are meant to be executed on the destination
-side. The source side should only transmit inode information.
+All these functions are meant to be executed on the mirror side. The
+source side should only transmit inode information.
"""
@@ -67,58 +67,6 @@ def clear_dictionaries():
    _src_index_indicies = _dest_index_indicies = _restore_index_path = None
# The keys of this dictionary are (inode, devloc) pairs on the source
# side. The values are (numlinks, index) pairs, where numlinks are
# the number of files currently linked to this spot, and index is the
# index of the first file so linked.
_src_inode_index_dict = {}
_dest_inode_index_dict = {}
#def rorp_eq(src_rorp, dest_rorp):
# """Return true if source and dest rorp are equal as far as hardlinking
#
# This also processes the src_rorp, adding it if necessary to the
# inode dictionary.
#
# """
# if not src_rorp.isreg(): return 1 # only reg files can be hard linked
# if src_rorp.getnumlinks() == 1: return dest_rorp.getnumlinks() == 1
#
# src_linked_index = process_rorp(src_rorp, _src_inode_index_dict)
# if dest_rorp.getnumlinks() == 1: return 0
# dest_linked_index = process_rorp(dest_rorp, _dest_inode_index_dict)
# return src_linked_index == dest_linked_index
def process_rorp(rorp, inode_dict):
"""Add inode info and returns index src_rorp is linked to, or None"""
key_pair = (rorp.getinode(), rorp.getdevloc())
try: num, linked_index = inode_dict[key_pair]
except KeyError:
inode_dict[key_pair] = (1, src_rorp.index)
return None
inode_dict[key_pair] = (num+1, linked_index)
if num+1 == src_rorp.getnumlinks(): del _inode_index_dict[key_pair]
else: _inode_index_dict[key_pair] = (num+1, linked_index)
return linked_index
def get_linked_index(src_rorp):
"""Return the index a src_rorp is linked to, or None
Also deletes the src_rorp's entry in the dictionary if we have
accumulated all the hard link references.
"""
key_pair = (rorp.getinode(), rorp.getdevloc())
try: num, linked_index = _src_inode_index_dict[key_pair]
except KeyError: return None
if num == src_rorp.getnumlinks():
del _src_inode_index_dict[key_pair]
def get_inode_key(rorp):
    """Return rorp's key for _inode_ dictionaries"""
    return (rorp.getinode(), rorp.getdevloc())
@@ -190,6 +138,10 @@ def islinked(rorp):
    """True if rorp's index is already linked to something on src side"""
    return len(get_indicies(rorp, 1)) >= 2
+def get_link_index(rorp):
+    """Return first index on target side rorp is already linked to"""
+    return get_indicies(rorp, 1)[0]
def restore_link(index, rpath):
    """Restores a linked file by linking it
@@ -214,129 +166,13 @@ def restore_link(index, rpath):
    _restore_index_path[index] = rpath.path
    return None
-def link_rp(src_rorp, dest_rpath, dest_root = None):
-    """Make dest_rpath into a link analogous to that of src_rorp"""
+def link_rp(diff_rorp, dest_rpath, dest_root = None):
+    """Make dest_rpath into a link using link flag in diff_rorp"""
    if not dest_root: dest_root = dest_rpath # use base of dest_rpath
    dest_link_rpath = rpath.RPath(dest_root.conn, dest_root.base,
-                                 get_indicies(src_rorp, 1)[0])
+                                 diff_rorp.get_link_flag())
    dest_rpath.hardlink(dest_link_rpath.path)
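In the new scheme the first file in a linked set is transmitted
normally, and each later member is a rorp whose 'linked' flag carries
the index of that first file (see flaglinked()/get_link_flag() in the
rpath.py hunks below); link_rp() then just hardlinks to it. A
simplified, self-contained analogue using os.link on plain paths
(filenames hypothetical):

import os, tempfile

# The first member of the link group is written out normally.
tmpdir = tempfile.mkdtemp()
first = os.path.join(tmpdir, "a")
open(first, "wb").write(b"data")

# A later member carries only the "index" of the first (here, its
# path) and is restored by linking rather than by copying data.
link_flag = first
second = os.path.join(tmpdir, "b")
os.link(link_flag, second)

assert os.stat(first).st_ino == os.stat(second).st_ino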
def write_linkdict(rpath, dict, compress = None):
"""Write link data to the rbdata dir
It is stored as the a big pickled dictionary dated to match
the current hardlinks.
"""
assert (Globals.isbackup_writer and
rpath.conn is Globals.local_connection)
tf = TempFile.new(rpath)
def init():
fp = tf.open("wb", compress)
cPickle.dump(dict, fp)
assert not fp.close()
tf.setdata()
robust.make_tf_robustaction(init, (tf,), (rpath,)).execute()
def get_linkrp(data_rpath, time, prefix):
"""Return RPath of linkdata, or None if cannot find"""
for rp in map(data_rpath.append, data_rpath.listdir()):
if (rp.isincfile() and rp.getincbase_str() == prefix and
(rp.getinctype() == 'snapshot' or rp.getinctype() == 'data')
and Time.stringtotime(rp.getinctime()) == time):
return rp
return None
def get_linkdata(data_rpath, time, prefix = 'hardlink_data'):
"""Return index dictionary written by write_linkdata at time"""
rp = get_linkrp(data_rpath, time, prefix)
if not rp: return None
fp = rp.open("rb", rp.isinccompressed())
index_dict = cPickle.load(fp)
assert not fp.close()
return index_dict
def final_writedata():
"""Write final checkpoint data to rbdir after successful backup"""
global final_inc
if _src_index_indicies:
log.Log("Writing hard link data", 6)
if Globals.compression:
final_inc = Globals.rbdir.append("hardlink_data.%s.data.gz" %
Time.curtimestr)
else: final_inc = Globals.rbdir.append("hardlink_data.%s.data" %
Time.curtimestr)
write_linkdict(final_inc, _src_index_indicies, Globals.compression)
else: # no hardlinks, so writing unnecessary
final_inc = None
def retrieve_final(time):
"""Set source index dictionary from hardlink_data file if avail"""
global _src_index_indicies
hd = get_linkdata(Globals.rbdir, time)
if hd is None: return None
_src_index_indicies = hd
return 1
def final_checkpoint(data_rpath):
"""Write contents of the four dictionaries to the data dir
If rdiff-backup receives a fatal error, it may still be able
to save the contents of the four hard link dictionaries.
Because these dictionaries may be big, they are not saved
after every 20 seconds or whatever, but just at the end.
"""
log.Log("Writing intermediate hard link data to disk", 2)
src_inode_rp = data_rpath.append("hardlink_source_inode_checkpoint."
"%s.data" % Time.curtimestr)
src_index_rp = data_rpath.append("hardlink_source_index_checkpoint."
"%s.data" % Time.curtimestr)
dest_inode_rp = data_rpath.append("hardlink_dest_inode_checkpoint."
"%s.data" % Time.curtimestr)
dest_index_rp = data_rpath.append("hardlink_dest_index_checkpoint."
"%s.data" % Time.curtimestr)
for (rp, dict) in ((src_inode_rp, _src_inode_indicies),
(src_index_rp, _src_index_indicies),
(dest_inode_rp, _dest_inode_indicies),
(dest_index_rp, _dest_index_indicies)):
write_linkdict(rp, dict)
def retrieve_checkpoint(data_rpath, time):
"""Retrieve hardlink data from final checkpoint
Return true if the retrieval worked, false otherwise.
"""
global _src_inode_indicies, _src_index_indicies
global _dest_inode_indicies, _dest_index_indicies
try:
src_inode = get_linkdata(data_rpath, time,
"hardlink_source_inode_checkpoint")
src_index = get_linkdata(data_rpath, time,
"hardlink_source_index_checkpoint")
dest_inode = get_linkdata(data_rpath, time,
"hardlink_dest_inode_checkpoint")
dest_index = get_linkdata(data_rpath, time,
"hardlink_dest_index_checkpoint")
except cPickle.UnpicklingError:
log.Log("Unpickling Error", 2)
return None
if (src_inode is None or src_index is None or
dest_inode is None or dest_index is None): return None
_src_inode_indicies, _src_index_indicies = src_inode, src_index
_dest_inode_indicies, _dest_index_indicies = dest_inode, dest_index
return 1
def remove_all_checkpoints():
"""Remove all hardlink checkpoint information from directory"""
prefix_list = ["hardlink_source_inode_checkpoint",
"hardlink_source_index_checkpoint",
"hardlink_dest_inode_checkpoint",
"hardlink_dest_index_checkpoint"]
for rp in map(Globals.rbdir.append, Globals.rbdir.listdir()):
if (rp.isincfile() and rp.getincbase_str() in prefix_list and
(rp.getinctype() == 'snapshot' or rp.getinctype() == 'data')):
rp.delete()
# Copyright 2002 Ben Escoto
#
# This file is part of rdiff-backup.
@@ -21,9 +22,9 @@
from __future__ import generators
import getopt, sys, re, os
-from log import Log
+from log import Log, LoggerError
import Globals, Time, SetConnections, selection, robust, rpath, \
-       manage, highlevel, connection, restore, FilenameMapping, \
+       manage, backup, connection, restore, FilenameMapping, \
       Security, Hardlink
@@ -146,6 +147,7 @@ def parse_cmdlineoptions(arglist):
        Globals.set('quoting_enabled', 1)
        Globals.set('preserve_hardlinks', 0)
        select_opts.append(("--exclude-special-files", None))
+        assert 0, "Windows mode doesn't work in this version!"
    elif opt == '--windows-time-format':
        Globals.set('time_separator', "_")
    else: Log.FatalError("Unknown option %s" % opt)
@@ -184,13 +186,6 @@ def commandline_error(message):
def misc_setup(rps):
    """Set default change ownership flag, umask, relay regexps"""
-    if ((len(rps) == 2 and rps[1].conn.os.getuid() == 0) or
-            (len(rps) < 2 and os.getuid() == 0)):
-        # Allow change_ownership if destination connection is root
-        for conn in Globals.connections:
-            conn.Globals.set('change_ownership', 1)
-        for rp in rps: rp.setdata() # Update with userinfo
    os.umask(077)
    Time.setcurtime(Globals.current_time)
    FilenameMapping.set_init_quote_vals()
@@ -240,14 +235,14 @@ def Backup(rpin, rpout):
    backup_init_dirs(rpin, rpout)
    if prevtime:
        Time.setprevtime(prevtime)
-        highlevel.Mirror_and_increment(rpin, rpout, incdir)
-    else: highlevel.Mirror(rpin, rpout)
+        backup.Mirror_and_increment(rpin, rpout, incdir)
+    else: backup.Mirror(rpin, rpout)
    rpout.conn.Main.backup_touch_curmirror_local(rpin, rpout)
def backup_set_select(rpin):
    """Create Select objects on source connection"""
-    rpin.conn.highlevel.HLSourceStruct.set_source_select(rpin, select_opts,
-                                                         *select_files)
+    rpin.conn.backup.SourceStruct.set_source_select(rpin, select_opts,
+                                                    *select_files)
def backup_init_dirs(rpin, rpout):
    """Make sure rpin and rpout are valid, init data dir and logging"""
@@ -322,8 +317,7 @@ def backup_get_mirrortime():
        """Warning: duplicate current_mirror files found. Perhaps something
went wrong during your last backup? Using """ + mirrorrps[-1].path, 2)
-    timestr = mirrorrps[-1].getinctime()
-    return Time.stringtotime(timestr)
+    return mirrorrps[-1].getinctime()
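This hunk is the first instance of a change that recurs throughout the
commit: rp.getinctime() now returns the increment's time in seconds
directly, so callers drop their Time.stringtotime() wrappers. The
conversion that moved inside getinctime looks roughly like the sketch
below of W3-datetime parsing; this is an assumption about the format,
and rdiff-backup's own Time.stringtotime handles more cases (e.g. the
--windows-time-format separator):

import calendar, re, time

def stringtotime(timestring):
    """Parse '2002-12-31T21:24:45-07:00'-style strings to epoch seconds"""
    match = re.match(r"^(.*)([-+])(\d\d):(\d\d)$", timestring)
    base, sign, hours, mins = match.groups()
    utc_secs = calendar.timegm(time.strptime(base, "%Y-%m-%dT%H:%M:%S"))
    offset = (int(hours) * 60 + int(mins)) * 60
    # A "-07:00" zone means local time lags UTC, so add the offset back.
    if sign == "-": return utc_secs + offset
    return utc_secs - offset

print(stringtotime("2002-12-31T21:24:45-07:00"))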
def backup_touch_curmirror_local(rpin, rpout):
    """Make a file like current_mirror.time.data to record time
@@ -348,8 +342,7 @@ def Restore(src_rp, dest_rp = None):
    """
    rpin, rpout = restore_check_paths(src_rp, dest_rp)
-    time = Time.stringtotime(rpin.getinctime())
-    restore_common(rpin, rpout, time)
+    restore_common(rpin, rpout, rpin.getinctime())
def RestoreAsOf(rpin, target):
    """Secondary syntax for restore operation
@@ -365,12 +358,14 @@ def RestoreAsOf(rpin, target):
def restore_common(rpin, target, time):
    """Restore operation common to Restore and RestoreAsOf"""
+    if target.conn.os.getuid() == 0:
+        SetConnections.UpdateGlobal('change_ownership', 1)
    mirror_root, index = restore_get_root(rpin)
    mirror = mirror_root.new_index(index)
    inc_rpath = datadir.append_path('increments', index)
    restore_init_select(mirror_root, target)
    restore_start_log(rpin, target, time)
-    restore.Restore(inc_rpath, mirror, target, time)
+    restore.Restore(mirror, inc_rpath, target, time)
    Log("Restore ended", 4)
def restore_start_log(rpin, target, time):
@@ -397,9 +392,9 @@ Try restoring from an increment file (the filenames look like
    if not rpout: rpout = rpath.RPath(Globals.local_connection,
                                      rpin.getincbase_str())
-    if rpout.lstat():
+    if rpout.lstat() and not force:
        Log.FatalError("Restore target %s already exists, "
-                      "and will not be overwritten." % rpout.path)
+                      "specify --force to overwrite." % rpout.path)
    return rpin, rpout
def restore_init_select(rpin, rpout):
@@ -462,15 +457,13 @@ def restore_get_root(rpin):
def ListIncrements(rp):
    """Print out a summary of the increments and their times"""
    mirror_root, index = restore_get_root(rp)
-    Globals.rbdir = datadir = \
-                    mirror_root.append_path("rdiff-backup-data")
-    mirrorrp = mirror_root.new_index(index)
-    inc_rpath = datadir.append_path('increments', index)
+    mirror_rp = mirror_root.new_index(index)
+    inc_rpath = Globals.rbdir.append_path('increments', index)
    incs = restore.get_inclist(inc_rpath)
-    mirror_time = restore.get_mirror_time()
+    mirror_time = restore.MirrorStruct.get_mirror_time()
    if Globals.parsable_output:
-        print manage.describe_incs_parsable(incs, mirror_time, mirrorrp)
-    else: print manage.describe_incs_human(incs, mirror_time, mirrorrp)
+        print manage.describe_incs_parsable(incs, mirror_time, mirror_rp)
+    else: print manage.describe_incs_human(incs, mirror_time, mirror_rp)
def CalculateAverage(rps):
@@ -493,8 +486,8 @@ def RemoveOlderThan(rootrp):
    timep = Time.timetopretty(time)
    Log("Deleting increment(s) before %s" % timep, 4)
-    times_in_secs = map(lambda inc: Time.stringtotime(inc.getinctime()),
-                        restore.get_inclist(datadir.append("increments")))
+    times_in_secs = [inc.getinctime() for inc in
+                     restore.get_inclist(datadir.append("increments"))]
    times_in_secs = filter(lambda t: t < time, times_in_secs)
    if not times_in_secs:
        Log.FatalError("No increments older than %s found" % timep)
@@ -517,30 +510,7 @@ def ListChangedSince(rp):
    try: rest_time = Time.genstrtotime(restore_timestr)
    except Time.TimeException, exc: Log.FatalError(str(exc))
    mirror_root, index = restore_get_root(rp)
-    Globals.rbdir = datadir = mirror_root.append_path("rdiff-backup-data")
-    mirror_time = restore.get_mirror_time()
+    mirror_rp = mirror_root.new_index(index)
+    inc_rp = mirror_rp.append_path("increments", index)
+    restore.ListChangedSince(mirror_rp, inc_rp, rest_time)
def get_rids_recursive(rid):
"""Yield all the rids under rid that have inc newer than rest_time"""
yield rid
for sub_rid in restore.yield_rids(rid, rest_time, mirror_time):
for sub_sub_rid in get_rids_recursive(sub_rid): yield sub_sub_rid
def determineChangeType(incList):
"returns the type of change determined from incList"
assert len(incList) > 0
last_inc_type = incList[-1].getinctype() # examine earliest change
if last_inc_type == 'snapshot': return "misc change"
elif last_inc_type == 'missing': return "new file"
elif last_inc_type == 'diff': return "modified"
elif last_inc_type == 'dir': return "dir change"
else: return "Unknown!"
inc_rpath = datadir.append_path('increments', index)
inc_list = restore.get_inclist(inc_rpath)
root_rid = restore.RestoreIncrementData(index, inc_rpath, inc_list)
for rid in get_rids_recursive(root_rid):
if rid.inc_list:
print "%-11s: %s" % (determineChangeType(rid.inc_list),
rid.get_indexpath())
@@ -17,75 +17,53 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
-"""Invoke rdiff utility to make signatures, deltas, or patch
-All these operations should be done in a relatively safe manner using
-RobustAction and the like.
-"""
+"""Invoke rdiff utility to make signatures, deltas, or patch"""
import os, librsync
-from log import Log
-import robust, TempFile, Globals
+import Globals, log, static, TempFile, rpath
def get_signature(rp):
    """Take signature of rpin file and return in file object"""
-    Log("Getting signature of %s" % rp.path, 7)
+    log.Log("Getting signature of %s" % rp.path, 7)
    return librsync.SigFile(rp.open("rb"))
def get_delta_sigfileobj(sig_fileobj, rp_new):
    """Like get_delta but signature is in a file object"""
-    Log("Getting delta of %s with signature stream" % (rp_new.path,), 7)
+    log.Log("Getting delta of %s with signature stream" % (rp_new.path,), 7)
    return librsync.DeltaFile(sig_fileobj, rp_new.open("rb"))
def get_delta_sigrp(rp_signature, rp_new):
    """Take signature rp and new rp, return delta file object"""
-    Log("Getting delta of %s with signature %s" %
+    log.Log("Getting delta of %s with signature %s" %
            (rp_new.path, rp_signature.get_indexpath()), 7)
    return librsync.DeltaFile(rp_signature.open("rb"), rp_new.open("rb"))
def write_delta_action(basis, new, delta, compress = None):
"""Return action writing delta which brings basis to new
If compress is true, the output of rdiff will be gzipped
before written to delta.
"""
delta_tf = TempFile.new(delta)
def init(): write_delta(basis, new, delta_tf, compress)
return robust.make_tf_robustaction(init, delta_tf, delta)
def write_delta(basis, new, delta, compress = None):
    """Write rdiff delta which brings basis to new"""
-    Log("Writing delta %s from %s -> %s" %
+    log.Log("Writing delta %s from %s -> %s" %
            (basis.path, new.path, delta.path), 7)
    sigfile = librsync.SigFile(basis.open("rb"))
    deltafile = librsync.DeltaFile(sigfile, new.open("rb"))
    delta.write_from_fileobj(deltafile, compress)
-def patch_action(rp_basis, rp_delta, rp_out = None, out_tf = None,
-                 delta_compressed = None):
-    """Return RobustAction which patches rp_basis with rp_delta
-    If rp_out is None, put output in rp_basis. Will use TempFile
-    out_tf it is specified. If delta_compressed is true, the
-    delta file will be decompressed before processing with rdiff.
-    """
-    if not rp_out: rp_out = rp_basis
-    if not out_tf: out_tf = TempFile.new(rp_out)
-    def init():
-        rp_basis.conn.Rdiff.patch_local(rp_basis, rp_delta,
-                                        out_tf, delta_compressed)
-        out_tf.setdata()
-    return robust.make_tf_robustaction(init, out_tf, rp_out)
+def write_patched_fp(basis_fp, delta_fp, out_fp):
+    """Write patched file to out_fp given input fps. Closes input files"""
+    rpath.copyfileobj(librsync.PatchedFile(basis_fp, delta_fp), out_fp)
+    assert not basis_fp.close() and not delta_fp.close()
+def write_via_tempfile(fp, rp):
+    """Write fileobj fp to rp by writing to tempfile and renaming"""
+    tf = TempFile.new(rp)
+    tf.write_from_fileobj(fp)
+    tf.rename(rp)
-def patch_local(rp_basis, rp_delta, outrp, delta_compressed = None):
-    """Patch routine that must be run on rp_basis.conn
-    This is because librsync may need to seek() around in rp_basis,
-    and so needs a real file. Other rpaths can be remote.
-    """
+def patch_local(rp_basis, rp_delta, outrp = None, delta_compressed = None):
+    """Patch routine that must be run locally, writes to outrp
+    This should be run local to rp_basis because it needs to be a real
+    file (librsync may need to seek around in it). If outrp is None,
+    patch rp_basis instead.
+    """
    assert rp_basis.conn is Globals.local_connection
@@ -94,32 +72,19 @@ def patch_local(rp_basis, rp_delta, outrp, delta_compressed = None):
    sigfile = librsync.SigFile(rp_basis.open("rb"))
    patchfile = librsync.PatchedFile(rp_basis.open("rb"), deltafile)
-    outrp.write_from_fileobj(patchfile)
+    if outrp: outrp.write_from_fileobj(patchfile)
+    else: write_via_tempfile(patchfile, rp_basis)
-def patch_with_attribs_action(rp_basis, rp_delta, rp_out = None):
-    """Like patch_action, but also transfers attributs from rp_delta"""
-    if not rp_out: rp_out = rp_basis
-    tf = TempFile.new(rp_out)
-    return robust.chain_nested(patch_action(rp_basis, rp_delta, rp_out, tf),
-                               robust.copy_attribs_action(rp_delta, tf))
-def copy_action(rpin, rpout):
-    """Use rdiff to copy rpin to rpout, conserving bandwidth"""
-    if not rpin.isreg() or not rpout.isreg() or rpin.conn is rpout.conn:
-        # rdiff not applicable, fallback to regular copying
-        return robust.copy_action(rpin, rpout)
-    Log("Rdiff copying %s to %s" % (rpin.path, rpout.path), 6)
-    out_tf = TempFile.new(rpout)
-    def init(): rpout.conn.Rdiff.copy_local(rpin, rpout, out_tf)
-    return robust.make_tf_robustaction(init, out_tf, rpout)
-def copy_local(rpin, rpout, rpnew):
+def copy_local(rpin, rpout, rpnew = None):
    """Write rpnew == rpin using rpout as basis. rpout and rpnew local"""
-    assert rpnew.conn is rpout.conn is Globals.local_connection
+    assert rpout.conn is Globals.local_connection
    sigfile = librsync.SigFile(rpout.open("rb"))
    deltafile = rpin.conn.librsync.DeltaFile(sigfile, rpin.open("rb"))
-    rpnew.write_from_fileobj(librsync.PatchedFile(rpout.open("rb"), deltafile))
+    patched_file = librsync.PatchedFile(rpout.open("rb"), deltafile)
+    if rpnew: rpnew.write_from_fileobj(patched_file)
+    else: write_via_tempfile(patched_file, rpout)
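The functions in this file are thin wrappers over librsync's three-step
cycle: take a signature of the basis file, compute a delta of the new
file against that signature, then patch the basis with the delta. A
usage sketch built only from calls visible above; it assumes
rdiff-backup's librsync wrapper module is importable, and the /tmp
paths are hypothetical scratch files:

import librsync

open("/tmp/basis", "wb").write(b"old contents")
open("/tmp/new", "wb").write(b"new contents")

sig = librsync.SigFile(open("/tmp/basis", "rb"))         # 1. signature
delta = librsync.DeltaFile(sig, open("/tmp/new", "rb"))  # 2. delta vs sig
patched = librsync.PatchedFile(open("/tmp/basis", "rb"), delta)
assert patched.read() == b"new contents"                 # 3. patch basis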
@@ -112,6 +112,7 @@ def set_allowed_requests(sec_level):
        "Globals.get_dict_val",
        "log.Log.open_logfile_allconn",
        "log.Log.close_logfile_allconn",
+       "Log.log_to_file",
        "SetConnections.add_redirected_conn",
        "RedirectedRun",
        "sys.stdout.write"]
@@ -123,20 +124,21 @@ def set_allowed_requests(sec_level):
            "os.listdir",
            "Time.setcurtime_local",
            "robust.Resume.ResumeCheck",
-            "highlevel.HLSourceStruct.split_initial_dsiter",
-            "highlevel.HLSourceStruct.get_diffs_and_finalize",
+            "backup.SourceStruct.split_initial_dsiter",
+            "backup.SourceStruct.get_diffs_and_finalize",
            "rpath.gzip_open_local_read",
            "rpath.open_local_read"])
    if sec_level == "update-only":
        allowed_requests.extend(
            ["Log.open_logfile_local", "Log.close_logfile_local",
             "Log.close_logfile_allconn", "Log.log_to_file",
+             "log.Log.log_to_file",
             "robust.SaveState.init_filenames",
             "robust.SaveState.touch_last_file",
-             "highlevel.HLDestinationStruct.get_sigs",
-             "highlevel.HLDestinationStruct.patch_w_datadir_writes",
-             "highlevel.HLDestinationStruct.patch_and_finalize",
-             "highlevel.HLDestinationStruct.patch_increment_and_finalize",
+             "backup.DestinationStruct.get_sigs",
+             "backup.DestinationStruct.patch_w_datadir_writes",
+             "backup.DestinationStruct.patch_and_finalize",
+             "backup.DestinationStruct.patch_increment_and_finalize",
             "Main.backup_touch_curmirror_local",
             "Globals.ITRB.increment_stat"])
    if Globals.server:
@@ -148,8 +150,8 @@ def set_allowed_requests(sec_level):
            "FilenameMapping.set_init_quote_vals_local",
            "Globals.postset_regexp_local",
            "Globals.set_select",
-            "highlevel.HLSourceStruct.set_session_info",
-            "highlevel.HLDestinationStruct.set_session_info"])
+            "backup.SourceStruct.set_session_info",
+            "backup.DestinationStruct.set_session_info"])
def vet_request(request, arglist):
    """Examine request for security violations"""
......
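set_allowed_requests() builds a per-security-level whitelist of remote
calls; vet_request(), whose body is collapsed above, then has to reject
any incoming request not on that list. A hypothetical sketch of the
shape of such a check (all names invented, not rdiff-backup's actual
code, which also inspects arguments and security levels):

allowed_requests = ["os.getuid", "Time.setcurtime_local"]

class Request:   # stand-in for the object arriving over the connection
    def __init__(self, function_string):
        self.function_string = function_string

def vet(request):
    """Raise on any request outside the whitelist"""
    if request.function_string not in allowed_requests:
        raise Exception("Security violation: %s" % request.function_string)

vet(Request("os.getuid"))   # on the list: passes silently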
@@ -201,6 +201,7 @@ def BackupInitConnections(reading_conn, writing_conn):
    writing_conn.Globals.set("isbackup_writer", 1)
    UpdateGlobal("backup_reader", reading_conn)
    UpdateGlobal("backup_writer", writing_conn)
+    if writing_conn.os.getuid() == 0: UpdateGlobal('change_ownership', 1)
def CloseConnections():
    """Close all connections. Run by client"""
......
@@ -40,7 +40,7 @@ def setcurtime(curtime = None):
    """Sets the current time in curtime and curtimestr on all systems"""
    t = curtime or time.time()
    for conn in Globals.connections:
-        conn.Time.setcurtime_local(t)
+        conn.Time.setcurtime_local(long(t))
def setcurtime_local(timeinseconds):
    """Only set the current time locally"""
......
@@ -96,7 +96,6 @@ class LowLevelPipeConnection(Connection):
    b - string
    q - quit signal
    t - TempFile
-    d - DSRPath
    R - RPath
    r - RORPath only
    c - PipeConnection object
@@ -124,8 +123,6 @@ class LowLevelPipeConnection(Connection):
        elif isinstance(obj, connection.Connection):self._putconn(obj, req_num)
        elif isinstance(obj, TempFile.TempFile):
            self._puttempfile(obj, req_num)
-        elif isinstance(obj, destructive_stepping.DSRPath):
-            self._putdsrpath(obj, req_num)
        elif isinstance(obj, rpath.RPath): self._putrpath(obj, req_num)
        elif isinstance(obj, rpath.RORPath): self._putrorpath(obj, req_num)
        elif ((hasattr(obj, "read") or hasattr(obj, "write"))
@@ -157,11 +154,6 @@ class LowLevelPipeConnection(Connection):
                   tempfile.index, tempfile.data)
        self._write("t", cPickle.dumps(tf_repr, 1), req_num)
-    def _putdsrpath(self, dsrpath, req_num):
-        """Put DSRPath into pipe. See _putrpath"""
-        dsrpath_repr = (dsrpath.conn.conn_number, dsrpath.getstatedict())
-        self._write("d", cPickle.dumps(dsrpath_repr, 1), req_num)
    def _putrpath(self, rpath, req_num):
        """Put an rpath into the pipe
@@ -246,7 +238,6 @@ class LowLevelPipeConnection(Connection):
        elif format_string == "t": result = self._gettempfile(data)
        elif format_string == "r": result = self._getrorpath(data)
        elif format_string == "R": result = self._getrpath(data)
-        elif format_string == "d": result = self._getdsrpath(data)
        else:
            assert format_string == "c", header_string
            result = Globals.connection_dict[int(data)]
@@ -270,16 +261,6 @@ class LowLevelPipeConnection(Connection):
        return rpath.RPath(Globals.connection_dict[conn_number],
                           base, index, data)
-    def _getdsrpath(self, raw_dsrpath_buf):
-        """Return DSRPath object indicated by buf"""
-        conn_number, state_dict = cPickle.loads(raw_dsrpath_buf)
-        empty_dsrp = destructive_stepping.DSRPath("bypass",
-                                                  Globals.local_connection, None)
-        empty_dsrp.__setstate__(state_dict)
-        empty_dsrp.conn = Globals.connection_dict[conn_number]
-        empty_dsrp.file = None
-        return empty_dsrp
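Every object crossing the pipe is framed the same way: a one-letter
format code (the legend earlier in this class) plus a cPickled payload;
_putrpath() shows the "R" case, and _getrpath() rebuilds the object from
its (conn_number, base, index, data) tuple. A standalone sketch of that
round trip with plain tuples instead of real RPath state:

import pickle   # cPickle in the original Python 2 code

def put_rpath(rpath_state):
    """Frame an RPath-like tuple as (format code, pickled payload)"""
    return ("R", pickle.dumps(rpath_state, 1))

def get_rpath(format_string, data):
    """Inverse of put_rpath"""
    assert format_string == "R"
    return pickle.loads(data)

fmt, data = put_rpath((0, "/backup", ("dir", "file"), {"type": "reg"}))
print(get_rpath(fmt, data))
# (0, '/backup', ('dir', 'file'), {'type': 'reg'})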
    def _close(self):
        """Close the pipes associated with the connection"""
        self.outpipe.close()
@@ -544,8 +525,8 @@ class VirtualFile:
# put at bottom to reduce circularities.
import Globals, Time, Rdiff, Hardlink, FilenameMapping, C, Security, \
       Main, rorpiter, selection, increment, statistics, manage, lazy, \
-       iterfile, rpath, robust, restore, manage, highlevel, connection, \
-       TempFile, destructive_stepping, SetConnections
+       iterfile, rpath, robust, restore, manage, backup, connection, \
+       TempFile, SetConnections, librsync
from log import Log
Globals.local_connection = LocalConnection()
......
@@ -56,7 +56,7 @@ def describe_incs_parsable(incs, mirror_time, mirrorrp):
    50000 regular <- last will be the current mirror
    """
-    incpairs = [(Time.stringtotime(inc.getinctime()), inc) for inc in incs]
+    incpairs = [(inc.getinctime(), inc) for inc in incs]
    incpairs.sort()
    result = ["%s %s" % (time, get_inc_type(inc)) for time, inc in incpairs]
    result.append("%s %s" % (mirror_time, get_file_type(mirrorrp)))
@@ -64,7 +64,7 @@ def describe_incs_parsable(incs, mirror_time, mirrorrp):
def describe_incs_human(incs, mirror_time, mirrorrp):
    """Return a string describing all the root increments"""
-    incpairs = [(Time.stringtotime(inc.getinctime()), inc) for inc in incs]
+    incpairs = [(inc.getinctime(), inc) for inc in incs]
    incpairs.sort()
    result = ["Found %d increments:" % len(incpairs)]
@@ -95,8 +95,7 @@ def delete_earlier_than_local(baserp, time):
            yield sub_rp
    for rp in yield_files(baserp):
-        if ((rp.isincfile() and
-             Time.stringtotime(rp.getinctime()) < time) or
+        if ((rp.isincfile() and rp.getinctime() < time) or
                (rp.isdir() and not rp.listdir())):
            Log("Deleting increment file %s" % rp.path, 5)
            rp.delete()
@@ -114,7 +113,7 @@ class IncObj:
        if not incrp.isincfile():
            raise ManageException("%s is not an inc file" % incrp.path)
        self.incrp = incrp
-        self.time = Time.stringtotime(incrp.getinctime())
+        self.time = incrp.getinctime()
    def getbaserp(self):
        """Return rp of the incrp without extensions"""
......
@@ -56,7 +56,7 @@ field names and values.
from __future__ import generators
import re, gzip
-import log, Globals, rpath, Time, robust
+import log, Globals, rpath, Time, robust, increment
class ParsingError(Exception):
    """This is raised when bad or unparsable data is received"""
@@ -207,6 +207,7 @@ class rorp_extractor:
                log.Log("Error parsing metadata file: %s" % (e,), 2)
            if self.at_end: break
            self.buf = self.buf[next_pos:]
+        assert not self.close()
    def skip_to_index(self, index):
        """Scan through the file, set buffer to beginning of index record
@@ -250,6 +251,7 @@ class rorp_extractor:
            yield rorp
            if self.at_end: break
            self.buf = self.buf[next_pos:]
+        assert not self.close()
    def close(self):
        """Return value of closing associated file"""
@@ -264,9 +266,10 @@ def OpenMetadata(rp = None, compress = 1):
    assert not metadata_fileobj, "Metadata file already open"
    if rp: metadata_rp = rp
    else:
-        if compress: filename_base = "mirror_metadata.%s.data.gz"
-        else: filename_base = "mirror_metadata.%s.data"
-        metadata_rp = Globals.rbdir.append(filename_base % (Time.curtimestr,))
+        if compress: typestr = 'data.gz'
+        else: typestr = 'data'
+        metadata_rp = Globals.rbdir.append("mirror_metadata.%s.%s" %
+                                           (Time.curtimestr, typestr))
    metadata_fileobj = metadata_rp.open("wb", compress = compress)
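For a concrete instance of what the rewritten branch drops into the
rdiff-backup-data directory (the timestamp here is a hypothetical
Time.curtimestr value):

curtimestr = "2002-12-31T21:24:45-07:00"
for typestr in ("data.gz", "data"):
    print("mirror_metadata.%s.%s" % (curtimestr, typestr))
# mirror_metadata.2002-12-31T21:24:45-07:00.data.gz
# mirror_metadata.2002-12-31T21:24:45-07:00.data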
def WriteMetadata(rorp):
@@ -307,8 +310,7 @@ def GetMetadata_at_time(rbdir, time, restrict_index = None, rblist = None):
    for rp in rblist:
        if (rp.isincfile() and rp.getinctype() == "data" and
                rp.getincbase_str() == "mirror_metadata"):
-            if Time.stringtotime(rp.getinctime()) == time:
-                return GetMetadata(rp, restrict_index)
+            if rp.getinctime() == time: return GetMetadata(rp, restrict_index)
    return None
@@ -53,6 +53,12 @@ def FromRaw(raw_iter):
            rorp.setfile(getnext(raw_iter))
        yield rorp
+def getnext(iter):
+    """Return the next element of an iterator, raising error if none"""
+    try: next = iter.next()
+    except StopIteration: raise RORPIterException("Unexpected end to iter")
+    return next
def ToFile(rorp_iter):
    """Return file version of iterator"""
    return iterfile.FileWrappingIter(ToRaw(rorp_iter))
@@ -143,90 +149,23 @@ def Collate2Iters(riter1, riter2):
        yield (None, relem2)
        relem2 = None
-def getnext(iter):
-    """Return the next element of an iterator, raising error if none"""
-    try: next = iter.next()
-    except StopIteration: raise RORPIterException("Unexpected end to iter")
-    return next
-def get_dissimilar_indicies(src_init_iter, dest_init_iter):
+def get_dissimilar_indicies(src_init_iter, dest_init_iter, statfileobj = None):
    """Get dissimilar indicies given two rorpiters
    Returns an iterator which enumerates the indicies of the rorps
-    which are different on the source and destination ends.
+    which are different on the source and destination ends. If
+    statfileobj is given, call add_changed on each pair of different
+    indicies.
    """
    collated = Collate2Iters(src_init_iter, dest_init_iter)
    for src_rorp, dest_rorp in collated:
-        if not src_rorp: yield dest_rorp.index
-        elif not dest_rorp: yield src_rorp.index
-        elif not src_rorp == dest_rorp: yield dest_rorp.index
-        elif (Globals.preserve_hardlinks and not
-              Hardlink.rorp_eq(src_rorp, dest_rorp)): yield dest_rorp.index
+        if (src_rorp and dest_rorp and src_rorp == dest_rorp and
+            (not Globals.preserve_hardlinks or
+             Hardlink.rorp_eq(src_rorp, dest_rorp))): continue
+        if statfileobj: statfileobj.add_changed(src_rorp, dest_rorp)
+        if not dest_rorp: yield src_rorp.index
+        else: yield dest_rorp.index
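Stripped of rorp and hardlink details, get_dissimilar_indicies() is a
merge of two index-sorted streams that yields an index whenever the two
sides disagree or one side is missing. A self-contained analogue over
(index, value) pairs, using dicts instead of streaming iterators to
keep it short:

def dissimilar(src, dest):
    """Yield each index whose value differs between the two sides"""
    src_d, dest_d = dict(src), dict(dest)
    for index in sorted(set(src_d) | set(dest_d)):
        if src_d.get(index) != dest_d.get(index): yield index

src = [((1,), "a"), ((2,), "b")]
dest = [((1,), "a"), ((2,), "B"), ((3,), "c")]
print(list(dissimilar(src, dest)))
# [(2,), (3,)]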
def GetDiffIter(sig_iter, new_iter):
"""Return delta iterator from sig_iter to new_iter
The accompanying file for each will be a delta as produced by
rdiff, unless the destination file does not exist, in which
case it will be the file in its entirety.
sig_iter may be composed of rorps, but new_iter should have
full RPaths.
"""
collated_iter = CollateIterators(sig_iter, new_iter)
for rorp, rp in collated_iter: yield diffonce(rorp, rp)
def diffonce(sig_rorp, new_rp):
"""Return one diff rorp, based from signature rorp and orig rp"""
if sig_rorp and Globals.preserve_hardlinks and sig_rorp.isflaglinked():
if new_rp: diff_rorp = new_rp.getRORPath()
else: diff_rorp = rpath.RORPath(sig_rorp.index)
diff_rorp.flaglinked()
return diff_rorp
elif sig_rorp and sig_rorp.isreg() and new_rp and new_rp.isreg():
diff_rorp = new_rp.getRORPath()
#fp = sig_rorp.open("rb")
#print "---------------------", fp
#tmp_sig_rp = RPath(Globals.local_connection, "/tmp/sig")
#tmp_sig_rp.delete()
#tmp_sig_rp.write_from_fileobj(fp)
#diff_rorp.setfile(Rdiff.get_delta_sigfileobj(tmp_sig_rp.open("rb"),
# new_rp))
diff_rorp.setfile(Rdiff.get_delta_sigfileobj(sig_rorp.open("rb"),
new_rp))
diff_rorp.set_attached_filetype('diff')
return diff_rorp
else:
# Just send over originial if diff isn't appropriate
if sig_rorp: sig_rorp.close_if_necessary()
if not new_rp: return rpath.RORPath(sig_rorp.index)
elif new_rp.isreg():
diff_rorp = new_rp.getRORPath(1)
diff_rorp.set_attached_filetype('snapshot')
return diff_rorp
else: return new_rp.getRORPath()
def patchonce_action(base_rp, basisrp, diff_rorp):
"""Return action patching basisrp using diff_rorp"""
assert diff_rorp, "Missing diff index %s" % basisrp.index
if not diff_rorp.lstat():
return robust.Action(None, lambda init_val: basisrp.delete(), None)
if Globals.preserve_hardlinks and diff_rorp.isflaglinked():
if not basisrp: basisrp = base_rp.new_index(diff_rorp.index)
tf = TempFile.new(basisrp)
def init(): Hardlink.link_rp(diff_rorp, tf, basisrp)
return robust.make_tf_robustaction(init, tf, basisrp)
elif basisrp and basisrp.isreg() and diff_rorp.isreg():
if diff_rorp.get_attached_filetype() != 'diff':
raise rpath.RPathException("File %s appears to have changed during"
" processing, skipping" % (basisrp.path,))
return Rdiff.patch_with_attribs_action(basisrp, diff_rorp)
else: # Diff contains whole file, just copy it over
if not basisrp: basisrp = base_rp.new_index(diff_rorp.index)
return robust.copy_with_attribs_action(diff_rorp, basisrp)
class IndexedTuple(UserList.UserList):
@@ -277,12 +216,15 @@ def FillInIter(rpiter, rootrp):
    (2,5). This is used when we need to process directories before or
    after processing a file in that directory.
+    If start_index is given, start with start_index instead of ().
+    The indicies of rest of the rorps should also start with
+    start_index.
    """
    # Handle first element as special case
    first_rp = rpiter.next() # StopIteration gets passed upwards
    cur_index = first_rp.index
-    for i in range(len(cur_index)):
-        yield rootrp.new_index(cur_index[:i])
+    for i in range(len(cur_index)): yield rootrp.new_index(cur_index[:i])
    yield first_rp
    del first_rp
    old_index = cur_index
@@ -294,7 +236,6 @@ def FillInIter(rpiter, rootrp):
        for i in range(1, len(cur_index)): # i==0 case already handled
            if cur_index[:i] != old_index[:i]:
                filler_rp = rootrp.new_index(cur_index[:i])
-                assert filler_rp.isdir(), "This shouldn't be possible"
                yield filler_rp
        yield rp
        old_index = cur_index
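FillInIter's job is pure index arithmetic: before yielding an entry with
index (2,5), make sure its ancestors () and (2,) have been yielded. A
standalone sketch of the same logic over bare index tuples:

def fill_in(indicies):
    """Yield every missing ancestor index before each index"""
    old = None
    for index in indicies:
        for i in range(len(index)):
            prefix = index[:i]
            if old is None or prefix != old[:i]: yield prefix
        yield index
        old = index

print(list(fill_in([(2, 5), (2, 6), (3,)])))
# [(), (2,), (2, 5), (2, 6), (3,)]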
@@ -318,6 +259,7 @@ class IterTreeReducer:
        self.index = None
        self.root_branch = branch_class(*branch_args)
        self.branches = [self.root_branch]
+        self.root_fast_processed = None
    def finish_branches(self, index):
        """Run Finish() on all branches index has passed
@@ -355,6 +297,7 @@ class IterTreeReducer:
    def Finish(self):
        """Call at end of sequence to tie everything up"""
+        if self.index is None or self.root_fast_processed: return
        while 1:
            to_be_finished = self.branches.pop()
            to_be_finished.call_end_proc()
@@ -375,7 +318,10 @@ class IterTreeReducer:
        index = args[0]
        if self.index is None:
            self.root_branch.base_index = index
-            self.process_w_branch(self.root_branch, args)
+            if self.root_branch.can_fast_process(*args):
+                self.root_branch.fast_process(*args)
+                self.root_fast_processed = 1
+            else: self.process_w_branch(self.root_branch, args)
            self.index = index
            return 1
@@ -389,7 +335,8 @@ class IterTreeReducer:
        last_branch = self.branches[-1]
        if last_branch.start_successful:
            if last_branch.can_fast_process(*args):
-                last_branch.fast_process(*args)
+                robust.check_common_error(last_branch.on_error,
+                                          last_branch.fast_process, args)
            else:
                branch = self.add_branch(index)
                self.process_w_branch(branch, args)
@@ -452,7 +399,7 @@ class ITRBranch:
    def log_prev_error(self, index):
        """Call function if no pending exception"""
-        log.Log("Skipping %s because of previous error" %
-                (os.path.join(*index),), 2)
+        log.Log("Skipping %s because of previous error" % \
+                (index and os.path.join(*index) or '()',), 2)
@@ -35,7 +35,7 @@ are dealing with are local or remote.
"""
-import os, stat, re, sys, shutil, gzip, socket, time, shutil
+import os, stat, re, sys, shutil, gzip, socket, time
import Globals, FilenameMapping, Time, static, log
@@ -81,7 +81,7 @@ def move(rpin, rpout):
    copy(rpin, rpout)
    rpin.delete()
-def copy(rpin, rpout):
+def copy(rpin, rpout, compress = 0):
    """Copy RPath rpin to rpout. Works for symlinks, dirs, etc."""
    log.Log("Regular copying %s to %s" % (rpin.index, rpout.path), 6)
    if not rpin.lstat():
@@ -93,7 +93,7 @@ def copy(rpin, rpout):
        rpout.delete() # easier to write than compare
    else: return
-    if rpin.isreg(): copy_reg_file(rpin, rpout)
+    if rpin.isreg(): copy_reg_file(rpin, rpout, compress)
    elif rpin.isdir(): rpout.mkdir()
    elif rpin.issym(): rpout.symlink(rpin.readlink())
    elif rpin.ischardev():
@@ -106,15 +106,16 @@ def copy(rpin, rpout):
    elif rpin.issock(): rpout.mksock()
    else: raise RPathException("File %s has unknown type" % rpin.path)
-def copy_reg_file(rpin, rpout):
+def copy_reg_file(rpin, rpout, compress = 0):
    """Copy regular file rpin to rpout, possibly avoiding connection"""
    try:
-        if rpout.conn is rpin.conn:
-            rpout.conn.shutil.copyfile(rpin.path, rpout.path)
+        if (rpout.conn is rpin.conn and
+                rpout.conn is not Globals.local_connection):
+            rpout.conn.rpath.copy_reg_file(rpin.path, rpout.path, compress)
            rpout.setdata()
            return
    except AttributeError: pass
-    rpout.write_from_fileobj(rpin.open("rb"))
+    rpout.write_from_fileobj(rpin.open("rb"), compress = compress)
def cmp(rpin, rpout): def cmp(rpin, rpout):
"""True if rpin has the same data as rpout """True if rpin has the same data as rpout
...@@ -179,9 +180,9 @@ def cmp_attribs(rp1, rp2): ...@@ -179,9 +180,9 @@ def cmp_attribs(rp1, rp2):
(rp1.path, rp2.path, result), 7) (rp1.path, rp2.path, result), 7)
return result return result
def copy_with_attribs(rpin, rpout): def copy_with_attribs(rpin, rpout, compress = 0):
"""Copy file and then copy over attributes""" """Copy file and then copy over attributes"""
copy(rpin, rpout) copy(rpin, rpout, compress)
if rpin.lstat(): copy_attribs(rpin, rpout) if rpin.lstat(): copy_attribs(rpin, rpout)
def quick_cmp_with_attribs(rp1, rp2): def quick_cmp_with_attribs(rp1, rp2):
...@@ -278,9 +279,9 @@ class RORPath: ...@@ -278,9 +279,9 @@ class RORPath:
self.data[key] != other.data[key]): return None self.data[key] != other.data[key]): return None
return 1 return 1
def equal_verbose(self, other): def equal_verbose(self, other, check_index = 1):
"""Like __eq__, but log more information. Useful when testing""" """Like __eq__, but log more information. Useful when testing"""
if self.index != other.index: if check_index and self.index != other.index:
log.Log("Index %s != index %s" % (self.index, other.index), 2) log.Log("Index %s != index %s" % (self.index, other.index), 2)
return None return None
...@@ -372,6 +373,10 @@ class RORPath: ...@@ -372,6 +373,10 @@ class RORPath:
"""Return permission block of file""" """Return permission block of file"""
return self.data['perms'] return self.data['perms']
def hassize(self):
"""True if rpath has a size parameter"""
return self.data.has_key('size')
def getsize(self): def getsize(self):
"""Return length of file in bytes""" """Return length of file in bytes"""
return self.data['size'] return self.data['size']
...@@ -398,7 +403,8 @@ class RORPath: ...@@ -398,7 +403,8 @@ class RORPath:
def getnumlinks(self): def getnumlinks(self):
"""Number of places inode is linked to""" """Number of places inode is linked to"""
return self.data['nlink'] try: return self.data['nlink']
except KeyError: return 1
def readlink(self): def readlink(self):
"""Wrapper around os.readlink()""" """Wrapper around os.readlink()"""
...@@ -446,9 +452,13 @@ class RORPath: ...@@ -446,9 +452,13 @@ class RORPath:
""" """
return self.data.has_key('linked') return self.data.has_key('linked')
def flaglinked(self): def get_link_flag(self):
"""Return previous index that a file is hard linked to"""
return self.data['linked']
def flaglinked(self, index):
"""Signal that rorp is a signature/diff for a hardlink file""" """Signal that rorp is a signature/diff for a hardlink file"""
self.data['linked'] = 1 self.data['linked'] = index
def open(self, mode): def open(self, mode):
"""Return file type object if any was given using self.setfile""" """Return file type object if any was given using self.setfile"""
...@@ -742,7 +752,6 @@ class RPath(RORPath): ...@@ -742,7 +752,6 @@ class RPath(RORPath):
def append_path(self, ext, new_index = ()): def append_path(self, ext, new_index = ()):
"""Like append, but add ext to path instead of to index""" """Like append, but add ext to path instead of to index"""
assert not self.index # doesn't make sense if index isn't ()
return self.__class__(self.conn, "/".join((self.base, ext)), new_index) return self.__class__(self.conn, "/".join((self.base, ext)), new_index)
def new_index(self, index): def new_index(self, index):
...@@ -822,8 +831,8 @@ class RPath(RORPath): ...@@ -822,8 +831,8 @@ class RPath(RORPath):
return self.inc_type return self.inc_type
def getinctime(self): def getinctime(self):
"""Return timestring of an increment file""" """Return time in seconds of an increment file"""
return self.inc_timestr return Time.stringtotime(self.inc_timestr)
def getincbase(self): def getincbase(self):
"""Return the base filename of an increment file in rp form""" """Return the base filename of an increment file in rp form"""
...@@ -862,22 +871,4 @@ class RPathFileHook: ...@@ -862,22 +871,4 @@ class RPathFileHook:
self.closing_thunk() self.closing_thunk()
return result return result
# Import these late to avoid circular dependencies
#import FilenameMapping
#from lazy import *
#from selection import *
#from highlevel import *
#class RpathDeleter(ITRBranch):
# """Delete a directory. Called by RPath.delete()"""
# def start_process(self, index, rp):
# self.rp = rp
#
# def end_process(self):
# if self.rp.isdir(): self.rp.rmdir()
# else: self.rp.delete()
#
# def can_fast_process(self, index, rp): return not rp.isdir()
# def fast_process(self, index, rp): rp.delete()
...@@ -94,29 +94,20 @@ class Select: ...@@ -94,29 +94,20 @@ class Select:
self.prefix = self.rpath.path self.prefix = self.rpath.path
self.quoting_on = Globals.quoting_enabled and quoted_filenames self.quoting_on = Globals.quoting_enabled and quoted_filenames
def set_iter(self, starting_index = None, iterate_parents = None, def set_iter(self, iterate_parents = None, sel_func = None):
sel_func = None):
"""Initialize more variables, get ready to iterate """Initialize more variables, get ready to iterate
Will iterate indices greater than starting_index. If
iterate_parents is true, will also include parents of usually self.Select. Returns self just for convenience.
starting_index in iteration. Selection function sel_func is
called on each rpath and is usually self.Select. Returns self
just for convenience.
""" """
if not sel_func: sel_func = self.Select if not sel_func: sel_func = self.Select
self.rpath.setdata() # this may have changed since Select init self.rpath.setdata() # this may have changed since Select init
if starting_index is not None: if self.quoting_on:
self.starting_index = starting_index
self.iter = self.iterate_starting_from(self.rpath,
self.iterate_starting_from, sel_func)
elif self.quoting_on:
self.iter = self.Iterate(self.rpath, self.Iterate, sel_func) self.iter = self.Iterate(self.rpath, self.Iterate, sel_func)
else: self.iter = self.Iterate_fast(self.rpath, sel_func) else: self.iter = self.Iterate_fast(self.rpath, sel_func)
# only iterate parents if we are not starting from beginning # only iterate parents if we are not starting from beginning
self.iterate_parents = starting_index is not None and iterate_parents
self.next = self.iter.next self.next = self.iter.next
self.__iter__ = lambda: self self.__iter__ = lambda: self
return self return self
...@@ -149,6 +140,7 @@ class Select: ...@@ -149,6 +140,7 @@ class Select:
elif s == 2 and new_rpath.isdir(): yield (new_rpath, 1) elif s == 2 and new_rpath.isdir(): yield (new_rpath, 1)
yield rpath yield rpath
if not rpath.isdir(): return
diryield_stack = [diryield(rpath)] diryield_stack = [diryield(rpath)]
delayed_rp_stack = [] delayed_rp_stack = []
...@@ -214,26 +206,6 @@ class Select: ...@@ -214,26 +206,6 @@ class Select:
for rp in rec_func(new_rp, rec_func, sel_func): for rp in rec_func(new_rp, rec_func, sel_func):
yield rp yield rp
def iterate_starting_from(self, rpath, rec_func, sel_func):
"""Like Iterate, but only yield indicies > self.starting_index"""
if rpath.index > self.starting_index: # past starting_index
for rp in self.Iterate(rpath, self.Iterate, sel_func):
yield rp
elif (rpath.index == self.starting_index[:len(rpath.index)]
and rpath.isdir()):
# May encounter starting index on this branch
if self.iterate_parents: yield rpath
for rp in self.iterate_in_dir(rpath, self.iterate_starting_from,
sel_func): yield rp
# def iterate_with_finalizer(self):
# """Like Iterate, but missing some options, and add finalizer"""
# finalize = IterTreeReducer(DestructiveSteppingFinalizer, ())
# for rp in self:
# yield rp
# finalize(rp.index, rp))
# finalize.Finish()
def Select(self, rp): def Select(self, rp):
"""Run through the selection functions and return dominant val 0/1/2""" """Run through the selection functions and return dominant val 0/1/2"""
for sf in self.selection_functions: for sf in self.selection_functions:
......
...@@ -19,8 +19,8 @@ ...@@ -19,8 +19,8 @@
"""Generate and process aggregated backup information""" """Generate and process aggregated backup information"""
import re, os import re, os, time
import Globals, TempFile, robust, Time, rorpiter import Globals, TempFile, robust, Time, rorpiter, increment
class StatsException(Exception): pass class StatsException(Exception): pass
...@@ -73,6 +73,10 @@ class StatsObj: ...@@ -73,6 +73,10 @@ class StatsObj:
"""Add 1 to value of attribute""" """Add 1 to value of attribute"""
self.__dict__[attr] += 1 self.__dict__[attr] += 1
def add_to_stat(self, attr, value):
"""Add value to given attribute"""
self.__dict__[attr] += value
def get_total_dest_size_change(self): def get_total_dest_size_change(self):
"""Return total destination size change """Return total destination size change
...@@ -215,12 +219,9 @@ class StatsObj: ...@@ -215,12 +219,9 @@ class StatsObj:
def write_stats_to_rp(self, rp): def write_stats_to_rp(self, rp):
"""Write statistics string to given rpath""" """Write statistics string to given rpath"""
tf = TempFile.new(rp) fp = rp.open("wb")
def init_thunk(): fp.write(self.get_stats_string())
fp = tf.open("w") assert not fp.close()
fp.write(self.get_stats_string())
fp.close()
robust.make_tf_robustaction(init_thunk, (tf,), (rp,)).execute()
def read_stats_from_rp(self, rp): def read_stats_from_rp(self, rp):
"""Set statistics from rpath, return self for convenience""" """Set statistics from rpath, return self for convenience"""
...@@ -263,81 +264,81 @@ class StatsObj: ...@@ -263,81 +264,81 @@ class StatsObj:
return s return s
class ITRB(rorpiter.ITRBranch, StatsObj):
"""Keep track of per directory statistics
This is subclassed by the mirroring and incrementing ITRs.
"""
def __init__(self):
"""StatsITR initializer - zero out statistics"""
attr_dict = self.__dict__
for attr in StatsObj.stat_file_attrs: attr_dict[attr] = 0
self.ElapsedTime = self.Filename = None
def start_stats(self, mirror_dsrp):
"""Record status of mirror dsrp
This is called before the mirror is processed so we remember
the old state.
"""
if mirror_dsrp.lstat():
self.mirror_base_exists = 1
self.mirror_base_size = self.stats_getsize(mirror_dsrp)
else: self.mirror_base_exists = None
def stats_getsize(self, rp):
"""Return size of rp, with error checking"""
try: return rp.getsize()
except KeyError: return 0
def end_stats(self, diff_rorp, mirror_dsrp, inc_rp = None):
"""Set various statistics after mirror processed"""
if mirror_dsrp.lstat():
source_size = self.stats_getsize(mirror_dsrp)
self.SourceFiles += 1
self.SourceFileSize += source_size
if self.mirror_base_exists:
self.MirrorFiles += 1
self.MirrorFileSize += self.mirror_base_size
if diff_rorp: # otherwise no change
self.ChangedFiles += 1
self.ChangedSourceSize += source_size
self.ChangedMirrorSize += self.mirror_base_size
self.stats_incr_incfiles(inc_rp)
else: # new file was created
self.NewFiles += 1
self.NewFileSize += source_size
self.stats_incr_incfiles(inc_rp)
else:
if self.mirror_base_exists: # file was deleted from mirror
self.MirrorFiles += 1
self.MirrorFileSize += self.mirror_base_size
self.DeletedFiles += 1
self.DeletedFileSize += self.mirror_base_size
self.stats_incr_incfiles(inc_rp)
def fast_process(self, mirror_rorp):
"""Use when there is no change from source to mirror"""
source_size = self.stats_getsize(mirror_rorp)
self.SourceFiles += 1
self.MirrorFiles += 1
self.SourceFileSize += source_size
self.MirrorFileSize += source_size
def stats_incr_incfiles(self, inc_rp):
"""Increment IncrementFile statistics"""
if inc_rp:
self.IncrementFiles += 1
self.IncrementFileSize += self.stats_getsize(inc_rp)
def add_file_stats(self, branch):
"""Add all file statistics from branch to current totals"""
for attr in self.stat_file_attrs:
self.__dict__[attr] += branch.__dict__[attr]
class StatFileObj(StatsObj):
"""Build on StatsObj, add functions for processing files"""
def __init__(self, start_time = None):
"""StatFileObj initializer - zero out file attributes"""
StatsObj.__init__(self)
for attr in self.stat_file_attrs: self.set_stat(attr, 0)
if start_time is None: start_time = Time.curtime
self.StartTime = start_time
self.Errors = 0
def add_source_file(self, src_rorp):
"""Add stats of source file"""
self.SourceFiles += 1
if src_rorp.isreg(): self.SourceFileSize += src_rorp.getsize()
def add_dest_file(self, dest_rorp):
"""Add stats of destination size"""
self.MirrorFiles += 1
if dest_rorp.isreg(): self.MirrorFileSize += dest_rorp.getsize()
def add_changed(self, src_rorp, dest_rorp):
"""Update stats when src_rorp changes to dest_rorp"""
if src_rorp and src_rorp.lstat() and dest_rorp and dest_rorp.lstat():
self.ChangedFiles += 1
if src_rorp.isreg(): self.ChangedSourceSize += src_rorp.getsize()
if dest_rorp.isreg(): self.ChangedMirrorSize += dest_rorp.getsize()
elif src_rorp and src_rorp.lstat():
self.NewFiles += 1
if src_rorp.isreg(): self.NewFileSize += src_rorp.getsize()
elif dest_rorp and dest_rorp.lstat():
self.DeletedFiles += 1
if dest_rorp.isreg(): self.DeletedFileSize += dest_rorp.getsize()
def add_increment(self, inc_rorp):
"""Update stats with increment rorp"""
self.IncrementFiles += 1
if inc_rorp.isreg(): self.IncrementFileSize += inc_rorp.getsize()
def add_error(self):
"""Increment error stat by 1"""
self.Errors += 1
def finish(self, end_time = None):
"""Record end time and set other stats"""
if end_time is None: end_time = time.time()
self.EndTime = end_time
_active_statfileobj = None
def init_statfileobj():
"""Return new stat file object, record as active stat object"""
global _active_statfileobj
assert not _active_statfileobj, _active_statfileobj
_active_statfileobj = StatFileObj()
return _active_statfileobj
def get_active_statfileobj():
"""Return active stat file object if it exists"""
if _active_statfileobj: return _active_statfileobj
else: return None
def record_error():
"""Record error on active statfileobj, if there is one"""
if _active_statfileobj: _active_statfileobj.add_error()
def process_increment(inc_rorp):
"""Add statistics of increment rp incrp if there is active statfile"""
if _active_statfileobj: _active_statfileobj.add_increment(inc_rorp)
def write_active_statfileobj():
"""Write active StatFileObj object to session statistics file"""
global _active_statfileobj
assert _active_statfileobj
rp_base = Globals.rbdir.append("session_statistics")
session_stats_rp = increment.get_inc_ext(rp_base, 'data', Time.curtime)
_active_statfileobj.finish()
_active_statfileobj.write_stats_to_rp(session_stats_rp)
_active_statfileobj = None
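These module-level functions manage a single session-wide StatFileObj. A sketch of the life cycle a backup session is expected to drive (hypothetical driver; src_rorp and dest_rorp are whatever the backup loop produces):

    sfo = init_statfileobj()         # create and register the active object
    sfo.add_source_file(src_rorp)    # called once per source file
    sfo.add_dest_file(dest_rorp)     # called once per mirror file
    sfo.add_changed(src_rorp, dest_rorp)
    record_error()                   # safe no-op if no object is active
    write_active_statfileobj()       # finish() it, write the session_statistics
                                     # increment, and clear the active object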
import sys, time
from commontest import *
from rdiff_backup import rpath, Globals
"""benchmark.py
When possible, use 'rdiff-backup' from the shell, which allows using
different versions of rdiff-backup by altering the PYTHONPATH. We
just use clock time, so this isn't exact at all.
"""
output_local = 1
output_desc = "testfiles/output"
new_pythonpath = None
def run_cmd(cmd):
"""Run the given cmd, return the amount of time it took"""
if new_pythonpath: full_cmd = "PYTHONPATH=%s %s" % (new_pythonpath, cmd)
else: full_cmd = cmd
print "Running command '%s'" % (full_cmd,)
t = time.time()
assert not os.system(full_cmd)
return time.time() - t
def create_many_files(dirname, s, count = 1000):
"""Create many short files in the dirname directory
There will be count files in the directory, and each file will
contain the string s.
"""
Myrm("testfiles/many_out")
dir_rp = rpath.RPath(Globals.local_connection, dirname)
dir_rp.mkdir()
for i in xrange(count):
rp = dir_rp.append(str(i))
fp = rp.open("wb")
fp.write(s)
assert not fp.close()
def create_nested(dirname, s, depth, branch_factor = 10):
"""Create many short files in branching directory"""
def write(rp):
fp = rp.open("wb")
fp.write(s)
assert not fp.close()
def helper(rp, depth):
rp.mkdir()
sub_rps = map(lambda i: rp.append(str(i)), range(branch_factor))
if depth == 1: map(write, sub_rps)
else: map(lambda rp: helper(rp, depth-1), sub_rps)
Myrm("testfiles/nested_out")
helper(rpath.RPath(Globals.local_connection, dirname), depth)
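A quick consistency check on the helper's fan-out:

    # helper() writes branch_factor ** depth files in total, so the depth-4
    # calls in nested_files() below create 10 ** 4 = 10000 one-byte files.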
def benchmark(backup_cmd, restore_cmd, desc, update_func = None):
"""Print benchmark using backup_cmd and restore_cmd
If update_func is given, run it and then do backup a third time.
"""
print "Initially backing up %s: %ss" % (desc, run_cmd(backup_cmd))
print "Updating %s, no change: %ss" % (desc, run_cmd(backup_cmd))
if update_func:
update_func()
print "Updating %s, all changed: %ss" % (desc, run_cmd(backup_cmd))
Myrm("testfiles/rest_out")
print "Restoring %s to empty dir: %ss" % (desc, run_cmd(restore_cmd))
print "Restoring %s to unchanged dir: %ss" % (desc, run_cmd(restore_cmd))
def many_files():
"""Time backup and restore of 2000 files"""
count = 2000
create_many_files("testfiles/many_out", "a", count)
backup_cmd = "rdiff-backup testfiles/many_out " + output_desc
restore_cmd = "rdiff-backup --force -r now %s testfiles/rest_out" % \
(output_desc,)
update_func = lambda: create_many_files("testfiles/many_out", "e", count)
benchmark(backup_cmd, restore_cmd, "2000 1-byte files", update_func)
def many_files_rsync():
"""Test rsync benchmark"""
count = 2000
create_many_files("testfiles/many_out", "a", count)
rsync_command = ("rsync -e ssh -aH --delete testfiles/many_out " +
output_desc)
print "Initial rsync: %ss" % (run_cmd(rsync_command),)
print "rsync update: %ss" % (run_cmd(rsync_command),)
create_many_files("testfiles/many_out", "e", count)
print "Update changed rsync: %ss" % (run_cmd(rsync_command),)
def nested_files():
"""Time backup and restore of 10000 nested files"""
depth = 4
create_nested("testfiles/nested_out", "a", depth)
backup_cmd = "rdiff-backup testfiles/nested_out " + output_desc
restore_cmd = "rdiff-backup --force -r now %s testfiles/rest_out" % \
(output_desc,)
update_func = lambda: create_nested("testfiles/nested_out", "e", depth)
benchmark(backup_cmd, restore_cmd, "10000 1-byte nested files",
update_func)
def nested_files_rsync():
"""Test rsync on nested files"""
depth = 4
create_nested("testfiles/nested_out", "a", depth)
rsync_command = ("rsync -e ssh -aH --delete testfiles/nested_out " +
output_desc)
print "Initial rsync: %ss" % (run_cmd(rsync_command),)
print "rsync update: %ss" % (run_cmd(rsync_command),)
create_nested("testfiles/nested_out", "e", depth)
print "Update changed rsync: %ss" % (run_cmd(rsync_command),)
if len(sys.argv) < 2 or len(sys.argv) > 3:
print "Syntax: benchmark.py benchmark_func [output_description]"
print
print "Where output_description defaults to 'testfiles/output'."
print "Currently benchmark_func includes:"
print "'many_files', 'many_files_rsync', and, 'nested_files'."
sys.exit(1)
if len(sys.argv) == 3:
output_desc = sys.argv[2]
if ":" in output_desc: output_local = None
if output_local:
assert not rpath.RPath(Globals.local_connection, output_desc).lstat(), \
"Outfile file %s exists, try deleting it first" % (output_desc,)
if os.environ.has_key('BENCHMARKPYPATH'):
new_pythonpath = os.environ['BENCHMARKPYPATH']
function_name = sys.argv[1]
print "Running ", function_name
eval(sys.argv[1])()
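Example invocations, with placeholder paths (the BENCHMARKPYPATH check above is what allows timing a second rdiff-backup version):

    #   python benchmark.py many_files
    #   python benchmark.py nested_files testfiles/output2
    #   BENCHMARKPYPATH=/path/to/other/version python benchmark.py many_files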
...@@ -3,7 +3,7 @@ import os, sys ...@@ -3,7 +3,7 @@ import os, sys
from rdiff_backup.log import Log from rdiff_backup.log import Log
from rdiff_backup.rpath import RPath from rdiff_backup.rpath import RPath
from rdiff_backup import Globals, Hardlink, SetConnections, Main, \ from rdiff_backup import Globals, Hardlink, SetConnections, Main, \
selection, highlevel, lazy, Time, rpath selection, lazy, Time, rpath
SourceDir = "../src" SourceDir = "../src"
AbsCurdir = os.getcwd() # Absolute path name of current directory AbsCurdir = os.getcwd() # Absolute path name of current directory
...@@ -13,6 +13,9 @@ __no_execute__ = 1 # Keeps the actual rdiff-backup program from running ...@@ -13,6 +13,9 @@ __no_execute__ = 1 # Keeps the actual rdiff-backup program from running
def Myrm(dirstring): def Myrm(dirstring):
"""Run myrm on given directory string""" """Run myrm on given directory string"""
root_rp = rpath.RPath(Globals.local_connection, dirstring)
for rp in selection.Select(root_rp).set_iter():
if rp.isdir(): rp.chmod(0700) # otherwise may not be able to remove
assert not os.system("rm -rf %s" % (dirstring,)) assert not os.system("rm -rf %s" % (dirstring,))
def Make(): def Make():
...@@ -21,6 +24,13 @@ def Make(): ...@@ -21,6 +24,13 @@ def Make():
os.system("python ./Make") os.system("python ./Make")
os.chdir(AbsCurdir) os.chdir(AbsCurdir)
def MakeOutputDir():
"""Initialize the testfiles/output directory"""
Myrm("testfiles/output")
rp = rpath.RPath(Globals.local_connection, "testfiles/output")
rp.mkdir()
return rp
def rdiff_backup(source_local, dest_local, src_dir, dest_dir, def rdiff_backup(source_local, dest_local, src_dir, dest_dir,
current_time = None, extra_options = ""): current_time = None, extra_options = ""):
"""Run rdiff-backup with the given options """Run rdiff-backup with the given options
...@@ -121,6 +131,7 @@ def InternalRestore(mirror_local, dest_local, mirror_dir, dest_dir, time): ...@@ -121,6 +131,7 @@ def InternalRestore(mirror_local, dest_local, mirror_dir, dest_dir, time):
the testing directory and will be modified for remote trials. the testing directory and will be modified for remote trials.
""" """
Main.force = 1
remote_schema = '%s' remote_schema = '%s'
#_reset_connections() #_reset_connections()
if not mirror_local: if not mirror_local:
......
import unittest, os, re, sys, time import unittest, os, re, sys, time
from commontest import * from commontest import *
from rdiff_backup import Globals, log, rpath from rdiff_backup import Globals, log, rpath, robust
"""Regression tests""" """Regression tests"""
Globals.exclude_mirror_regexps = [re.compile(".*/rdiff-backup-data")] Globals.exclude_mirror_regexps = [re.compile(".*/rdiff-backup-data")]
log.Log.setverbosity(7) log.Log.setverbosity(3)
lc = Globals.local_connection lc = Globals.local_connection
...@@ -43,7 +43,7 @@ class PathSetter(unittest.TestCase): ...@@ -43,7 +43,7 @@ class PathSetter(unittest.TestCase):
def reset_schema(self): def reset_schema(self):
self.rb_schema = SourceDir + \ self.rb_schema = SourceDir + \
"/../rdiff-backup -v3 --remote-schema './chdir-wrapper2 %s' " "/../rdiff-backup -v7 --remote-schema './chdir-wrapper2 %s' "
def refresh(self, *rp_list): def refresh(self, *rp_list):
"""Reread data for the given rps""" """Reread data for the given rps"""
...@@ -168,8 +168,9 @@ class PathSetter(unittest.TestCase): ...@@ -168,8 +168,9 @@ class PathSetter(unittest.TestCase):
def getinc_paths(self, basename, directory): def getinc_paths(self, basename, directory):
"""Return increment.______.dir paths""" """Return increment.______.dir paths"""
incfiles = filter(lambda s: s.startswith(basename), dirrp = rpath.RPath(Globals.local_connection, directory)
os.listdir(directory)) incfiles = [filename for filename in robust.listrp(dirrp)
if filename.startswith(basename)]
incfiles.sort() incfiles.sort()
incrps = map(lambda f: rpath.RPath(lc, directory+"/"+f), incfiles) incrps = map(lambda f: rpath.RPath(lc, directory+"/"+f), incfiles)
return map(lambda x: x.path, filter(rpath.RPath.isincfile, incrps)) return map(lambda x: x.path, filter(rpath.RPath.isincfile, incrps))
...@@ -196,26 +197,15 @@ class Final(PathSetter): ...@@ -196,26 +197,15 @@ class Final(PathSetter):
self.set_connections(None, None, "test2/tmp", "../../") self.set_connections(None, None, "test2/tmp", "../../")
self.runtest() self.runtest()
# def testMirroringLocal(self):
# """Run mirroring only everything remote"""
# self.delete_tmpdirs()
# self.set_connections(None, None, None, None)
# self.exec_rb_extra_args(10000, "-m",
# "testfiles/various_file_types",
# "testfiles/output")
# assert CompareRecursive(Local.vftrp, Local.rpout, exclude_rbdir = None)
# def testMirroringRemote(self):
# """Run mirroring only everything remote"""
# self.delete_tmpdirs()
# self.set_connections("test1/", "../", "test2/tmp/", "../../")
# self.exec_rb_extra_args(10000, "-m",
# "testfiles/various_file_types",
# "testfiles/output")
# assert CompareRecursive(Local.vftrp, Local.rpout, exclude_rbdir = None)
def testProcLocal(self):
"""Test initial backup of /proc locally"""
Myrm("testfiles/procoutput")
self.set_connections(None, None, None, None)
self.exec_rb(None, '../../../../../../proc', 'testfiles/procoutput')
def testProcRemote(self): def testProcRemote(self):
"""Test mirroring proc""" """Test mirroring proc"""
Myrm("testfiles/procoutput")
self.set_connections(None, None, "test2/tmp/", "../../") self.set_connections(None, None, "test2/tmp/", "../../")
self.exec_rb(None, '../../../../../../proc', 'testfiles/procoutput') self.exec_rb(None, '../../../../../../proc', 'testfiles/procoutput')
...@@ -352,44 +342,5 @@ testfiles/increment2/changed_dir""") ...@@ -352,44 +342,5 @@ testfiles/increment2/changed_dir""")
self.assertRaises(OSError, os.lstat, self.assertRaises(OSError, os.lstat,
'testfiles/restoretarget1/executable2') 'testfiles/restoretarget1/executable2')
class FinalCorrupt(PathSetter):
def testBackupOverlay(self):
"""Test backing up onto a directory already backed up for that time
This tests whether rdiff-backup will ignore files that
already have an increment where it wants to put something.
Just make sure rdiff-backup doesn't exit with an error.
"""
self.delete_tmpdirs()
assert not os.system("cp -a testfiles/corruptbackup testfiles/output")
self.set_connections(None, None, None, None)
self.exec_rb(None, 'testfiles/corruptbackup_source',
'testfiles/output')
def testBackupOverlayRemote(self):
"""Like above but destination is remote"""
self.delete_tmpdirs()
assert not os.system("cp -a testfiles/corruptbackup testfiles/output")
self.set_connections(None, None, "test1/", '../')
self.exec_rb(None, 'testfiles/corruptbackup_source',
'testfiles/output')
def testCheckpointData(self):
"""Destination directory has bad checkpoint data, no sym"""
self.delete_tmpdirs()
assert not os.system("cp -a testfiles/corrupt_dest1 testfiles/output")
self.set_connections(None, None, None, None)
self.exec_rb(None, 'testfiles/various_file_types', 'testfiles/output')
def testCheckpointData2(self):
"""Destination directory has bad checkpoint data, with sym"""
self.delete_tmpdirs()
assert not os.system("cp -a testfiles/corrupt_dest2 testfiles/output")
self.set_connections(None, None, None, None)
self.exec_rb(None, 'testfiles/various_file_types', 'testfiles/output')
if __name__ == "__main__": unittest.main() if __name__ == "__main__": unittest.main()
import os, unittest, time
import os, unittest
from commontest import * from commontest import *
from rdiff_backup import Globals, Hardlink, selection, rpath from rdiff_backup import Globals, Hardlink, selection, rpath
...@@ -30,48 +29,6 @@ class HardlinkTest(unittest.TestCase): ...@@ -30,48 +29,6 @@ class HardlinkTest(unittest.TestCase):
assert not CompareRecursive(self.hardlink_dir1, self.hardlink_dir2, assert not CompareRecursive(self.hardlink_dir1, self.hardlink_dir2,
compare_hardlinks = 1) compare_hardlinks = 1)
def testCheckpointing(self):
"""Test saving and recovering of various dictionaries"""
d1 = {1:1}
d2 = {2:2}
d3 = {3:3}
d4 = {}
Hardlink._src_inode_indicies = d1
Hardlink._src_index_indicies = d2
Hardlink._dest_inode_indicies = d3
Hardlink._dest_index_indicies = d4
self.reset_output()
Time.setcurtime(12345)
Globals.isbackup_writer = 1
Hardlink.final_checkpoint(self.outputrp)
reset_hardlink_dicts()
assert Hardlink.retrieve_checkpoint(self.outputrp, 12345)
assert Hardlink._src_inode_indicies == d1, \
Hardlink._src_inode_indicies
assert Hardlink._src_index_indicies == d2, \
Hardlink._src_index_indicies
assert Hardlink._dest_inode_indicies == d3, \
Hardlink._dest_inode_indicies
assert Hardlink._dest_index_indicies == d4, \
Hardlink._dest_index_indicies
def testFinalwrite(self):
"""Test writing of the final database"""
Globals.isbackup_writer = 1
Time.setcurtime(123456)
Globals.rbdir = self.outputrp
finald = Hardlink._src_index_indicies = {'hello':'world'}
self.reset_output()
Hardlink.final_writedata()
Hardlink._src_index_indicies = None
assert Hardlink.retrieve_final(123456)
assert Hardlink._src_index_indicies == finald
def testBuildingDict(self): def testBuildingDict(self):
"""See if the partial inode dictionary is correct""" """See if the partial inode dictionary is correct"""
Globals.preserve_hardlinks = 1 Globals.preserve_hardlinks = 1
...@@ -143,6 +100,74 @@ class HardlinkTest(unittest.TestCase): ...@@ -143,6 +100,74 @@ class HardlinkTest(unittest.TestCase):
BackupRestoreSeries(None, None, dirlist, compare_hardlinks=1) BackupRestoreSeries(None, None, dirlist, compare_hardlinks=1)
BackupRestoreSeries(1, 1, dirlist, compare_hardlinks=1) BackupRestoreSeries(1, 1, dirlist, compare_hardlinks=1)
def testInnerRestore(self):
"""Restore part of a dir, see if hard links preserved"""
MakeOutputDir()
output = rpath.RPath(Globals.local_connection,
"testfiles/output")
# Now set up directories out_hardlink1 and out_hardlink2
hlout1 = rpath.RPath(Globals.local_connection,
"testfiles/out_hardlink1")
if hlout1.lstat(): hlout1.delete()
hlout1.mkdir()
hlout1_sub = hlout1.append("subdir")
hlout1_sub.mkdir()
hl1_1 = hlout1_sub.append("hardlink1")
hl1_2 = hlout1_sub.append("hardlink2")
hl1_3 = hlout1_sub.append("hardlink3")
hl1_4 = hlout1_sub.append("hardlink4")
# 1 and 2 are hard linked, as are 3 and 4
hl1_1.touch()
hl1_2.hardlink(hl1_1.path)
hl1_3.touch()
hl1_4.hardlink(hl1_3.path)
hlout2 = rpath.RPath(Globals.local_connection,
"testfiles/out_hardlink2")
if hlout2.lstat(): hlout2.delete()
assert not os.system("cp -a testfiles/out_hardlink1 "
"testfiles/out_hardlink2")
hlout2_sub = hlout2.append("subdir")
hl2_1 = hlout2_sub.append("hardlink1")
hl2_2 = hlout2_sub.append("hardlink2")
hl2_3 = hlout2_sub.append("hardlink3")
hl2_4 = hlout2_sub.append("hardlink4")
# Now 2 and 3 are hard linked, also 1 and 4
rpath.copy_with_attribs(hl1_1, hl2_1)
rpath.copy_with_attribs(hl1_2, hl2_2)
hl2_3.delete()
hl2_3.hardlink(hl2_2.path)
hl2_4.delete()
hl2_4.hardlink(hl2_1.path)
rpath.copy_attribs(hlout1_sub, hlout2_sub)
InternalBackup(1, 1, hlout1.path, output.path)
time.sleep(1)
InternalBackup(1, 1, hlout2.path, output.path)
out2 = rpath.RPath(Globals.local_connection, "testfiles/out2")
hlout1 = out2.append("hardlink1")
hlout2 = out2.append("hardlink2")
hlout3 = out2.append("hardlink3")
hlout4 = out2.append("hardlink4")
if out2.lstat(): out2.delete()
InternalRestore(1, 1, "testfiles/output/subdir", "testfiles/out2", 1)
out2.setdata()
for rp in [hlout1, hlout2, hlout3, hlout4]: rp.setdata()
assert hlout1.getinode() == hlout2.getinode()
assert hlout3.getinode() == hlout4.getinode()
assert hlout1.getinode() != hlout3.getinode()
if out2.lstat(): out2.delete()
InternalRestore(1, 1, "testfiles/output/subdir", "testfiles/out2",
int(time.time()))
out2.setdata()
for rp in [hlout1, hlout2, hlout3, hlout4]: rp.setdata()
assert hlout1.getinode() == hlout4.getinode()
assert hlout2.getinode() == hlout3.getinode()
assert hlout1.getinode() != hlout2.getinode()
if __name__ == "__main__": unittest.main() if __name__ == "__main__": unittest.main()
import unittest, os, re, time import unittest, os, re, time
from commontest import * from commontest import *
from rdiff_backup import log, rpath, restore, increment, Time, \ from rdiff_backup import log, rpath, increment, Time, Rdiff, statistics
Rdiff, statistics
lc = Globals.local_connection lc = Globals.local_connection
Globals.change_source_perms = 1 Globals.change_source_perms = 1
...@@ -21,14 +20,14 @@ dir = getrp(".") ...@@ -21,14 +20,14 @@ dir = getrp(".")
sym = getrp("symbolic_link") sym = getrp("symbolic_link")
nothing = getrp("nothing") nothing = getrp("nothing")
target = rpath.RPath(lc, "testfiles/out") target = rpath.RPath(lc, "testfiles/output/out")
out2 = rpath.RPath(lc, "testfiles/out2") out2 = rpath.RPath(lc, "testfiles/output/out2")
out_gz = rpath.RPath(lc, "testfiles/out.gz") out_gz = rpath.RPath(lc, "testfiles/output/out.gz")
Time.setprevtime(999424113) Time.setprevtime(999424113)
prevtimestr = "2001-09-02T02:48:33-07:00" prevtimestr = "2001-09-02T02:48:33-07:00"
t_pref = "testfiles/out.2001-09-02T02:48:33-07:00" t_pref = "testfiles/output/out.2001-09-02T02:48:33-07:00"
t_diff = "testfiles/out.2001-09-02T02:48:33-07:00.diff" t_diff = "testfiles/output/out.2001-09-02T02:48:33-07:00.diff"
Globals.no_compression_regexp = \ Globals.no_compression_regexp = \
re.compile(Globals.no_compression_regexp_string, re.I) re.compile(Globals.no_compression_regexp_string, re.I)
...@@ -37,11 +36,12 @@ class inctest(unittest.TestCase): ...@@ -37,11 +36,12 @@ class inctest(unittest.TestCase):
"""Test the incrementRP function""" """Test the incrementRP function"""
def setUp(self): def setUp(self):
Globals.set('isbackup_writer',1) Globals.set('isbackup_writer',1)
MakeOutputDir()
def check_time(self, rp): def check_time(self, rp):
"""Make sure that rp is an inc file, and time is Time.prevtime""" """Make sure that rp is an inc file, and time is Time.prevtime"""
assert rp.isincfile(), rp assert rp.isincfile(), rp
t = Time.stringtotime(rp.getinctime()) t = rp.getinctime()
assert t == Time.prevtime, (t, Time.prevtime) assert t == Time.prevtime, (t, Time.prevtime)
def testreg(self): def testreg(self):
...@@ -114,7 +114,7 @@ class inctest(unittest.TestCase): ...@@ -114,7 +114,7 @@ class inctest(unittest.TestCase):
rp = increment.Increment(rf, rf2, target) rp = increment.Increment(rf, rf2, target)
self.check_time(rp) self.check_time(rp)
assert rpath.cmp_attribs(rp, rf2) assert rpath.cmp_attribs(rp, rf2)
Rdiff.patch_action(rf, rp, out2).execute() Rdiff.patch_local(rf, rp, out2)
assert rpath.cmp(rf2, out2) assert rpath.cmp(rf2, out2)
rp.delete() rp.delete()
out2.delete() out2.delete()
...@@ -125,7 +125,7 @@ class inctest(unittest.TestCase): ...@@ -125,7 +125,7 @@ class inctest(unittest.TestCase):
rp = increment.Increment(rf, rf2, target) rp = increment.Increment(rf, rf2, target)
self.check_time(rp) self.check_time(rp)
assert rpath.cmp_attribs(rp, rf2) assert rpath.cmp_attribs(rp, rf2)
Rdiff.patch_action(rf, rp, out2, delta_compressed = 1).execute() Rdiff.patch_local(rf, rp, out2, delta_compressed = 1)
assert rpath.cmp(rf2, out2) assert rpath.cmp(rf2, out2)
rp.delete() rp.delete()
out2.delete() out2.delete()
...@@ -139,86 +139,10 @@ class inctest(unittest.TestCase): ...@@ -139,86 +139,10 @@ class inctest(unittest.TestCase):
rp = increment.Increment(rf, out_gz, target) rp = increment.Increment(rf, out_gz, target)
self.check_time(rp) self.check_time(rp)
assert rpath.cmp_attribs(rp, out_gz) assert rpath.cmp_attribs(rp, out_gz)
Rdiff.patch_action(rf, rp, out2).execute() Rdiff.patch_local(rf, rp, out2)
assert rpath.cmp(out_gz, out2) assert rpath.cmp(out_gz, out2)
rp.delete() rp.delete()
out2.delete() out2.delete()
out_gz.delete() out_gz.delete()
class inctest2(unittest.TestCase):
"""Like inctest but contains more elaborate tests"""
def stats_check_initial(self, s):
"""Make sure stats object s compatible with initial mirroring
A lot of the off by one stuff is because the root directory
exists in the below examples.
"""
assert s.MirrorFiles == 1 or s.MirrorFiles == 0
assert s.MirrorFileSize < 20000
assert s.NewFiles <= s.SourceFiles <= s.NewFiles + 1
assert s.NewFileSize <= s.SourceFileSize <= s.NewFileSize + 20000
assert s.ChangedFiles == 1 or s.ChangedFiles == 0
assert s.ChangedSourceSize < 20000
assert s.ChangedMirrorSize < 20000
assert s.DeletedFiles == s.DeletedFileSize == 0
assert s.IncrementFileSize == 0
def testStatistics(self):
"""Test the writing of statistics
The file sizes are approximate because the size of directories
could change with different file systems...
"""
Globals.compression = 1
Myrm("testfiles/output")
InternalBackup(1, 1, "testfiles/stattest1", "testfiles/output")
InternalBackup(1, 1, "testfiles/stattest2", "testfiles/output",
time.time()+1)
rbdir = rpath.RPath(Globals.local_connection,
"testfiles/output/rdiff-backup-data")
#incs = Restore.get_inclist(rbdir.append("subdir").
# append("directory_statistics"))
#assert len(incs) == 2
#s1 = StatsObj().read_stats_from_rp(incs[0]) # initial mirror stats
#assert s1.SourceFiles == 2
#assert 400000 < s1.SourceFileSize < 420000
#self.stats_check_initial(s1)
#subdir_stats = StatsObj().read_stats_from_rp(incs[1]) # increment stats
#assert subdir_stats.SourceFiles == 2
#assert 400000 < subdir_stats.SourceFileSize < 420000
#assert subdir_stats.MirrorFiles == 2
#assert 400000 < subdir_stats.MirrorFileSize < 420000
#assert subdir_stats.NewFiles == subdir_stats.NewFileSize == 0
#assert subdir_stats.DeletedFiles == subdir_stats.DeletedFileSize == 0
#assert subdir_stats.ChangedFiles == 2
#assert 400000 < subdir_stats.ChangedSourceSize < 420000
#assert 400000 < subdir_stats.ChangedMirrorSize < 420000
#assert 10 < subdir_stats.IncrementFileSize < 20000
incs = restore.get_inclist(rbdir.append("session_statistics"))
assert len(incs) == 2
s2 = statistics.StatsObj().read_stats_from_rp(incs[0])
assert s2.SourceFiles == 7
assert 700000 < s2.SourceFileSize < 750000
self.stats_check_initial(s2)
root_stats = statistics.StatsObj().read_stats_from_rp(incs[1])
assert root_stats.SourceFiles == 7, root_stats.SourceFiles
assert 550000 < root_stats.SourceFileSize < 570000
assert root_stats.MirrorFiles == 7
assert 700000 < root_stats.MirrorFileSize < 750000
assert root_stats.NewFiles == 1
assert root_stats.NewFileSize == 0
assert root_stats.DeletedFiles == 1
assert root_stats.DeletedFileSize == 200000
assert 3 <= root_stats.ChangedFiles <= 4, root_stats.ChangedFiles
assert 450000 < root_stats.ChangedSourceSize < 470000
assert 400000 < root_stats.ChangedMirrorSize < 420000
assert 10 < root_stats.IncrementFileSize < 30000
if __name__ == '__main__': unittest.main() if __name__ == '__main__': unittest.main()
import unittest, os, cStringIO, time import unittest, os, cStringIO, time
from rdiff_backup.metadata import * from rdiff_backup.metadata import *
from rdiff_backup import rpath, connection, Globals, selection, \ from rdiff_backup import rpath, connection, Globals, selection
destructive_stepping
tempdir = rpath.RPath(Globals.local_connection, "testfiles/output") tempdir = rpath.RPath(Globals.local_connection, "testfiles/output")
......
...@@ -50,7 +50,7 @@ class RdiffTest(unittest.TestCase): ...@@ -50,7 +50,7 @@ class RdiffTest(unittest.TestCase):
self.delta.write_from_fileobj(Rdiff.get_delta_sigrp(self.signature, self.delta.write_from_fileobj(Rdiff.get_delta_sigrp(self.signature,
self.new)) self.new))
assert self.delta.lstat() assert self.delta.lstat()
Rdiff.patch_action(self.basis, self.delta, self.output).execute() Rdiff.patch_local(self.basis, self.delta, self.output)
assert rpath.cmp(self.new, self.output) assert rpath.cmp(self.new, self.output)
map(rpath.RPath.delete, rplist) map(rpath.RPath.delete, rplist)
...@@ -74,14 +74,14 @@ class RdiffTest(unittest.TestCase): ...@@ -74,14 +74,14 @@ class RdiffTest(unittest.TestCase):
os.system("mv %s %s" % (self.delta.path + ".gz", self.delta.path)) os.system("mv %s %s" % (self.delta.path + ".gz", self.delta.path))
self.delta.setdata() self.delta.setdata()
Rdiff.patch_action(self.basis, self.delta, self.output, Rdiff.patch_local(self.basis, self.delta, self.output,
delta_compressed = 1).execute() delta_compressed = 1)
assert rpath.cmp(self.new, self.output) assert rpath.cmp(self.new, self.output)
map(rpath.RPath.delete, rplist) map(rpath.RPath.delete, rplist)
def testWriteDelta(self): def testWriteDelta(self):
"""Test write delta feature of rdiff""" """Test write delta feature of rdiff"""
self.delta.delete() if self.delta.lstat(): self.delta.delete()
rplist = [self.basis, self.new, self.delta, self.output] rplist = [self.basis, self.new, self.delta, self.output]
MakeRandomFile(self.basis.path) MakeRandomFile(self.basis.path)
MakeRandomFile(self.new.path) MakeRandomFile(self.new.path)
...@@ -90,7 +90,7 @@ class RdiffTest(unittest.TestCase): ...@@ -90,7 +90,7 @@ class RdiffTest(unittest.TestCase):
Rdiff.write_delta(self.basis, self.new, self.delta) Rdiff.write_delta(self.basis, self.new, self.delta)
assert self.delta.lstat() assert self.delta.lstat()
Rdiff.patch_action(self.basis, self.delta, self.output).execute() Rdiff.patch_local(self.basis, self.delta, self.output)
assert rpath.cmp(self.new, self.output) assert rpath.cmp(self.new, self.output)
map(rpath.RPath.delete, rplist) map(rpath.RPath.delete, rplist)
...@@ -109,7 +109,7 @@ class RdiffTest(unittest.TestCase): ...@@ -109,7 +109,7 @@ class RdiffTest(unittest.TestCase):
os.system("gunzip " + delta_gz.path) os.system("gunzip " + delta_gz.path)
delta_gz.setdata() delta_gz.setdata()
self.delta.setdata() self.delta.setdata()
Rdiff.patch_action(self.basis, self.delta, self.output).execute() Rdiff.patch_local(self.basis, self.delta, self.output)
assert rpath.cmp(self.new, self.output) assert rpath.cmp(self.new, self.output)
map(rpath.RPath.delete, rplist) map(rpath.RPath.delete, rplist)
...@@ -128,7 +128,7 @@ class RdiffTest(unittest.TestCase): ...@@ -128,7 +128,7 @@ class RdiffTest(unittest.TestCase):
self.delta.write_from_fileobj(Rdiff.get_delta_sigrp(self.signature, self.delta.write_from_fileobj(Rdiff.get_delta_sigrp(self.signature,
self.new)) self.new))
assert self.delta.lstat() assert self.delta.lstat()
Rdiff.patch_action(self.basis, self.delta).execute() Rdiff.patch_local(self.basis, self.delta)
assert rpath.cmp(self.basis, self.new) assert rpath.cmp(self.basis, self.new)
map(rpath.RPath.delete, rplist) map(rpath.RPath.delete, rplist)
...@@ -141,31 +141,10 @@ class RdiffTest(unittest.TestCase): ...@@ -141,31 +141,10 @@ class RdiffTest(unittest.TestCase):
MakeRandomFile(self.basis.path) MakeRandomFile(self.basis.path)
MakeRandomFile(self.new.path) MakeRandomFile(self.new.path)
map(rpath.RPath.setdata, rplist) map(rpath.RPath.setdata, rplist)
Rdiff.copy_action(self.basis, self.new).execute() Rdiff.copy_local(self.basis, self.new)
assert rpath.cmp(self.basis, self.new) assert rpath.cmp(self.basis, self.new)
map(rpath.RPath.delete, rplist) map(rpath.RPath.delete, rplist)
def testPatchWithAttribs(self):
"""Using rdiff to copy two files with attributes"""
rplist = [self.basis, self.new, self.delta]
for rp in rplist:
if rp.lstat(): rp.delete()
MakeRandomFile(self.basis.path)
MakeRandomFile(self.new.path)
self.new.chmod(0401)
map(rpath.RPath.setdata, rplist)
Rdiff.write_delta(self.basis, self.new, self.delta)
rpath.copy_attribs(self.new, self.delta)
assert self.delta.getperms() == 0401
assert not self.basis == self.new
Rdiff.patch_with_attribs_action(self.basis, self.delta).execute()
if not self.basis == self.new:
print self.basis, self.new
assert 0
map(rpath.RPath.delete, rplist)
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()
import unittest, os import unittest, os
from commontest import * from commontest import *
from rdiff_backup import Globals, SetConnections, log, rpath from rdiff_backup import Globals, SetConnections, log, rpath, backup
"""Regression tests """Regression tests
...@@ -12,14 +12,14 @@ testfiles ...@@ -12,14 +12,14 @@ testfiles
Globals.set('change_source_perms', 1) Globals.set('change_source_perms', 1)
Globals.counter = 0 Globals.counter = 0
log.Log.setverbosity(7) log.Log.setverbosity(3)
def get_local_rp(extension):
return rpath.RPath(Globals.local_connection, "testfiles/" + extension)
class Local: class Local:
"""This is just a place to put increments relative to the local """This is just a place to put increments relative to the local
connection""" connection"""
def get_local_rp(extension):
return rpath.RPath(Globals.local_connection, "testfiles/" + extension)
inc1rp = get_local_rp('increment1') inc1rp = get_local_rp('increment1')
inc2rp = get_local_rp('increment2') inc2rp = get_local_rp('increment2')
inc3rp = get_local_rp('increment3') inc3rp = get_local_rp('increment3')
...@@ -152,98 +152,52 @@ class IncrementTest1(unittest.TestCase): ...@@ -152,98 +152,52 @@ class IncrementTest1(unittest.TestCase):
"""Increment/Restore when both directories are remote""" """Increment/Restore when both directories are remote"""
BackupRestoreSeries(None, None, self.dirlist) BackupRestoreSeries(None, None, self.dirlist)
def testNoWrite(self):
"""Test backup/restore on dirs without write permissions"""
def write_string(rp, s = ""):
"""Write string s to file"""
fp = rp.open("wb")
fp.write(s)
assert not fp.close()
def make_subdirs():
"""Make testfiles/no_write_out and testfiles/no_write_out2"""
nw_out1 = get_local_rp("no_write_out")
nw_out1.mkdir()
nw_out1_1 = get_local_rp("no_write_out/1")
write_string(nw_out1_1)
nw_out1_1.chmod(0)
nw_out1_2 = get_local_rp("no_write_out/2")
write_string(nw_out1_2, 'e')
nw_out1_1.chmod(0400)
nw1_sub = get_local_rp("no_write_out/subdir")
nw1_sub.mkdir()
nw_out1_sub1 = get_local_rp("no_write_out/subdir/1")
write_string(nw_out1_sub1, 'f')
nw1_sub.chmod(0500)
nw_out1.chmod(0500)
nw_out2 = get_local_rp("no_write_out2")
nw_out2.mkdir()
nw_out2_1 = get_local_rp("no_write_out2/1")
write_string(nw_out2_1, 'g')
nw_out2_2 = get_local_rp("no_write_out2/2")
write_string(nw_out2_2, 'aeu')
nw_out1.chmod(0500)
Myrm("testfiles/no_write_out")
Myrm("testfiles/no_write_out2")
Myrm("testfiles/output")
make_subdirs()
BackupRestoreSeries(1, 1, ['testfiles/no_write_out',
'testfiles/no_write_out2',
'testfiles/empty'])
class IncrementTest2(PathSetter):
def OldtestRecoveryLocal(self):
"""Test to see if rdiff-backup can continue with bad increment"""
assert not os.system("rm -rf testfiles/recovery_out_backup")
self.setPathnames(None, None, None, None)
Time.setprevtime(1006136450)
Time.setcurtime()
Globals.add_regexp('.*rdiff-backup-data', 1)
os.system('cp -a testfiles/recovery_out testfiles/recovery_out_backup')
recovery_in = self.get_src_rp('testfiles/recovery')
recovery_out = self.get_dest_rp('testfiles/recovery_out_backup')
recovery_inc = self.get_dest_rp('testfiles/recovery_out_backup/'
'rdiff-backup-data/increments')
highlevel.Mirror_and_increment(recovery_in, recovery_out, recovery_inc)
# Should probably check integrity of increments, but for now
# allow if it doesn't during the Mirror_and_increment
def OldtestRecoveryRemote(self):
"""Test Recovery with both connections remote"""
assert not os.system('rm -rf testfiles/recovery_out_backup')
self.setPathnames('test1', '../', 'test2/tmp', '../../')
Time.setprevtime(1006136450)
Time.setcurtime()
Globals.add_regexp('.*rdiff-backup-data', 1)
os.system('cp -a testfiles/recovery_out testfiles/recovery_out_backup')
recovery_in = self.get_src_rp('testfiles/recovery')
recovery_out = self.get_dest_rp('testfiles/recovery_out_backup')
recovery_inc = self.get_dest_rp('testfiles/recovery_out_backup/'
'rdiff-backup-data/increments')
highlevel.Mirror_and_increment(recovery_in, recovery_out, recovery_inc)
# Should probably check integrity of increments, but for now
# allow if it doesn't during the Mirror_and_increment
def runtest(self):
"""After setting connections, etc., run actual test using this"""
Time.setcurtime()
Main.backup_set_select(Local.inc1rp)
highlevel.Mirror(self.inc1rp, self.rpout)
assert CompareRecursive(Local.inc1rp, Local.rpout)
Time.setcurtime()
Time.setprevtime(999500000)
Main.backup_set_select(self.inc2rp)
highlevel.Mirror_and_increment(self.inc2rp, self.rpout, self.rpout_inc)
assert CompareRecursive(Local.inc2rp, Local.rpout)
Time.setcurtime()
Time.setprevtime(999510000)
Main.backup_set_select(self.inc3rp)
highlevel.Mirror_and_increment(self.inc3rp, self.rpout, self.rpout_inc)
assert CompareRecursive(Local.inc3rp, Local.rpout)
Time.setcurtime()
Time.setprevtime(999520000)
Main.backup_set_select(self.inc4rp)
highlevel.Mirror_and_increment(self.inc4rp, self.rpout, self.rpout_inc)
assert CompareRecursive(Local.inc4rp, Local.rpout)
print "Restoring to self.inc4"
highlevel.Restore(999530000, self.rpout, self.get_inctup(),
self.rpout4)
assert CompareRecursive(Local.inc4rp, Local.rpout4)
print "Restoring to self.inc3"
highlevel.Restore(999520000, self.rpout, self.get_inctup(),
self.rpout3)
assert CompareRecursive(Local.inc3rp, Local.rpout3)
print "Restoring to self.inc2"
highlevel.Restore(999510000, self.rpout, self.get_inctup(),
self.rpout2)
assert CompareRecursive(Local.inc2rp, Local.rpout2)
print "Restoring to self.inc1"
highlevel.Restore(999500000, self.rpout, self.get_inctup(),
self.rpout1)
assert CompareRecursive(Local.inc1rp, Local.rpout1)
def get_inctup(self):
"""Return inc tuples as expected by Restore.RestoreRecursive
Assumes output increment directory is
testfiles/output_inc._____.
"""
filenames = filter(lambda x: x.startswith("output_inc."),
Local.prefix.listdir())
rplist = map(lambda x: Local.prefix.append(x), filenames)
return IndexedTuple((), (Local.prefix.append("output_inc"), rplist))
class MirrorTest(PathSetter): class MirrorTest(PathSetter):
...@@ -317,7 +271,7 @@ class MirrorTest(PathSetter): ...@@ -317,7 +271,7 @@ class MirrorTest(PathSetter):
Globals.change_ownership = 1 Globals.change_ownership = 1
self.refresh(self.rootfiles, self.rootfiles_out, self.refresh(self.rootfiles, self.rootfiles_out,
Local.rootfiles, Local.rootfiles_out) # add uid/gid info Local.rootfiles, Local.rootfiles_out) # add uid/gid info
highlevel.Mirror(self.rootfiles, self.rootfiles_out) backup.Mirror(self.rootfiles, self.rootfiles_out)
assert CompareRecursive(Local.rootfiles, Local.rootfiles_out) assert CompareRecursive(Local.rootfiles, Local.rootfiles_out)
Globals.change_ownership = None Globals.change_ownership = None
self.refresh(self.rootfiles, self.rootfiles_out, self.refresh(self.rootfiles, self.rootfiles_out,
...@@ -330,29 +284,13 @@ class MirrorTest(PathSetter): ...@@ -330,29 +284,13 @@ class MirrorTest(PathSetter):
conn.Globals.set('change_ownership', 1) conn.Globals.set('change_ownership', 1)
self.refresh(self.rootfiles, self.rootfiles_out, self.refresh(self.rootfiles, self.rootfiles_out,
Local.rootfiles, Local.rootfiles_out) # add uid/gid info Local.rootfiles, Local.rootfiles_out) # add uid/gid info
highlevel.Mirror(self.rootfiles, self.rootfiles_out) backup.Mirror(self.rootfiles, self.rootfiles_out)
assert CompareRecursive(Local.rootfiles, Local.rootfiles_out) assert CompareRecursive(Local.rootfiles, Local.rootfiles_out)
for conn in Globals.connections: for conn in Globals.connections:
conn.Globals.set('change_ownership', None) conn.Globals.set('change_ownership', None)
self.refresh(self.rootfiles, self.rootfiles_out, self.refresh(self.rootfiles, self.rootfiles_out,
Local.rootfiles, Local.rootfiles_out) # remove that info Local.rootfiles, Local.rootfiles_out) # remove that info
def testRoot2Local(self):
"""Make sure we can backup a directory we don't own"""
self.setPathnames(None, None, None, None)
Globals.change_ownership = Globals.change_source_perms = None
self.refresh(self.rootfiles2, self.rootfiles_out2,
Local.rootfiles2, Local.rootfiles_out2) # add uid/gid info
self.Mirror(self.rootfiles2, self.rootfiles_out2)
assert CompareRecursive(Local.rootfiles2, Local.rootfiles_out2)
self.refresh(self.rootfiles2, self.rootfiles_out2,
Local.rootfiles2, Local.rootfiles_out2) # remove that info
self.Mirror(self.rootfiles21, self.rootfiles_out2)
assert CompareRecursive(Local.rootfiles21, Local.rootfiles_out2)
self.refresh(self.rootfiles21, self.rootfiles_out2,
Local.rootfiles21, Local.rootfiles_out2) # remove that info
Globals.change_source_perms = 1
def deleteoutput(self): def deleteoutput(self):
assert not os.system("rm -rf testfiles/output*") assert not os.system("rm -rf testfiles/output*")
self.rbdir = self.rpout.append('rdiff-backup-data') self.rbdir = self.rpout.append('rdiff-backup-data')
...@@ -395,12 +333,12 @@ class MirrorTest(PathSetter): ...@@ -395,12 +333,12 @@ class MirrorTest(PathSetter):
assert CompareRecursive(Local.inc2rp, Local.rpout) assert CompareRecursive(Local.inc2rp, Local.rpout)
def Mirror(self, rpin, rpout): def Mirror(self, rpin, rpout):
"""Like highlevel.Mirror, but run misc_setup first""" """Like backup.Mirror, but setup first, cleanup later"""
Main.force = 1 Main.force = 1
Main.misc_setup([rpin, rpout]) Main.misc_setup([rpin, rpout])
Main.backup_set_select(rpin) Main.backup_set_select(rpin)
Main.backup_init_dirs(rpin, rpout) Main.backup_init_dirs(rpin, rpout)
highlevel.Mirror(rpin, rpout) backup.Mirror(rpin, rpout)
Log.close_logfile() Log.close_logfile()
Hardlink.clear_dictionaries() Hardlink.clear_dictionaries()
......
import unittest import unittest
from commontest import * from commontest import *
from rdiff_backup import log, restore, Globals, rpath from rdiff_backup import log, restore, Globals, rpath, TempFile
Log.setverbosity(3) Log.setverbosity(3)
lc = Globals.local_connection lc = Globals.local_connection
tempdir = rpath.RPath(Globals.local_connection, "testfiles/output")
restore_base_rp = rpath.RPath(Globals.local_connection,
"testfiles/restoretest")
restore_base_filenames = restore_base_rp.listdir()
mirror_time = 1041109438 # just some late time
class RestoreFileComparer:
"""Holds a file to be restored and tests against it
Each object has a restore file and a dictionary of times ->
rpaths. When the restore file is restored to one of the given
times, the resulting file should be the same as the related rpath.
"""
def __init__(self, rf):
self.rf = rf
self.time_rp_dict = {}
def add_rpath(self, rp, t):
"""Add rp, which represents what rf should be at given time t"""
assert not self.time_rp_dict.has_key(t)
self.time_rp_dict[t] = rp
def compare_at_time(self, t):
"""Restore file, make sure it is the same at time t"""
log.Log("Checking result at time %s" % (t,), 7)
tf = TempFile.new(tempdir.append("foo"))
restore._mirror_time = mirror_time
restore._rest_time = t
self.rf.set_relevant_incs()
out_rorpath = self.rf.get_attribs().getRORPath()
correct_result = self.time_rp_dict[t]
if out_rorpath.isreg():
out_rorpath.setfile(self.rf.get_restore_fp())
rpath.copy_with_attribs(out_rorpath, tf)
assert tf.equal_verbose(correct_result, check_index = 0), \
"%s, %s" % (tf, correct_result)
if tf.isreg():
assert rpath.cmpfileobj(tf.open("rb"), correct_result.open("rb"))
if tf.lstat(): tf.delete()
def compare_all(self):
"""Check restore results for all available times"""
for t in self.time_rp_dict.keys(): self.compare_at_time(t)
 class RestoreTest(unittest.TestCase):
     """Test Restore class"""
-    prefix = "testfiles/restoretest/"
-    def maketesttuples(self, basename):
-        """Make testing tuples from available files starting with prefix
-
-        tuples is a sorted (oldest to newest) list of pairs (rp1, rp2)
-        where rp1 is an increment file and rp2 is the same but without
-        the final extension.  incs is a list of all increment files.
-
-        """
-        dirlist = os.listdir(self.prefix)
-        dirlist.sort()
-        baselist = filter(lambda f: f.startswith(basename), dirlist)
-        rps = map(lambda f: rpath.RPath(lc, self.prefix+f), baselist)
-        incs = filter(lambda rp: rp.isincfile(), rps)
-        tuples = map(lambda rp: (rp, rpath.RPath(lc, "%s.%s" %
-                                 (rp.getincbase().path,
-                                  rp.getinctime()))),
-                     incs)
-        return tuples, incs
-
-    def restoreonefiletest(self, basename):
-        tuples, incs = self.maketesttuples(basename)
-        rpbase = rpath.RPath(lc, self.prefix + basename)
-        rptarget = rpath.RPath(lc, "testfiles/outfile")
-        Hardlink.initialize_dictionaries()
-
-        for pair in tuples:
-            print "Processing file " + pair[0].path
-            if rptarget.lstat(): rptarget.delete()
-            rest_time = Time.stringtotime(pair[0].getinctime())
-            rid = restore.RestoreIncrementData((), rpbase, incs)
-            rid.sortincseq(rest_time, 10000000000) # pick some really late time
-            rcd = restore.RestoreCombinedData(rid, rpbase, rptarget)
-            rcd.RestoreFile()
-            #sorted_incs = Restore.sortincseq(rest_time, incs)
-            #Restore.RestoreFile(rest_time, rpbase, (), sorted_incs, rptarget)
-            rptarget.setdata()
-            if not rptarget.lstat(): assert not pair[1].lstat()
-            elif not pair[1].lstat(): assert not rptarget.lstat()
-            else:
-                assert rpath.cmp(rptarget, pair[1]), \
-                       "%s %s" % (rptarget.path, pair[1].path)
-                assert rpath.cmp_attribs(rptarget, pair[1]), \
-                       "%s %s" % (rptarget.path, pair[1].path)
-                rptarget.delete()
-
-    def testsortincseq(self):
-        """Test the Restore.sortincseq function
-
-        This test just makes sure that it comes up with the right
-        number of increments for each base name - given a list of
-        increments, we should eventually get sorted sequences that
-        end in each one (each one will be the last increment once).
-
-        """
-        for basename in ['ocaml', 'mf']:
-            tuples, unused = self.maketesttuples(basename)
-            incs = [tuple[0] for tuple in tuples]
-
-            # Now we need a time newer than any inc
-            mirror_time = Time.stringtotime(incs[-1].getinctime()) + 10000
-
-            for inc, incbase in tuples:
-                assert inc.isincfile()
-                inctime = Time.stringtotime(inc.getinctime())
-                rid1 = restore.RestoreIncrementData(basename, incbase, incs)
-                rid1.sortincseq(inctime, mirror_time)
-                assert rid1.inc_list, rid1.inc_list
-                # oldest increment should be exactly inctime
-                ridtime = Time.stringtotime(rid1.inc_list[-1].getinctime())
-                assert ridtime == inctime, (ridtime, inctime)
-
-    def testRestorefiles(self):
-        """Testing restoration of files one at a time"""
-        map(self.restoreonefiletest, ["ocaml", "mf"])
-
-    def testRestoreDir(self):
-        """Test restoring from a real backup set
-
-        Run makerestoretest3 if this doesn't work.
-
-        """
-        Myrm("testfiles/output")
-        InternalRestore(1, 1, "testfiles/restoretest3",
-                        "testfiles/output", 20000)
-        src_rp = rpath.RPath(Globals.local_connection, "testfiles/increment2")
-        restore_rp = rpath.RPath(Globals.local_connection, "testfiles/output")
-        assert CompareRecursive(src_rp, restore_rp)
-
-    def testRestoreCorrupt(self):
-        """Test restoring a partially corrupt archive
-
-        The problem here is that a directory is missing from what is
-        to be restored, but because the previous backup was aborted in
-        the middle, some of the files in that directory weren't marked
-        as .missing.
-
-        """
-        Myrm("testfiles/output")
-        InternalRestore(1, 1, "testfiles/restoretest4", "testfiles/output",
-                        10000)
-        assert os.lstat("testfiles/output")
-        self.assertRaises(OSError, os.lstat, "testfiles/output/tmp")
-        self.assertRaises(OSError, os.lstat, "testfiles/output/rdiff-backup")
+    def get_rfcs(self):
+        """Return available RestoreFileComparer objects"""
+        base_rf = restore.RestoreFile(restore_base_rp, restore_base_rp, [])
+        rfs = base_rf.yield_sub_rfs()
+        rfcs = []
+        for rf in rfs:
+            if rf.mirror_rp.dirsplit()[1] in ["dir"]:
+                log.Log("skipping 'dir'", 5)
+                continue
+
+            rfc = RestoreFileComparer(rf)
+            for inc in rf.inc_list:
+                test_time = inc.getinctime()
+                rfc.add_rpath(self.get_correct(rf.mirror_rp, test_time),
+                              test_time)
+            rfc.add_rpath(rf.mirror_rp, mirror_time)
+            rfcs.append(rfc)
+        return rfcs
+
+    def get_correct(self, mirror_rp, test_time):
+        """Return correct version with base mirror_rp at time test_time"""
+        assert -1 < test_time < 2000000000, test_time
+        dirname, basename = mirror_rp.dirsplit()
+        for filename in restore_base_filenames:
+            comps = filename.split(".")
+            base = ".".join(comps[:-1])
+            t = Time.stringtotime(comps[-1])
+            if t == test_time and basename == base:
+                return restore_base_rp.append(filename)
+        # Correct rp must be empty
+        return restore_base_rp.append("%s.%s" %
+                                      (basename, Time.timetostring(test_time)))
+
+    def testRestoreSingle(self):
+        """Test restoring files one at a time"""
+        MakeOutputDir()
+        for rfc in self.get_rfcs():
+            if rfc.rf.inc_rp.isincfile(): continue
+            log.Log("Comparing %s" % (rfc.rf.inc_rp.path,), 5)
+            rfc.compare_all()
+
+    def testBothLocal(self):
+        """Test directory restore everything local"""
+        self.restore_dir_test(1, 1)
+
+    def testMirrorRemote(self):
+        """Test directory restore mirror is remote"""
+        self.restore_dir_test(0, 1)
+
+    def testDestRemote(self):
+        """Test directory restore destination is remote"""
+        self.restore_dir_test(1, 0)
+
+    def testBothRemote(self):
+        """Test directory restore everything is remote"""
+        self.restore_dir_test(0, 0)
+
+    def restore_dir_test(self, mirror_local, dest_local):
+        """Run whole dir tests
+
+        If any of the above tests don't work, try rerunning
+        makerestoretest3.
+
+        """
+        Myrm("testfiles/output")
+        target_rp = rpath.RPath(Globals.local_connection, "testfiles/output")
+        mirror_rp = rpath.RPath(Globals.local_connection,
+                                "testfiles/restoretest3")
+        inc1_rp = rpath.RPath(Globals.local_connection,
+                              "testfiles/increment1")
+        inc2_rp = rpath.RPath(Globals.local_connection,
+                              "testfiles/increment2")
+        inc3_rp = rpath.RPath(Globals.local_connection,
+                              "testfiles/increment3")
+        inc4_rp = rpath.RPath(Globals.local_connection,
+                              "testfiles/increment4")
+
+        InternalRestore(mirror_local, dest_local, "testfiles/restoretest3",
+                        "testfiles/output", 45000)
+        assert CompareRecursive(inc4_rp, target_rp)
+
+        InternalRestore(mirror_local, dest_local, "testfiles/restoretest3",
+                        "testfiles/output", 35000)
+        assert CompareRecursive(inc3_rp, target_rp, compare_hardlinks = 0)
+
+        InternalRestore(mirror_local, dest_local, "testfiles/restoretest3",
+                        "testfiles/output", 25000)
+        assert CompareRecursive(inc2_rp, target_rp, compare_hardlinks = 0)
+
+        InternalRestore(mirror_local, dest_local, "testfiles/restoretest3",
+                        "testfiles/output", 5000)
+        assert CompareRecursive(inc1_rp, target_rp, compare_hardlinks = 0)
+
+#    def testRestoreCorrupt(self):
+#        """Test restoring a partially corrupt archive
+#
+#        The problem here is that a directory is missing from what is
+#        to be restored, but because the previous backup was aborted in
+#        the middle, some of the files in that directory weren't marked
+#        as .missing.
+#
+#        """
+#        Myrm("testfiles/output")
+#        InternalRestore(1, 1, "testfiles/restoretest4", "testfiles/output",
+#                        10000)
+#        assert os.lstat("testfiles/output")
+#        self.assertRaises(OSError, os.lstat, "testfiles/output/tmp")
+#        self.assertRaises(OSError, os.lstat, "testfiles/output/rdiff-backup")
+
     def testRestoreNoincs(self):
         """Test restoring a directory with no increments, just mirror"""
...
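
A note on get_correct() above: it leans on a naming convention in which
each precomputed correct result is stored as basename.<timestring>, where
<timestring> is a W3 datetime string containing no dots, so splitting on
the final dot recovers both parts.  A minimal sketch of that convention
(split_correct_name is hypothetical, not part of rdiff-backup):

    # Hypothetical helper mirroring get_correct()'s basename.<timestring>
    # naming convention.
    def split_correct_name(filename):
        comps = filename.split(".")
        base = ".".join(comps[:-1])  # the base may itself contain dots
        timestring = comps[-1]       # e.g. "2002-12-31T21:33:22-05:00"
        return base, timestring

    assert split_correct_name("some.file.10000") == ("some.file", "10000")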
 import os, unittest
 from commontest import *
 from rdiff_backup import rpath, robust, TempFile, Globals
-class TestRobustAction(unittest.TestCase):
-    """Test some robust actions"""
-    def testCopyWithAttribs(self):
-        """Test copy with attribs action"""
-        rpin = rpath.RPath(Globals.local_connection, "./testfiles/robust/in")
-        fp = open("./testfiles/robust/in", "wb")
-        fp.write("hello there")
-        fp.close()
-        os.chmod("./testfiles/robust/in", 0604)
-        rpin.setdata()
-        assert rpin.isreg() and rpin.getperms() % 01000 == 0604
-        rpout = rpath.RPath(Globals.local_connection, "./testfiles/robust/out")
-        robust.copy_with_attribs_action(rpin, rpout).execute()
-        if not rpout == rpin:
-            print rpout, rpin
-            assert 0
-        rpout.delete()
-        rpin.delete()
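
The removed test above checked a copy-with-attributes action: copy the
file's data, then carry its permission bits and timestamps over to the
copy.  A rough standard-library equivalent of that semantic (this sketch
is not the rpath/robust API):

    import os, shutil

    def copy_with_attribs(src, dest):
        """Copy file data, then permissions and access/modification times."""
        shutil.copyfile(src, dest)
        st = os.lstat(src)
        os.chmod(dest, st.st_mode & 07777)          # permission bits only
        os.utime(dest, (st.st_atime, st.st_mtime))  # preserve times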
 class TempFileTest(unittest.TestCase):
-    """Test creation and management of tempfiles"""
+    """Test creation and management of tempfiles in TempFile module"""
     rp_base = rpath.RPath(Globals.local_connection,
                           "./testfiles/robust/testfile_base")
     def testBasic(self):
@@ -61,26 +40,19 @@ class TempFileTest(unittest.TestCase):
         assert destination.lstat()
         destination.delete()
-class SaveStateTest(unittest.TestCase):
-    """Test SaveState class"""
-    data_dir = rpath.RPath(Globals.local_connection, "testfiles/robust")
-    def testSymlinking(self):
-        """Test recording last file with symlink"""
-        last_rorp = rpath.RORPath(('usr', 'local', 'bin', 'ls'))
-        Globals.rbdir = self.data_dir
-        Time.setcurtime()
-        SetConnections.BackupInitConnections(Globals.local_connection,
-                                             Globals.local_connection)
-        robust.SaveState.init_filenames()
-        robust.SaveState.record_last_file_action(last_rorp).execute()
-        sym_rp = rpath.RPath(Globals.local_connection,
-                             "testfiles/robust/last-file-incremented.%s.data" %
-                             Time.curtimestr)
-        assert sym_rp.issym()
-        assert sym_rp.readlink() == "increments/usr/local/bin/ls"
-        sym_rp.delete()
+class RobustTest(unittest.TestCase):
+    """Test robust module"""
+    def test_check_common_error(self):
+        """Test capturing errors"""
+        def cause_catchable_error(a):
+            os.lstat("aoenuthaoeu/aosutnhcg.4fpr,38p")
+        def cause_uncatchable_error():
+            ansoethusaotneuhsaotneuhsaontehuaou
+        result = robust.check_common_error(None, cause_catchable_error, [1])
+        assert result is None, result
+        try: robust.check_common_error(None, cause_uncatchable_error)
+        except NameError: pass
+        else: assert 0, "NameError not raised"
 if __name__ == '__main__': unittest.main()
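
RobustTest above depends on robust.check_common_error trapping routine
filesystem errors (the failed lstat) while letting programming errors (the
NameError) propagate.  A simplified sketch of such a wrapper, assuming
only EnvironmentError and its subclasses count as catchable:

    import os

    def check_common_error(error_handler, function, args=[]):
        """Run function(*args), swallowing routine OS errors.

        Simplified stand-in: the real robust.check_common_error also
        logs the problem and gives error_handler a chance to run; here
        catchable errors just yield None."""
        try: return function(*args)
        except EnvironmentError:  # IOError/OSError: expected at runtime
            return None           # anything else (e.g. NameError) propagates

    assert check_common_error(None, os.lstat, ["/no/such/path"]) is None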
 import unittest, os
 from commontest import *
-from rdiff_backup.log import *
-from rdiff_backup import Globals
+from rdiff_backup import Globals, log
"""Root tests """Root tests
...@@ -11,7 +10,11 @@ that are meant to be run as root. ...@@ -11,7 +10,11 @@ that are meant to be run as root.
Globals.set('change_source_perms', None) Globals.set('change_source_perms', None)
Globals.counter = 0 Globals.counter = 0
Log.setverbosity(4) log.Log.setverbosity(4)
def Run(cmd):
print "Running: ", cmd
assert not os.system(cmd)
class RootTest(unittest.TestCase): class RootTest(unittest.TestCase):
dirlist1 = ["testfiles/root", "testfiles/various_file_types", "testfiles/increment4"] dirlist1 = ["testfiles/root", "testfiles/various_file_types", "testfiles/increment4"]
...@@ -21,8 +24,63 @@ class RootTest(unittest.TestCase): ...@@ -21,8 +24,63 @@ class RootTest(unittest.TestCase):
     def testLocal2(self): BackupRestoreSeries(1, 1, self.dirlist2)
     def testRemote(self): BackupRestoreSeries(None, None, self.dirlist1)
-    def tearDown(self):
-        os.system(MiscDir + "/myrm testfiles/output testfiles/rest_out")
+
+class NonRoot(unittest.TestCase):
+    """Test backing up as non-root user
+
+    Test backing up a directory with files of different userids and
+    with device files in it, as a non-root user.  When restoring as
+    root, everything should be restored normally.
+
+    """
+    user = 'ben'
+
+    def make_root_dir(self):
+        """Make directory creatable only by root"""
+        rp = rpath.RPath(Globals.local_connection, "testfiles/root_out")
+        if rp.lstat(): Myrm(rp.path)
+        rp.mkdir()
+        rp1 = rp.append("1")
+        rp1.touch()
+        rp2 = rp.append("2")
+        rp2.touch()
+        rp2.chown(1, 1)
+        rp3 = rp.append("3")
+        rp3.touch()
+        rp3.chown(2, 2)
+        rp4 = rp.append("dev")
+        rp4.makedev('c', 4, 28)
+        return rp
+
+    def test_non_root(self):
+        """Main non-root -> root test"""
+        Myrm("testfiles/output")
+        input_rp = self.make_root_dir()
+        Globals.change_ownership = 1
+        output_rp = rpath.RPath(Globals.local_connection, "testfiles/output")
+        restore_rp = rpath.RPath(Globals.local_connection,
+                                 "testfiles/rest_out")
+        empty_rp = rpath.RPath(Globals.local_connection, "testfiles/empty")
+
+        backup_cmd = "rdiff-backup %s %s" % (input_rp.path, output_rp.path)
+        Run("su %s -c '%s'" % (self.user, backup_cmd))
+
+        Myrm("testfiles/rest_out")
+        restore_cmd = "rdiff-backup -r now %s %s" % (output_rp.path,
+                                                     restore_rp.path,)
+        Run(restore_cmd)
+        assert CompareRecursive(input_rp, restore_rp)
+
+        backup_cmd = "rdiff-backup %s %s" % (empty_rp.path, output_rp.path)
+        Run("su %s -c '%s'" % (self.user, backup_cmd))
+        Myrm("testfiles/rest_out")
+        Run(restore_cmd)
+        assert CompareRecursive(empty_rp, restore_rp)
+
+        Myrm("testfiles/rest_out")
+        restore_cmd = "rdiff-backup -r 1 %s %s" % (output_rp.path,
+                                                   restore_rp.path,)
+        Run(restore_cmd)
+        assert CompareRecursive(input_rp, restore_rp)

 if __name__ == "__main__": unittest.main()
@@ -52,25 +52,6 @@ class RORPIterTest(unittest.TestCase):
                                 iter([])))

-    def testCombinedPatching(self):
-        """Combined signature, patch, and diff operations"""
-        if self.output.lstat():
-            Myrm(self.output.path)
-            self.output.setdata()
-
-        def turninto(final_rp):
-            sigfile = rorpiter.ToFile(rorpiter.GetSignatureIter(self.output))
-            diff_file = rorpiter.ToFile(rorpiter.GetDiffIter(
-                rorpiter.FromFile(sigfile), rorpiter.IterateRPaths(final_rp)))
-            rorpiter.PatchIter(self.output, rorpiter.FromFile(diff_file))
-
-        turninto(self.inc1rp)
-        rpath.copy_attribs(self.inc1rp, self.output) # Update time
-        assert self.compare_no_times(self.inc1rp, self.output)
-        turninto(self.inc2rp)
-        rpath.copy_attribs(self.inc2rp, self.output)
-        assert self.compare_no_times(self.inc2rp, self.output)
-
     def compare_no_times(self, src_rp, dest_rp):
         """Compare but disregard directory attributes"""
         def equal(src_rorp, dest_rorp):
...
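
The removed testCombinedPatching exercised the rsync-style cycle at
directory level: take a signature of what is on disk, diff the desired
state against that signature, then patch the disk copy with the delta.
At single-file level the same cycle can be sketched with the
rdiff_backup.librsync file wrappers; treat the constructor details here
(SigFile wrapping a readable file, DeltaFile taking the signature plus the
new file, PatchedFile requiring a true file object for the basis) as
assumptions about this era of the code rather than a stable API:

    import os, tempfile, StringIO
    from rdiff_backup import librsync

    basis = "hello world, version 1\n" * 200
    target = "hello world, version 2\n" * 200

    # Signature of the basis, then a delta taking basis -> target
    sig = librsync.SigFile(StringIO.StringIO(basis)).read()
    delta = librsync.DeltaFile(sig, StringIO.StringIO(target)).read()

    # Assumption: PatchedFile wants a real file for the basis (it seeks)
    fd, path = tempfile.mkstemp()
    os.write(fd, basis)
    os.close(fd)
    basis_file = open(path, "rb")
    patched = librsync.PatchedFile(basis_file, StringIO.StringIO(delta)).read()
    assert patched == target
    basis_file.close()
    os.unlink(path)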
@@ -417,19 +417,19 @@ testfiles/select**/2
                               ("--exclude", "/")],
                              [(), ("home",)])

-    def testParseStartingFrom(self):
-        """Test parse, this time starting from inside"""
-        self.root = rpath.RPath(Globals.local_connection, "testfiles/select")
-        self.Select = Select(self.root)
-        self.Select.ParseArgs([("--include", "testfiles/select/1/1"),
-                               ("--exclude", "**")], [])
-        self.Select.set_iter(('1', '1'))
-        assert lazy.Iter.equal(lazy.Iter.map(lambda dsrp: dsrp.index,
-                                             self.Select),
-                               iter([("1", '1', '1'),
-                                     ('1', '1', '2'),
-                                     ('1', '1', '3')]),
-                               verbose = 1)
+#    def testParseStartingFrom(self):
+#        """Test parse, this time starting from inside"""
+#        self.root = rpath.RPath(Globals.local_connection, "testfiles/select")
+#        self.Select = Select(self.root)
+#        self.Select.ParseArgs([("--include", "testfiles/select/1/1"),
+#                               ("--exclude", "**")], [])
+#        self.Select.set_iter(('1', '1'))
+#        assert lazy.Iter.equal(lazy.Iter.map(lambda dsrp: dsrp.index,
+#                                             self.Select),
+#                               iter([("1", '1', '1'),
+#                                     ('1', '1', '2'),
+#                                     ('1', '1', '3')]),
+#                               verbose = 1)

 if __name__ == "__main__": unittest.main()
-import unittest
+import unittest, time
 from commontest import *
-from rdiff_backup import statistics, rpath
+from rdiff_backup import statistics, rpath, restore

 class StatsObjTest(unittest.TestCase):
     """Test StatsObj class"""
@@ -29,7 +29,7 @@ class StatsObjTest(unittest.TestCase):
         self.set_obj(s)
         assert s.get_stat('SourceFiles') == 1

-        s1 = statistics.ITRB()
+        s1 = statistics.StatFileObj()
         assert s1.get_stat('SourceFiles') == 0

     def test_get_stats_string(self):
@@ -40,10 +40,12 @@ class StatsObjTest(unittest.TestCase):
         self.set_obj(s)
         stats_string = s.get_stats_string()
-        assert stats_string == \
-"""StartTime 11.00 (Wed Dec 31 16:00:11 1969)
-EndTime 12.00 (Wed Dec 31 16:00:12 1969)
-ElapsedTime 1.00 (1 second)
+        ss_list = stats_string.split("\n")
+        tail = "\n".join(ss_list[2:]) # Time varies by time zone, don't check
+        #"""StartTime 11.00 (Wed Dec 31 16:00:11 1969)
+        #EndTime 12.00 (Wed Dec 31 16:00:12 1969)"
+        assert tail == \
+"""ElapsedTime 1.00 (1 second)
 SourceFiles 1
 SourceFileSize 2 (2 bytes)
 MirrorFiles 13
@@ -143,4 +145,81 @@ TotalDestinationSizeChange 7 (7 bytes)
         assert s3.SourceFiles == 75

+class IncStatTest(unittest.TestCase):
+    """Test statistics as produced by actual backup"""
+    def stats_check_initial(self, s):
+        """Make sure stats object s is compatible with initial mirroring
+
+        A lot of the off-by-one stuff is because the root directory
+        exists in the below examples.
+
+        """
+        assert s.MirrorFiles == 1 or s.MirrorFiles == 0
+        assert s.MirrorFileSize < 20000
+        assert s.NewFiles <= s.SourceFiles <= s.NewFiles + 1
+        assert s.NewFileSize <= s.SourceFileSize <= s.NewFileSize + 20000
+        assert s.ChangedFiles == 1 or s.ChangedFiles == 0
+        assert s.ChangedSourceSize < 20000
+        assert s.ChangedMirrorSize < 20000
+        assert s.DeletedFiles == s.DeletedFileSize == 0
+        assert s.IncrementFileSize == 0
+
+    def testStatistics(self):
+        """Test the writing of statistics
+
+        The file sizes are approximate because the size of directories
+        could change with different file systems...
+
+        """
+        Globals.compression = 1
+        Myrm("testfiles/output")
+        InternalBackup(1, 1, "testfiles/stattest1", "testfiles/output")
+        InternalBackup(1, 1, "testfiles/stattest2", "testfiles/output",
+                       time.time()+1)
+
+        rbdir = rpath.RPath(Globals.local_connection,
+                            "testfiles/output/rdiff-backup-data")
+
+        #incs = Restore.get_inclist(rbdir.append("subdir").
+        #                           append("directory_statistics"))
+        #assert len(incs) == 2
+        #s1 = StatsObj().read_stats_from_rp(incs[0]) # initial mirror stats
+        #assert s1.SourceFiles == 2
+        #assert 400000 < s1.SourceFileSize < 420000
+        #self.stats_check_initial(s1)
+
+        #subdir_stats = StatsObj().read_stats_from_rp(incs[1]) # increment stats
+        #assert subdir_stats.SourceFiles == 2
+        #assert 400000 < subdir_stats.SourceFileSize < 420000
+        #assert subdir_stats.MirrorFiles == 2
+        #assert 400000 < subdir_stats.MirrorFileSize < 420000
+        #assert subdir_stats.NewFiles == subdir_stats.NewFileSize == 0
+        #assert subdir_stats.DeletedFiles == subdir_stats.DeletedFileSize == 0
+        #assert subdir_stats.ChangedFiles == 2
+        #assert 400000 < subdir_stats.ChangedSourceSize < 420000
+        #assert 400000 < subdir_stats.ChangedMirrorSize < 420000
+        #assert 10 < subdir_stats.IncrementFileSize < 20000
+
+        incs = restore.get_inclist(rbdir.append("session_statistics"))
+        assert len(incs) == 2
+        s2 = statistics.StatsObj().read_stats_from_rp(incs[0])
+        assert s2.SourceFiles == 7
+        assert 700000 <= s2.SourceFileSize < 750000
+        self.stats_check_initial(s2)
+
+        root_stats = statistics.StatsObj().read_stats_from_rp(incs[1])
+        assert root_stats.SourceFiles == 7, root_stats.SourceFiles
+        assert 550000 <= root_stats.SourceFileSize < 570000
+        assert root_stats.MirrorFiles == 7
+        assert 700000 <= root_stats.MirrorFileSize < 750000
+        assert root_stats.NewFiles == 1
+        assert root_stats.NewFileSize == 0
+        assert root_stats.DeletedFiles == 1
+        assert root_stats.DeletedFileSize == 200000
+        assert 3 <= root_stats.ChangedFiles <= 4, root_stats.ChangedFiles
+        assert 450000 <= root_stats.ChangedSourceSize < 470000
+        assert 400000 <= root_stats.ChangedMirrorSize < 420000, \
+               root_stats.ChangedMirrorSize
+        assert 10 < root_stats.IncrementFileSize < 30000

 if __name__ == "__main__": unittest.main()
@@ -6,7 +6,7 @@ class TimeTest(unittest.TestCase):
     def testConversion(self):
         """test timetostring and stringtotime"""
         Time.setcurtime()
-        assert type(Time.curtime) is types.FloatType
+        assert type(Time.curtime) in (types.FloatType, types.LongType)
         assert type(Time.curtimestr) is types.StringType
         assert (Time.cmp(int(Time.curtime), Time.curtimestr) == 0 or
                 Time.cmp(int(Time.curtime) + 1, Time.curtimestr) == 0)
...
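
testConversion checks that timetostring and stringtotime invert each
other to within a second.  The round trip can be sketched with
hypothetical UTC stand-ins (the real Time module emits W3 datetime
strings with a numeric zone offset, e.g. 2002-12-31T21:33:22-05:00, and
handles time zones much more carefully than this):

    import time, calendar

    def timetostring(t):   # stand-in: UTC with a 'Z' suffix
        return time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime(t))

    def stringtotime(s):   # inverse of the stand-in above
        return calendar.timegm(time.strptime(s, "%Y-%m-%dT%H:%M:%SZ"))

    now = int(time.time())
    assert stringtotime(timetostring(now)) == now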