Commit ab547632 authored by ben

Bugfixed the new statistics stuff


git-svn-id: http://svn.savannah.nongnu.org/svn/rdiff-backup@108 2b77aa54-bcbc-44c9-a7ec-4f6cf2b41109
parent 381c3c2c
......@@ -24,14 +24,12 @@ class HighLevel:
accompanying diagram.
"""
def Mirror(src_rpath, dest_rpath, checkpoint = 1,
session_info = None, write_finaldata = 1):
def Mirror(src_rpath, dest_rpath, inc_rpath = None, session_info = None):
"""Turn dest_rpath into a copy of src_rpath
Checkpoint true means to checkpoint periodically, otherwise
not. If session_info is given, try to resume Mirroring from
that point. If write_finaldata is true, save extra data files
like hardlink_data. If it is false, make a complete mirror.
If inc_rpath is true, then this is the initial mirroring of an
incremental backup, so checkpoint and write to data_dir.
Otherwise only mirror and don't create any extra files.
"""
SourceS = src_rpath.conn.HLSourceStruct
......@@ -42,8 +40,9 @@ class HighLevel:
src_init_dsiter = SourceS.split_initial_dsiter()
dest_sigiter = DestS.get_sigs(dest_rpath, src_init_dsiter)
diffiter = SourceS.get_diffs_and_finalize(dest_sigiter)
DestS.patch_and_finalize(dest_rpath, diffiter,
checkpoint, write_finaldata)
if inc_rpath:
DestS.patch_w_datadir_writes(dest_rpath, diffiter, inc_rpath)
else: DestS.patch_and_finalize(dest_rpath, diffiter)
dest_rpath.setdata()
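The hunk above replaces Mirror's old checkpoint/write_finaldata flags with a single optional inc_rpath: when an increments path is given, the destination side checkpoints and writes statistics under the data directory, otherwise it only mirrors. A minimal standalone sketch of that dispatch; none of the names below are rdiff-backup's real API, they are stand-ins for illustration only.

```python
# Illustrative sketch of the new dispatch in Mirror(); the helper functions
# are made-up stand-ins, not rdiff-backup code.
def patch_w_datadir_writes(dest, diffs, inc_rpath):
    print("patch %s, checkpoints and stats under %s" % (dest, inc_rpath))

def patch_and_finalize(dest, diffs):
    print("patch %s, no extra data files" % dest)

def mirror(src, dest, inc_rpath=None):
    diffs = iter([])          # stand-in for the diff iterator from the source side
    if inc_rpath:             # initial mirror of an incremental backup
        patch_w_datadir_writes(dest, diffs, inc_rpath)
    else:                     # plain mirror, create no extra files
        patch_and_finalize(dest, diffs)

mirror("/src", "/dest")
mirror("/src", "/dest", "/dest/rdiff-backup-data/increments")
```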
......@@ -207,8 +206,13 @@ class HLDestinationStruct:
iitr.override_changed()
return iitr
def patch_and_finalize(cls, dest_rpath, diffs,
checkpoint = 1, write_finaldata = 1):
def get_MirrorITR(cls, inc_rpath):
"""Return MirrorITR, starting from state if available"""
if cls._session_info and cls._session_info.ITR:
return cls._session_info.ITR
else: return MirrorITR(inc_rpath)
def patch_and_finalize(cls, dest_rpath, diffs):
"""Apply diffs and finalize"""
collated = RORPIter.CollateIterators(diffs, cls.initial_dsiter2)
finalizer = cls.get_finalizer()
......@@ -229,12 +233,36 @@ class HLDestinationStruct:
while 1:
try: dsrp = cls.check_skip_error(error_checked, dsrp)
except StopIteration: break
if checkpoint: SaveState.checkpoint_mirror(finalizer, dsrp)
except: cls.handle_last_error(dsrp, finalizer)
except: Log.exception(1)
finalizer.Finish()
if Globals.preserve_hardlinks and write_finaldata:
Hardlink.final_writedata()
if checkpoint: SaveState.checkpoint_remove()
def patch_w_datadir_writes(cls, dest_rpath, diffs, inc_rpath):
"""Apply diffs and finalize, with checkpointing and statistics"""
collated = RORPIter.CollateIterators(diffs, cls.initial_dsiter2)
finalizer, ITR = cls.get_finalizer(), cls.get_MirrorITR(inc_rpath)
dsrp = None
def error_checked():
"""Inner writing loop, check this for errors"""
indexed_tuple = collated.next()
Log("Processing %s" % str(indexed_tuple), 7)
diff_rorp, dsrp = indexed_tuple
if not dsrp: dsrp = cls.get_dsrp(dest_rpath, diff_rorp.index)
if diff_rorp and diff_rorp.isplaceholder(): diff_rorp = None
ITR(dsrp.index, diff_rorp, dsrp)
finalizer(dsrp.index, dsrp)
return dsrp
try:
while 1:
try: dsrp = cls.check_skip_error(error_checked, dsrp)
except StopIteration: break
SaveState.checkpoint(ITR, finalizer, dsrp)
cls.check_skip_error(ITR.Finish, dsrp)
cls.check_skip_error(finalizer.Finish, dsrp)
except: cls.handle_last_error(dsrp, finalizer, ITR)
if Globals.preserve_hardlinks: Hardlink.final_writedata()
SaveState.checkpoint_remove()
def patch_increment_and_finalize(cls, dest_rpath, diffs, inc_rpath):
"""Apply diffs, write increment if necessary, and finalize"""
......@@ -258,7 +286,7 @@ class HLDestinationStruct:
while 1:
try: dsrp = cls.check_skip_error(error_checked, dsrp)
except StopIteration: break
SaveState.checkpoint_inc_backup(ITR, finalizer, dsrp)
SaveState.checkpoint(ITR, finalizer, dsrp)
cls.check_skip_error(ITR.Finish, dsrp)
cls.check_skip_error(finalizer.Finish, dsrp)
except: cls.handle_last_error(dsrp, finalizer, ITR)
......@@ -285,11 +313,10 @@ class HLDestinationStruct:
Log.exception(1,2)
raise
def handle_last_error(cls, dsrp, finalizer, ITR = None):
def handle_last_error(cls, dsrp, finalizer, ITR):
"""If catch fatal error, try to checkpoint before exiting"""
Log.exception(1)
if ITR: SaveState.checkpoint_inc_backup(ITR, finalizer, dsrp, 1)
else: SaveState.checkpoint_mirror(finalizer, dsrp, 1)
SaveState.checkpoint(ITR, finalizer, dsrp, 1)
if Globals.preserve_hardlinks: Hardlink.final_checkpoint(Globals.rbdir)
SaveState.touch_last_file_definitive()
raise
......
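In the hunks above, get_MirrorITR reuses a tree reducer saved by an interrupted session when one exists, and patch_w_datadir_writes checkpoints it together with the finalizer on every pass through the inner loop (SaveState itself decides when a checkpoint is actually due). A rough, self-contained sketch of that resume-or-create plus periodic-checkpoint pattern; Checkpointer and get_or_resume are illustrative stand-ins, not rdiff-backup classes.

```python
import time

class Checkpointer:
    """Stand-in for SaveState: checkpoint at most every `interval` seconds."""
    def __init__(self, interval=20):
        self.interval, self.last = interval, 0.0
    def checkpoint(self, state, override=False):
        if override or time.time() > self.last + self.interval:
            self.last = time.time()
            print("checkpoint: %r" % (state,))

def get_or_resume(saved_state, factory, *args):
    """Return state saved by an interrupted session, else build it fresh."""
    return saved_state if saved_state is not None else factory(*args)

saver = Checkpointer(interval=0)     # checkpoint on every item, for the demo
stats = get_or_resume(None, dict)    # no previous session: start fresh
for name in ["a", "bb", "ccc"]:
    stats[name] = len(name)          # the per-entry "real work"
    saver.checkpoint(stats)          # like SaveState.checkpoint(ITR, finalizer, dsrp)
saver.checkpoint(stats, override=True)   # forced final checkpoint before finishing
```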
......@@ -79,28 +79,28 @@ class Inc:
RPath.copy_attribs(mirrordir, dirsign)
return RobustAction(lambda: None, final, dirsign.delete)
def get_inc(rp, time, typestr):
"""Return increment like rp but with time and typestr suffixes"""
addtostr = lambda s: "%s.%s.%s" % (s, Time.timetostring(time), typestr)
if rp.index:
incrp = rp.__class__(rp.conn, rp.base, rp.index[:-1] +
(addtostr(rp.index[-1]),))
else: incrp = rp.__class__(rp.conn, addtostr(rp.base), rp.index)
if Globals.quoting_enabled: incrp.quote_path()
return incrp
def get_inc_ext(rp, typestr):
"""Return RPath/DSRPath like rp but with inc/time extension
"""Return increment with specified type and correct time
If the file exists, then probably a previous backup has been
aborted. We then keep asking FindTime to get a time later
than the one that already has an inc file.
"""
def get_newinc(timestr):
"""Get new increment rp with given time suffix"""
addtostr = lambda s: "%s.%s.%s" % (s, timestr, typestr)
if rp.index:
incrp = rp.__class__(rp.conn, rp.base, rp.index[:-1] +
(addtostr(rp.index[-1]),))
else: incrp = rp.__class__(rp.conn, addtostr(rp.base), rp.index)
if Globals.quoting_enabled: incrp.quote_path()
return incrp
inctime = 0
while 1:
inctime = Resume.FindTime(rp.index, inctime)
incrp = get_newinc(Time.timetostring(inctime))
incrp = Inc.get_inc(rp, inctime, typestr)
if not incrp.lstat(): break
Inc._inc_file = incrp
return incrp
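get_inc builds an increment name by appending a time string and a type suffix to the last path component, and get_inc_ext now simply retries get_inc with later times from Resume.FindTime until the resulting name is unused. A tiny sketch of the naming scheme only; the path and the time-string format below are made up for illustration, the real format comes from Time.timetostring.

```python
def make_inc_name(path, timestring, typestr):
    """Append ".<time>.<type>" to the last path component, as Inc.get_inc does."""
    head, sep, tail = path.rpartition("/")
    return head + sep + "%s.%s.%s" % (tail, timestring, typestr)

print(make_inc_name("increments/subdir/file.txt",
                    "2002-06-09T16:58:12-05:00", "diff"))
# -> increments/subdir/file.txt.2002-06-09T16:58:12-05:00.diff
print(make_inc_name("increments/subdir", "2002-06-09T16:58:12-05:00", "dir"))
# -> increments/subdir.2002-06-09T16:58:12-05:00.dir
```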
......@@ -109,7 +109,7 @@ MakeStatic(Inc)
class IncrementITR(StatsITR):
"""Patch and increment iterator of increment triples
"""Patch and increment mirror directory
This has to be an ITR because directories that have files in them
changed are flagged with an increment marker. There are four
......@@ -208,7 +208,10 @@ class IncrementITR(StatsITR):
def end_process(self):
"""Do final work when leaving a tree (directory)"""
diff_rorp, dsrp, incpref = self.diff_rorp, self.dsrp, self.incpref
try: diff_rorp, dsrp, incpref = self.diff_rorp, self.dsrp, self.incpref
except AttributeError: # These weren't set because of some error
return
if self.mirror_isdirectory:
if not diff_rorp and not self.changed: return
......@@ -224,8 +227,9 @@ class IncrementITR(StatsITR):
self.end_stats(diff_rorp, dsrp, Inc._inc_file)
if self.incpref.isdir() and (self.mirror_isdirectory or dsrp.isdir()):
self.write_stats_to_rp(Inc.get_inc_ext(
self.incpref.append("directory_statistics"), "data"))
self.write_stats_to_rp(Inc.get_inc(
self.incpref.append("directory_statistics"),
Time.curtime, "data"))
def branch_process(self, subinstance):
"""Update statistics, and the has_changed flag if change in branch"""
......@@ -233,3 +237,37 @@ class IncrementITR(StatsITR):
self.add_file_stats(subinstance)
class MirrorITR(StatsITR):
"""Like IncrementITR, but only patch mirror directory, don't increment"""
def __init__(self, inc_rpath):
"""Set inc_rpath, an rpath of the base of the inc tree"""
self.inc_rpath = inc_rpath
StatsITR.__init__(self, inc_rpath)
def start_process(self, index, diff_rorp, mirror_dsrp):
"""Initialize statistics, do actual writing to mirror"""
self.start_stats(mirror_dsrp)
if diff_rorp and not diff_rorp.isplaceholder():
RORPIter.patchonce_action(None, mirror_dsrp, diff_rorp).execute()
self.incpref = self.inc_rpath.new_index(index)
if mirror_dsrp.isdir() and not self.incpref.lstat():
self.incpref.mkdir() # holds the statistics files
self.diff_rorp, self.mirror_dsrp = diff_rorp, mirror_dsrp
def end_process(self):
"""Update statistics when leaving"""
try: diff_rorp, mirror_dsrp = self.diff_rorp, self.mirror_dsrp
except AttributeError: # Some error above prevented these being set
return
self.end_stats(self.diff_rorp, self.mirror_dsrp)
if self.incpref.isdir():
self.write_stats_to_rp(Inc.get_inc(
self.incpref.append("directory_statistics"),
Time.curtime, "data"))
def branch_process(self, subinstance):
"""Update statistics with subdirectory results"""
self.add_file_stats(subinstance)
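The new MirrorITR follows the same ITR (iterator tree reducer) shape as IncrementITR: start_process when a node is entered, branch_process to fold a finished child back into its parent, end_process when the node is left and its statistics are written. A minimal self-contained sketch of that reduction over a nested dict instead of a real mirror tree; StatsNode and reduce_tree are illustrative only.

```python
class StatsNode:
    def __init__(self, name):
        self.name, self.files, self.size = name, 0, 0
    def add_file(self, size):
        self.files += 1
        self.size += size
    def absorb(self, child):          # analogue of add_file_stats(subinstance)
        self.files += child.files
        self.size += child.size

def reduce_tree(name, tree):
    node = StatsNode(name)            # start_process
    for child_name, child in tree.items():
        if isinstance(child, dict):   # subdirectory: recurse, then fold it in
            node.absorb(reduce_tree(child_name, child))   # branch_process
        else:
            node.add_file(child)
    print("%s: %d files, %d bytes" % (node.name, node.files, node.size))
    return node                       # end_process

reduce_tree("root", {"a.txt": 10, "subdir": {"b.txt": 20, "c.txt": 30}})
```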
......@@ -203,9 +203,7 @@ class Main:
# Since no "rdiff-backup-data" dir, use root of destination.
SetConnections.UpdateGlobal('rbdir', dest_rp)
SetConnections.BackupInitConnections(src_rp.conn, dest_rp.conn)
RSI = Globals.backup_writer.Resume.ResumeCheck()
SaveState.init_filenames(None)
HighLevel.Mirror(src_rp, dest_rp, 1, RSI, None)
HighLevel.Mirror(src_rp, dest_rp)
def mirror_check_paths(self, rpin, rpout):
"""Check paths and return rpin, rpout"""
......@@ -224,13 +222,11 @@ rdiff-backup with the --force option if you want to mirror anyway.""" %
self.backup_init_select(rpin, rpout)
self.backup_init_dirs(rpin, rpout)
RSI = Globals.backup_writer.Resume.ResumeCheck()
SaveState.init_filenames()
if self.prevtime:
Time.setprevtime(self.prevtime)
SaveState.init_filenames(1)
HighLevel.Mirror_and_increment(rpin, rpout, self.incdir, RSI)
else:
SaveState.init_filenames(None)
HighLevel.Mirror(rpin, rpout, 1, RSI)
else: HighLevel.Mirror(rpin, rpout, self.incdir, RSI)
self.backup_touch_curmirror(rpin, rpout)
def backup_init_select(self, rpin, rpout):
......
......@@ -315,23 +315,16 @@ class SaveState:
_last_checkpoint_time = 0 # time in seconds of last checkpoint
_checkpoint_rp = None # RPath of checkpoint data pickle
def init_filenames(cls, incrementing):
"""Set rpaths of markers. Assume rbdir already set.
If incrementing, then indicate increment operation, otherwise
indicate mirror.
"""
def init_filenames(cls):
"""Set rpaths of markers. Assume rbdir already set."""
if not Globals.isbackup_writer:
return Globals.backup_writer.SaveState.init_filenames(incrementing)
return Globals.backup_writer.SaveState.init_filenames()
assert Globals.local_connection is Globals.rbdir.conn, \
(Globals.rbdir.conn, Globals.backup_writer)
if incrementing: cls._last_file_sym = Globals.rbdir.append(
cls._last_file_sym = Globals.rbdir.append(
"last-file-incremented.%s.data" % Time.curtimestr)
else: cls._last_file_sym = Globals.rbdir.append(
"last-file-mirrored.%s.data" % Time.curtimestr)
cls._checkpoint_rp = Globals.rbdir.append(
"checkpoint-data.%s.data" % Time.curtimestr)
cls._last_file_definitive_rp = Globals.rbdir.append(
......@@ -367,8 +360,7 @@ class SaveState:
else: return RobustAction(lambda: None, cls.touch_last_file,
lambda exc: None)
def checkpoint_inc_backup(cls, ITR, finalizer, last_file_rorp,
override = None):
def checkpoint(cls, ITR, finalizer, last_file_rorp, override = None):
"""Save states of tree reducer and finalizer during inc backup
If override is true, checkpoint even if one isn't due.
......@@ -384,20 +376,6 @@ class SaveState:
state_string),
cls.record_last_file_action(last_file_rorp)]).execute()
def checkpoint_mirror(cls, finalizer, last_file_rorp, override = None):
"""For a mirror, only finalizer and last_file should be saved"""
if not override and not cls.checkpoint_needed(): return
if not cls._checkpoint_rp:
Log("Warning, _checkpoint_rp not set yet", 2)
return
cls._last_checkpoint_time = time.time()
Log("Writing checkpoint time %s" % cls._last_checkpoint_time, 7)
state_string = cPickle.dumps(finalizer)
Robust.chain([Robust.destructive_write_action(cls._checkpoint_rp,
state_string),
cls.record_last_file_action(last_file_rorp)]).execute()
def checkpoint_needed(cls):
"""Returns true if another checkpoint is called for"""
return (time.time() > cls._last_checkpoint_time +
......@@ -522,9 +500,7 @@ class Resume:
def unpickle_checkpoint(cls, checkpoint_rp):
"""Read data from checkpoint_rp and return unpickled data
Return value is pair finalizer state for a mirror checkpoint,
and (patch increment ITR, finalizer state) for increment
checkpoint.
Return value is pair (patch increment ITR, finalizer state).
"""
fp = checkpoint_rp.open("rb")
......
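With checkpoint_mirror removed, SaveState.checkpoint always pickles the tree reducer and the finalizer together, and unpickle_checkpoint always hands back that pair. A rough sketch of such a checkpoint round trip, using plain pickle and a temp-file rename in place of Robust.destructive_write_action; the file name and the two state values are made up.

```python
import os, pickle, tempfile

def write_checkpoint(path, itr_state, finalizer_state):
    data = pickle.dumps((itr_state, finalizer_state))
    fd, tmp = tempfile.mkstemp(dir=os.path.dirname(path) or ".")
    with os.fdopen(fd, "wb") as fp:
        fp.write(data)
    os.replace(tmp, path)          # rename into place, like a destructive write

def read_checkpoint(path):
    with open(path, "rb") as fp:
        return pickle.loads(fp.read())   # always an (ITR, finalizer) pair now

write_checkpoint("checkpoint-data.demo.data", {"files": 3}, {"pending": []})
itr, finalizer = read_checkpoint("checkpoint-data.demo.data")
print(itr, finalizer)
os.remove("checkpoint-data.demo.data")
```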
......@@ -38,21 +38,21 @@ class StatsObj:
"""Return string printing out statistics"""
timelist = []
if self.StartTime is not None:
timelist.append("StartTime %s (%s)" %
timelist.append("StartTime %s (%s)\n" %
(self.StartTime, Time.timetopretty(self.StartTime)))
if self.EndTime is not None:
timelist.append("EndTime %s (%s)" %
timelist.append("EndTime %s (%s)\n" %
(self.EndTime, Time.timetopretty(self.EndTime)))
if self.StartTime is not None and self.EndTime is not None:
if self.ElapsedTime is None:
self.ElapsedTime = self.EndTime - self.StartTime
timelist.append("ElapsedTime %s (%s)" %
timelist.append("ElapsedTime %s (%s)\n" %
(self.ElapsedTime, Time.inttopretty(self.ElapsedTime)))
filelist = ["%s %s" % (attr, self.get_stat(attr))
filelist = ["%s %s\n" % (attr, self.get_stat(attr))
for attr in self.stat_file_attrs
if self.get_stat(attr) is not None]
return "\n".join(timelist + filelist)
return "".join(timelist + filelist)
def init_stats_from_string(self, s):
"""Initialize attributes from string, return self for convenience"""
......@@ -157,7 +157,7 @@ class StatsITR(IterTreeReducer, StatsObj):
self.DeletedFiles += 1
self.DeletedFileSize += self.mirror_base_size
self.IncrementFileSize += inc_rp and inc_rp.getsize() or 0
else: assert None # One of before and after should exist
def add_file_stats(self, subinstance):
"""Add all file statistics from subinstance to current totals"""
......
......@@ -82,7 +82,7 @@ def InternalBackup(source_local, dest_local, src_dir, dest_dir,
_get_main().cleanup()
def InternalMirror(source_local, dest_local, src_dir, dest_dir,
checkpointing = None):
write_data = None):
"""Mirror src to dest internally, like InternalBackup"""
remote_schema = '%s'
......@@ -97,15 +97,18 @@ def InternalMirror(source_local, dest_local, src_dir, dest_dir,
_get_main().misc_setup([rpin, rpout])
_get_main().backup_init_select(rpin, rpout)
if not rpout.lstat(): rpout.mkdir()
if checkpointing: # use rdiff-backup-data dir to checkpoint
if write_data: # use rdiff-backup-data dir to checkpoint
data_dir = rpout.append("rdiff-backup-data")
if not data_dir.lstat(): data_dir.mkdir()
SetConnections.UpdateGlobal('rbdir', data_dir)
else: # just use root directory to hold checkpoints
SetConnections.UpdateGlobal('rbdir', rpout)
SetConnections.BackupInitConnections(rpin.conn, rpout.conn)
SaveState.init_filenames(None)
HighLevel.Mirror(rpin, rpout, checkpointing, None, write_finaldata = None)
if write_data:
SaveState.init_filenames()
HighLevel.Mirror(rpin, rpout, Globals.rbdir.append("increments"))
else: HighLevel.Mirror(rpin, rpout)
_get_main().cleanup()
def InternalRestore(mirror_local, dest_local, mirror_dir, dest_dir, time):
......
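InternalMirror's checkpointing flag becomes write_data: when set, the test helper creates rdiff-backup-data under the destination, points rbdir there, and passes the increments path to HighLevel.Mirror; otherwise the destination root is used and no increments path is passed. A small sketch of that choice; pick_rbdir is a made-up helper, not part of the test suite.

```python
def pick_rbdir(dest_dir, write_data):
    """Return (rbdir, increments-path-or-None) the way InternalMirror now decides."""
    if write_data:
        data_dir = dest_dir + "/rdiff-backup-data"
        return data_dir, data_dir + "/increments"
    return dest_dir, None

print(pick_rbdir("testfiles/output", write_data=True))
print(pick_rbdir("testfiles/output", write_data=False))
```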
......@@ -150,7 +150,7 @@ class PathSetter(unittest.TestCase):
assert len(self.getinc_paths("nochange.",
"testfiles/output/rdiff-backup-data/increments")) == 0
assert len(self.getinc_paths("",
"testfiles/output/rdiff-backup-data/increments/nochange")) == 0
"testfiles/output/rdiff-backup-data/increments/nochange")) == 1
def getinc_paths(self, basename, directory):
"""Return increment.______.dir paths"""
......
......@@ -164,6 +164,23 @@ class inctest(unittest.TestCase):
class inctest2(unittest.TestCase):
"""Like inctest but contains more elaborate tests"""
def stats_check_initial(self, s):
"""Make sure stats object s compatible with initial mirroring
A lot of the off by one stuff is because the root directory
exists in the below examples.
"""
assert s.MirrorFiles == 1 or s.MirrorFiles == 0
assert s.MirrorFileSize < 20000
assert s.NewFiles <= s.SourceFiles <= s.NewFiles + 1
assert s.NewFileSize <= s.SourceFileSize <= s.NewFileSize + 20000
assert s.ChangedFiles == 1 or s.ChangedFiles == 0
assert s.ChangedSourceSize < 20000
assert s.ChangedMirrorSize < 20000
assert s.DeletedFiles == s.DeletedFileSize == 0
assert s.IncrementFileSize == 0
def testStatistics(self):
"""Test the writing of statistics
......@@ -181,8 +198,13 @@ class inctest2(unittest.TestCase):
incs = Restore.get_inclist(inc_base.append("subdir").
append("directory_statistics"))
assert len(incs) == 1
subdir_stats = StatsObj().read_stats_from_rp(incs[0])
assert len(incs) == 2
s1 = StatsObj().read_stats_from_rp(incs[0]) # initial mirror stats
assert s1.SourceFiles == 2
assert 400000 < s1.SourceFileSize < 420000
self.stats_check_initial(s1)
subdir_stats = StatsObj().read_stats_from_rp(incs[1]) # increment stats
assert subdir_stats.SourceFiles == 2
assert 400000 < subdir_stats.SourceFileSize < 420000
assert subdir_stats.MirrorFiles == 2
......@@ -195,8 +217,13 @@ class inctest2(unittest.TestCase):
assert 10 < subdir_stats.IncrementFileSize < 20000
incs = Restore.get_inclist(inc_base.append("directory_statistics"))
assert len(incs) == 1
root_stats = StatsObj().read_stats_from_rp(incs[0])
assert len(incs) == 2
s2 = StatsObj().read_stats_from_rp(incs[0])
assert s2.SourceFiles == 7
assert 700000 < s2.SourceFileSize < 750000
self.stats_check_initial(s2)
root_stats = StatsObj().read_stats_from_rp(incs[1])
assert root_stats.SourceFiles == 7
assert 550000 < root_stats.SourceFileSize < 570000
assert root_stats.MirrorFiles == 7
......@@ -208,6 +235,6 @@ class inctest2(unittest.TestCase):
assert 3 <= root_stats.ChangedFiles <= 4, root_stats.ChangedFiles
assert 450000 < root_stats.ChangedSourceSize < 470000
assert 400000 < root_stats.ChangedMirrorSize < 420000
assert 10 < subdir_stats.IncrementtFileSize < 30000
assert 10 < subdir_stats.IncrementFileSize < 30000
if __name__ == '__main__': unittest.main()
......@@ -191,7 +191,7 @@ class IncrementTest2(PathSetter):
def runtest(self):
"""After setting connections, etc., run actual test using this"""
Time.setcurtime()
SaveState.init_filenames(1)
SaveState.init_filenames()
_get_main().backup_init_select(Local.inc1rp, Local.rpout)
HighLevel.Mirror(self.inc1rp, self.rpout)
......@@ -295,7 +295,7 @@ class MirrorTest(PathSetter):
"Test mirroring a directory that has no permissions"
self.setPathnames(None, None, None, None)
Time.setcurtime()
SaveState.init_filenames(None)
SaveState.init_filenames()
self.Mirror(self.noperms, self.noperms_out, None)
# Can't compare because we don't have the permissions to do it right
#assert CompareRecursive(Local.noperms, Local.noperms_out)
......@@ -304,8 +304,8 @@ class MirrorTest(PathSetter):
"No permissions mirroring (remote)"
self.setPathnames('test1', '../', 'test2/tmp', '../../')
Time.setcurtime()
SaveState.init_filenames(None)
self.Mirror(self.noperms, self.noperms_out, checkpoint=None)
SaveState.init_filenames()
self.Mirror(self.noperms, self.noperms_out, None)
#assert CompareRecursive(Local.noperms, Local.noperms_out)
def testPermSkipLocal(self):
......@@ -313,8 +313,8 @@ class MirrorTest(PathSetter):
self.setPathnames(None, None, None, None)
Globals.change_source_perms = None
Time.setcurtime()
SaveState.init_filenames(None)
self.Mirror(self.one_unreadable, self.one_unreadable_out, checkpoint=None)
SaveState.init_filenames()
self.Mirror(self.one_unreadable, self.one_unreadable_out)
Globals.change_source_perms = 1
self.Mirror(self.one_unreadable, self.one_unreadable_out)
# Could add test, but for now just make sure it doesn't exit
......@@ -324,7 +324,7 @@ class MirrorTest(PathSetter):
self.setPathnames('test1', '../', 'test2/tmp', '../../')
Globals.change_source_perms = None
Time.setcurtime()
SaveState.init_filenames(None)
SaveState.init_filenames()
self.Mirror(self.one_unreadable, self.one_unreadable_out)
Globals.change_source_perms = 1
self.Mirror(self.one_unreadable, self.one_unreadable_out)
......@@ -394,7 +394,7 @@ class MirrorTest(PathSetter):
def runtest(self):
Time.setcurtime()
SaveState.init_filenames(None)
SaveState.init_filenames()
assert self.rbdir.lstat()
self.Mirror(self.inc1rp, self.rpout)
assert CompareRecursive(Local.inc1rp, Local.rpout)
......@@ -409,7 +409,7 @@ class MirrorTest(PathSetter):
self.reset_rps()
Time.setcurtime()
SaveState.init_filenames(None)
SaveState.init_filenames()
self.Mirror(self.inc1rp, self.rpout)
#RPath.copy_attribs(self.inc1rp, self.rpout)
assert CompareRecursive(Local.inc1rp, Local.rpout)
......@@ -417,10 +417,13 @@ class MirrorTest(PathSetter):
self.Mirror(self.inc2rp, self.rpout)
assert CompareRecursive(Local.inc2rp, Local.rpout)
def Mirror(self, rpin, rpout, checkpoint = 1):
def Mirror(self, rpin, rpout, write_increments = 1):
"""Like HighLevel.Mirror, but run misc_setup first"""
_get_main().misc_setup([rpin, rpout])
_get_main().backup_init_select(rpin, rpout)
HighLevel.Mirror(rpin, rpout, checkpoint)
if write_increments:
HighLevel.Mirror(rpin, rpout,
rpout.append_path("rdiff-backup-data/increments"))
else: HighLevel.Mirror(rpin, rpout)
if __name__ == "__main__": unittest.main()
......@@ -50,7 +50,8 @@ DeletedFileSize 6
ChangedFiles 7
ChangedSourceSize 8
ChangedMirrorSize 9
IncrementFileSize 10""", "'%s'" % stats_string
IncrementFileSize 10
""", "'%s'" % stats_string
def test_init_stats(self):
"""Test setting stat object from string"""
......